commit 3dadaa4432

Add a default robots.txt that allows bots access to all paths. Add a Mix task to generate a robots.txt that allows bots access to no paths. Document custom emojis, MRF, and static_dir; the static_dir documentation includes docs for the robots.txt Mix task.
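The default robots.txt added by this commit is not reproduced here, but a permissive robots.txt that lets crawlers reach every path conventionally looks like the following (an empty Disallow directive allows everything):

    User-Agent: *
    Disallow:

The Mix task below generates the opposite policy, `Disallow: /`, which asks well-behaved crawlers to index nothing.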
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.RobotsTxt do
  use Mix.Task

  @shortdoc "Generate robots.txt"
  @moduledoc """
  Generates robots.txt

  ## Overwrite robots.txt to disallow all

      mix pleroma.robots_txt disallow_all

  This will write a robots.txt that will hide all paths on your instance
  from search engines and other robots that obey robots.txt

  """
  def run(["disallow_all"]) do
    static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")

    if !File.exists?(static_dir) do
      File.mkdir_p!(static_dir)
    end

    robots_txt_path = Path.join(static_dir, "robots.txt")
    robots_txt_content = "User-Agent: *\nDisallow: /\n"

    File.write!(robots_txt_path, robots_txt_content, [:write])
  end
end
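As a rough illustration only (not part of this commit), the task can be exercised from ExUnit by pointing the instance's static_dir at a temporary directory; the test module name and directory below are hypothetical:

# Minimal sketch, assuming the :pleroma application config is safe to
# overwrite inside the test. Names here are illustrative, not upstream code.
defmodule Mix.Tasks.Pleroma.RobotsTxtTest do
  use ExUnit.Case

  test "disallow_all writes a robots.txt that blocks every path" do
    tmp_dir = Path.join(System.tmp_dir!(), "robots_txt_test")

    # Point the task at the temporary directory. This replaces the whole
    # :instance config for the test run, which is acceptable for this sketch.
    Application.put_env(:pleroma, :instance, static_dir: tmp_dir)

    Mix.Tasks.Pleroma.RobotsTxt.run(["disallow_all"])

    assert File.read!(Path.join(tmp_dir, "robots.txt")) ==
             "User-Agent: *\nDisallow: /\n"
  end
end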