path: root/lib/mix/tasks/pleroma/robots_txt.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.RobotsTxt do
  use Mix.Task

  @shortdoc "Generate robots.txt"
  @moduledoc """
  Generates robots.txt

  ## Overwrite robots.txt to disallow all

      mix pleroma.robots_txt disallow_all

  This will write a robots.txt file that hides all paths on your instance
  from search engines and other crawlers that obey robots.txt.
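
  The generated file will contain:

      User-Agent: *
      Disallow: /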

  """
  def run(["disallow_all"]) do
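    # Start the application so Pleroma's configuration is available.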
    Mix.Pleroma.start_pleroma()
    static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")

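    # Make sure the static directory exists before writing into it.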
    if !File.exists?(static_dir) do
      File.mkdir_p!(static_dir)
    end

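    # Write a robots.txt that disallows every path for all user agents,
    # replacing any existing file.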
    robots_txt_path = Path.join(static_dir, "robots.txt")
    robots_txt_content = "User-Agent: *\nDisallow: /\n"

    File.write!(robots_txt_path, robots_txt_content, [:write])
  end
end