summaryrefslogtreecommitdiff
path: root/lib/pleroma/web/rel_me.ex
blob: 8e2b515086beb211b8781823c03bf6334f72c805 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RelMe do
  @moduledoc """
  Fetches a remote HTML page and extracts its `rel="me"` link/anchor hrefs.

  Used to verify that a URL placed in a user's profile links back to the
  profile (the rel-me reciprocal-link convention).
  """

  # HTTP options shared by all requests: use the media connection pool and
  # cap the response body at 2 MB to avoid downloading arbitrarily large pages.
  @options [
    pool: :media,
    max_body: 2_000_000
  ]

  if Pleroma.Config.get(:env) == :test do
    # In the test environment, bypass the cache so every call reaches the
    # (mocked) HTTP layer deterministically.
    def parse(url) when is_binary(url), do: parse_url(url)
  else
    # Cache results per URL; `parse_url/1` never raises (it rescues
    # internally), so `{:commit, ...}` always stores a tagged tuple.
    def parse(url) when is_binary(url) do
      Cachex.fetch!(:rel_me_cache, url, fn _ ->
        {:commit, parse_url(url)}
      end)
    rescue
      e -> {:error, "Cachex error: #{inspect(e)}"}
    end
  end

  def parse(_), do: {:error, "No URL provided"}

  # Fetches `url` and returns `{:ok, hrefs}` with all `rel~=me` link/anchor
  # hrefs, or `{:error, reason}` on any failure.
  defp parse_url(url) do
    opts =
      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
        # Hackney needs an explicit receive timeout and `with_body: true`
        # so the body is returned directly instead of as a stream ref.
        Keyword.merge(@options,
          recv_timeout: 2_000,
          with_body: true
        )
      else
        @options
      end

    with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
           Pleroma.HTTP.get(url, [], adapter: opts),
         {:ok, html_tree} <- Floki.parse_document(html),
         data <-
           Floki.attribute(html_tree, "link[rel~=me]", "href") ++
             Floki.attribute(html_tree, "a[rel~=me]", "href") do
      {:ok, data}
    else
      # Without this clause a non-2xx response fell through the `with` and
      # was returned verbatim as `{:ok, %Tesla.Env{}}`, leaking the raw env
      # to callers as a bogus success. Normalize it to an error tuple.
      {:ok, %Tesla.Env{status: status}} ->
        {:error, "Non-2xx response code: #{status}"}

      {:error, _} = error ->
        error
    end
  rescue
    e -> {:error, "Parsing error: #{inspect(e)}"}
  end

  @doc """
  Returns `"me"` when `target_page` contains a `rel="me"` href matching one
  of `profile_urls`, otherwise `nil`.

  Written assertively: any failure (fetch/parse error, no matching href)
  raises a MatchError which the `rescue` converts to `nil`.
  """
  def maybe_put_rel_me("http" <> _ = target_page, profile_urls) when is_list(profile_urls) do
    {:ok, rel_me_hrefs} = parse(target_page)

    true = Enum.any?(rel_me_hrefs, fn x -> x in profile_urls end)

    "me"
  rescue
    _ -> nil
  end

  def maybe_put_rel_me(_, _) do
    nil
  end
end