summaryrefslogtreecommitdiff
path: root/lib/pleroma/web/media_proxy.ex
blob: 27f3371389edbb5178e5b6435f746b2d19c65be6 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.MediaProxy do
  @moduledoc """
  Builds and verifies signed media-proxy URLs and maintains a cache of
  banned (invalidated) proxy URLs.

  Remote media URLs are encoded as URL-safe base64 plus an HMAC-SHA1
  signature derived from the endpoint's `secret_key_base`, so the proxy
  endpoint only fetches URLs this server itself generated.
  """

  alias Pleroma.Config
  alias Pleroma.Helpers.UriHelper
  alias Pleroma.Upload
  alias Pleroma.Web
  alias Pleroma.Web.MediaProxy.Invalidation

  # URL-safe base64 without padding, so encoded values are path-segment safe.
  @base64_opts [padding: false]
  @cache_table :banned_urls_cache

  # Indirection so tests can substitute a Cachex stand-in via config.
  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

  def cache_table, do: @cache_table

  @spec in_banned_urls(String.t()) :: boolean()
  def in_banned_urls(url), do: elem(@cachex.exists?(@cache_table, url(url)), 1)

  def remove_from_banned_urls(urls) when is_list(urls) do
    @cachex.execute!(@cache_table, fn cache ->
      Enum.each(Invalidation.prepare_urls(urls), &@cachex.del(cache, &1))
    end)
  end

  def remove_from_banned_urls(url) when is_binary(url) do
    @cachex.del(@cache_table, url(url))
  end

  def put_in_banned_urls(urls) when is_list(urls) do
    @cachex.execute!(@cache_table, fn cache ->
      Enum.each(Invalidation.prepare_urls(urls), &@cachex.put(cache, &1, true))
    end)
  end

  def put_in_banned_urls(url) when is_binary(url) do
    @cachex.put(@cache_table, url(url), true)
  end

  @doc """
  Returns the media-proxy URL for `url`, or `url` unchanged when the
  proxy is disabled or the URL is local/whitelisted.

  `nil` or empty URLs yield `nil`; relative paths pass through as-is.
  """
  def url(url) when is_nil(url) or url == "", do: nil
  def url("/" <> _ = url), do: url

  def url(url) do
    if enabled?() and url_proxiable?(url) do
      encode_url(url)
    else
      url
    end
  end

  @spec url_proxiable?(String.t()) :: boolean()
  def url_proxiable?(url) do
    not local?(url) and not whitelisted?(url)
  end

  def preview_url(url, preview_params \\ []) do
    if preview_enabled?() do
      encode_preview_url(url, preview_params)
    else
      url(url)
    end
  end

  def enabled?, do: Config.get([:media_proxy, :enabled], false)

  # Note: media proxy must be enabled for media preview proxy in order to load all
  #   non-local non-whitelisted URLs through it and be sure that body size constraint is preserved.
  def preview_enabled?, do: enabled?() and !!Config.get([:media_preview_proxy, :enabled])

  def local?(url), do: String.starts_with?(url, Web.base_url())

  def whitelisted?(url) do
    %{host: domain} = URI.parse(url)

    # The upload base URL is always implicitly whitelisted so local uploads
    # are never routed through the proxy.
    mediaproxy_whitelist_domains =
      [:media_proxy, :whitelist]
      |> Config.get()
      |> Kernel.++(["#{Upload.base_url()}"])
      |> Enum.map(&maybe_get_domain_from_url/1)

    domain in mediaproxy_whitelist_domains
  end

  # Whitelist entries may be bare domains or full URLs; normalize to a host.
  defp maybe_get_domain_from_url("http" <> _ = url) do
    URI.parse(url).host
  end

  defp maybe_get_domain_from_url(domain), do: domain

  # Returns `{encoded_url, encoded_signature}` for `url`, both as
  # unpadded URL-safe base64.
  defp base64_sig64(url) do
    base64 = Base.url_encode64(url, @base64_opts)

    sig64 =
      base64
      |> signed_url()
      |> Base.url_encode64(@base64_opts)

    {base64, sig64}
  end

  def encode_url(url) do
    {base64, sig64} = base64_sig64(url)

    build_url(sig64, base64, filename(url))
  end

  def encode_preview_url(url, preview_params \\ []) do
    {base64, sig64} = base64_sig64(url)

    build_preview_url(sig64, base64, filename(url), preview_params)
  end

  @doc """
  Decodes and verifies a proxied URL.

  Returns `{:ok, original_url}` when `sig` is a valid signature for the
  base64-encoded `url`, `{:error, :invalid_signature}` otherwise.
  """
  def decode_url(sig, url) do
    # NOTE(review): `==` is not a constant-time comparison; consider
    # `Plug.Crypto.secure_compare/2` here to rule out timing side channels.
    with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
         signature when signature == sig <- signed_url(url) do
      {:ok, Base.url_decode64!(url, @base64_opts)}
    else
      _ -> {:error, :invalid_signature}
    end
  end

  # HMAC-SHA1 of `url` keyed with the endpoint secret.
  # :crypto.hmac/3 was deprecated in OTP 23 and removed in OTP 24;
  # :crypto.mac/4 (available since OTP 22.1) computes the same digest.
  defp signed_url(url) do
    :crypto.mac(:hmac, :sha, Config.get([Web.Endpoint, :secret_key_base]), url)
  end

  # Basename of the URL's path, or nil when the URL has no path component.
  def filename(url_or_path) do
    if path = URI.parse(url_or_path).path, do: Path.basename(path)
  end

  def base_url do
    Config.get([:media_proxy, :base_url], Web.base_url())
  end

  defp proxy_url(path, sig_base64, url_base64, filename) do
    # `Enum.filter(& &1)` drops the filename segment when it is nil.
    [
      base_url(),
      path,
      sig_base64,
      url_base64,
      filename
    ]
    |> Enum.filter(& &1)
    |> Path.join()
  end

  def build_url(sig_base64, url_base64, filename \\ nil) do
    proxy_url("proxy", sig_base64, url_base64, filename)
  end

  def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
    uri = proxy_url("proxy/preview", sig_base64, url_base64, filename)

    UriHelper.modify_uri_params(uri, preview_params)
  end

  @doc """
  Checks that the filename segment of a proxy request path matches the
  filename derived from the decoded `url`.

  Returns `:ok` on match (or when no filename can be derived), otherwise
  `{:wrong_filename, expected_filename}`.
  """
  def verify_request_path_and_url(
        %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
        url
      ) do
    verify_request_path_and_url(request_path, url)
  end

  def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
    filename = filename(url)

    if filename && not basename_matches?(request_path, filename) do
      {:wrong_filename, filename}
    else
      :ok
    end
  end

  def verify_request_path_and_url(_, _), do: :ok

  # Accepts the basename in raw, percent-decoded, or percent-encoded form,
  # since clients may request either representation.
  defp basename_matches?(path, filename) do
    basename = Path.basename(path)
    basename == filename or URI.decode(basename) == filename or URI.encode(basename) == filename
  end
end