author     Maksim Pechnikov <parallel588@gmail.com>  2020-09-18 22:13:05 +0300
committer  Maksim Pechnikov <parallel588@gmail.com>  2020-09-18 22:13:05 +0300
commit     ed3bc53fa137ec4652606cad1df5783c47afb830 (patch)
tree       ab05f72720d2118b6f22b159c80bf5de4961d7ce
parent     2159daa9af8967fe4b6d31e19c7048979b4cb165 (diff)
parent     6c052bd5b6cc29d321b500654bd6b098d0e6c56a (diff)
Merge branch 'develop' into issue/2099
-rw-r--r--  CHANGELOG.md                                                   |   9
-rw-r--r--  config/config.exs                                              |  28
-rw-r--r--  config/description.exs                                         |  56
-rw-r--r--  config/test.exs                                                |   5
-rw-r--r--  docs/configuration/cheatsheet.md                               |  18
-rw-r--r--  lib/pleroma/application.ex                                     |   9
-rw-r--r--  lib/pleroma/helpers/media_helper.ex                            | 150
-rw-r--r--  lib/pleroma/helpers/qt_fast_start.ex                           | 131
-rw-r--r--  lib/pleroma/helpers/uri_helper.ex                              |  14
-rw-r--r--  lib/pleroma/instances/instance.ex                              |   4
-rw-r--r--  lib/pleroma/object/fetcher.ex                                  |  60
-rw-r--r--  lib/pleroma/reverse_proxy/reverse_proxy.ex                     |   5
-rw-r--r--  lib/pleroma/signature.ex                                       |   6
-rw-r--r--  lib/pleroma/user.ex                                            |  10
-rw-r--r--  lib/pleroma/web/activity_pub/activity_pub.ex                   |  14
-rw-r--r--  lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex |  14
-rw-r--r--  lib/pleroma/web/activity_pub/publisher.ex                      |  34
-rw-r--r--  lib/pleroma/web/activity_pub/transmogrifier.ex                 |   2
-rw-r--r--  lib/pleroma/web/fed_sockets/fed_registry.ex                    | 185
-rw-r--r--  lib/pleroma/web/fed_sockets/fed_socket.ex                      | 137
-rw-r--r--  lib/pleroma/web/fed_sockets/fed_sockets.ex                     | 185
-rw-r--r--  lib/pleroma/web/fed_sockets/fetch_registry.ex                  | 151
-rw-r--r--  lib/pleroma/web/fed_sockets/incoming_handler.ex                |  88
-rw-r--r--  lib/pleroma/web/fed_sockets/ingester_worker.ex                 |  33
-rw-r--r--  lib/pleroma/web/fed_sockets/outgoing_handler.ex                | 146
-rw-r--r--  lib/pleroma/web/fed_sockets/socket_info.ex                     |  52
-rw-r--r--  lib/pleroma/web/fed_sockets/supervisor.ex                      |  59
-rw-r--r--  lib/pleroma/web/mastodon_api/views/account_view.ex             |  10
-rw-r--r--  lib/pleroma/web/mastodon_api/views/status_view.ex              |   3
-rw-r--r--  lib/pleroma/web/media_proxy/invalidation.ex                    |   4
-rw-r--r--  lib/pleroma/web/media_proxy/media_proxy.ex                     |  87
-rw-r--r--  lib/pleroma/web/media_proxy/media_proxy_controller.ex          | 195
-rw-r--r--  lib/pleroma/web/metadata/utils.ex                              |   2
-rw-r--r--  lib/pleroma/web/oauth/oauth_controller.ex                      |   4
-rw-r--r--  lib/pleroma/web/router.ex                                      |   2
-rw-r--r--  mix.lock                                                       |   4
-rwxr-xr-x  test/fixtures/image.gif                                        | bin 0 -> 1001718 bytes
-rwxr-xr-x  test/fixtures/image.png                                        | bin 0 -> 104426 bytes
-rw-r--r--  test/web/activity_pub/mrf/mediaproxy_warming_policy_test.exs   |   2
-rw-r--r--  test/web/fed_sockets/fed_registry_test.exs                     | 124
-rw-r--r--  test/web/fed_sockets/fetch_registry_test.exs                   |  67
-rw-r--r--  test/web/fed_sockets/socket_info_test.exs                      | 118
-rw-r--r--  test/web/mastodon_api/views/account_view_test.exs              |  38
-rw-r--r--  test/web/media_proxy/media_proxy_controller_test.exs           | 312
-rw-r--r--  test/web/media_proxy/media_proxy_test.exs                      |  73
-rw-r--r--  test/web/push/impl_test.exs                                    |  18
46 files changed, 2487 insertions, 181 deletions
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5abafdf69..e61b1d144 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## Unreleased
+### Added
+- Experimental websocket-based federation between Pleroma instances.
+
### Changed
- Renamed `:await_up_timeout` in `:connections_pool` namespace to `:connect_timeout`, old name is deprecated.
@@ -13,6 +16,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Users with the `discoverable` field set to false will not show up in searches.
- Minimum lifetime for ephemeral activities changed to 10 minutes and made configurable (`:min_lifetime` option).
+### Added
+- Media preview proxy (requires media proxy be enabled; see `:media_preview_proxy` config for more details).
+- Pleroma API: Importing the muted users from CSV files.
+
### Removed
- **Breaking:** `Pleroma.Workers.Cron.StatsWorker` setting from Oban `:crontab` (moved to a simpler implementation).
@@ -21,8 +28,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Removed `:managed_config` option. In practice, it was accidentally removed with 2.0.0 release when frontends were
switched to a new configuration mechanism, however it was not officially removed until now.
-### Added
-- Pleroma API: Importing the muted users from CSV files.
## [2.1.2] - 2020-09-17
diff --git a/config/config.exs b/config/config.exs
index c204814d0..00624bf00 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -130,6 +130,7 @@ config :pleroma, Pleroma.Web.Endpoint,
dispatch: [
{:_,
[
+ {"/api/fedsocket/v1", Pleroma.Web.FedSockets.IncomingHandler, []},
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
{Phoenix.Transports.WebSocket,
@@ -148,6 +149,16 @@ config :pleroma, Pleroma.Web.Endpoint,
"SameSite=Lax"
]
+config :pleroma, :fed_sockets,
+ enabled: false,
+ connection_duration: :timer.hours(8),
+ rejection_duration: :timer.minutes(15),
+ fed_socket_fetches: [
+ default: 12_000,
+ interval: 3_000,
+ lazy: false
+ ]
+
# Configures Elixir's Logger
config :logger, :console,
level: :debug,
@@ -423,6 +434,8 @@ config :pleroma, :media_proxy,
proxy_opts: [
redirect_on_failure: false,
max_body_length: 25 * 1_048_576,
+ # Note: max_read_duration defaults to Pleroma.ReverseProxy.max_read_duration_default/1
+ max_read_duration: 30_000,
http: [
follow_redirect: true,
pool: :media
@@ -437,6 +450,14 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil
+# Note: media preview proxy depends on media proxy to be enabled
+config :pleroma, :media_preview_proxy,
+ enabled: false,
+ thumbnail_max_width: 600,
+ thumbnail_max_height: 600,
+ image_quality: 85,
+ min_content_length: 100 * 1024
+
config :pleroma, :chat, enabled: true
config :phoenix, :format_encoders, json: Jason
@@ -532,6 +553,7 @@ config :pleroma, Oban,
token_expiration: 5,
federator_incoming: 50,
federator_outgoing: 50,
+ ingestion_queue: 50,
web_push: 50,
mailer: 10,
transmogrifier: 20,
@@ -741,8 +763,8 @@ config :pleroma, :pools,
],
media: [
size: 50,
- max_waiting: 10,
- recv_timeout: 10_000
+ max_waiting: 20,
+ recv_timeout: 15_000
],
upload: [
size: 25,
@@ -787,6 +809,8 @@ config :tzdata, :http_client, Pleroma.HTTP.Tzdata
config :ex_aws, http_client: Pleroma.HTTP.ExAws
+config :web_push_encryption, http_client: Pleroma.HTTP
+
config :pleroma, :instances_favicons, enabled: false
config :floki, :html_parser, Floki.HTMLParser.FastHtml
diff --git a/config/description.exs b/config/description.exs
index 2b30f8148..097e98633 100644
--- a/config/description.exs
+++ b/config/description.exs
@@ -272,6 +272,19 @@ config :pleroma, :config_description, [
},
%{
group: :pleroma,
+ key: :fed_sockets,
+ type: :group,
+ description: "Websocket based federation",
+ children: [
+ %{
+ key: :enabled,
+ type: :boolean,
+ description: "Enable FedSockets"
+ }
+ ]
+ },
+ %{
+ group: :pleroma,
key: Pleroma.Emails.Mailer,
type: :group,
description: "Mailer-related settings",
@@ -1874,6 +1887,7 @@ config :pleroma, :config_description, [
suggestions: [
redirect_on_failure: false,
max_body_length: 25 * 1_048_576,
+ max_read_duration: 30_000,
http: [
follow_redirect: true,
pool: :media
@@ -1895,6 +1909,11 @@ config :pleroma, :config_description, [
"specified length. It is validated with the `content-length` header and also verified when proxying."
},
%{
+ key: :max_read_duration,
+ type: :integer,
+ description: "Timeout (in milliseconds) of GET request to remote URI."
+ },
+ %{
key: :http,
label: "HTTP",
type: :keyword,
@@ -1942,6 +1961,43 @@ config :pleroma, :config_description, [
},
%{
group: :pleroma,
+ key: :media_preview_proxy,
+ type: :group,
+ description: "Media preview proxy",
+ children: [
+ %{
+ key: :enabled,
+ type: :boolean,
+ description:
+ "Enables proxying of remote media preview to the instance's proxy. Requires enabled media proxy."
+ },
+ %{
+ key: :thumbnail_max_width,
+ type: :integer,
+ description:
+ "Max width of preview thumbnail for images (video preview always has original dimensions)."
+ },
+ %{
+ key: :thumbnail_max_height,
+ type: :integer,
+ description:
+ "Max height of preview thumbnail for images (video preview always has original dimensions)."
+ },
+ %{
+ key: :image_quality,
+ type: :integer,
+ description: "Quality of the output. Ranges from 0 (min quality) to 100 (max quality)."
+ },
+ %{
+ key: :min_content_length,
+ type: :integer,
+ description:
+ "Min content length to perform preview, in bytes. If greater than 0, media smaller in size will be served as is, without thumbnailing."
+ }
+ ]
+ },
+ %{
+ group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Http,
type: :group,
description: "HTTP invalidate settings",
diff --git a/config/test.exs b/config/test.exs
index 0ee6f1b7f..93a0e2a61 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -19,6 +19,11 @@ config :logger, :console,
level: :warn,
format: "\n[$level] $message\n"
+config :pleroma, :fed_sockets,
+ enabled: false,
+ connection_duration: 5,
+ rejection_duration: 5
+
config :pleroma, :auth, oauth_consumer_strategies: []
config :pleroma, Pleroma.Upload,
diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md
index 054b8fe43..42e5fe808 100644
--- a/docs/configuration/cheatsheet.md
+++ b/docs/configuration/cheatsheet.md
@@ -225,6 +225,16 @@ Enables the worker which processes posts scheduled for deletion. Pinned posts ar
* `enabled`: whether expired activities will be sent to the job queue to be deleted
+## FedSockets
+FedSockets is an experimental feature that allows Pleroma backends to federate over a persistent websocket connection instead of opening a separate HTTP connection for each federation message. The feature is off by default. It is configurable through the following options.
+
+### :fed_sockets
+* `enabled`: Enables FedSockets for this instance. `false` by default.
+* `connection_duration`: Time an idle websocket is kept open.
+* `rejection_duration`: Failures to connect via FedSockets will not be retried for this period of time.
+* `fed_socket_fetches` and `fed_socket_rejections`: Settings passed to `cachex` for the fetch registry and the rejection stacks. See `Pleroma.Web.FedSockets` for more details.
+
+
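
For reference, a minimal sketch of enabling FedSockets. The values restate the defaults added to `config/config.exs` in this merge; the override file name (`config/prod.secret.exs`) is the usual Pleroma convention and is assumed here, not introduced by this change:

    # config/prod.secret.exs (sketch; values mirror the defaults shipped in config/config.exs)
    config :pleroma, :fed_sockets,
      enabled: true,
      connection_duration: :timer.hours(8),
      rejection_duration: :timer.minutes(15),
      fed_socket_fetches: [
        default: 12_000,
        interval: 3_000,
        lazy: false
      ]
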
## Frontends
### :frontend_configurations
@@ -314,6 +324,14 @@ This section describe PWA manifest instance-specific values. Currently this opti
* `enabled`: Enables purge cache
* `provider`: Which one of the [purge cache strategy](#purge-cache-strategy) to use.
+## :media_preview_proxy
+
+* `enabled`: Enables proxying of remote media preview to the instance’s proxy. Requires enabled media proxy (`media_proxy/enabled`).
+* `thumbnail_max_width`: Max width of preview thumbnail for images (video preview always has original dimensions).
+* `thumbnail_max_height`: Max height of preview thumbnail for images (video preview always has original dimensions).
+* `image_quality`: Quality of the output. Ranges from 0 (min quality) to 100 (max quality).
+* `min_content_length`: Min content length to perform preview, in bytes. If greater than 0, media smaller in size will be served as is, without thumbnailing.
+
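
A hedged example of turning the preview proxy on, restating the defaults from `config/config.exs` above; it assumes the media proxy itself is already enabled, since the preview proxy depends on it:

    # Sketch only; requires config :pleroma, :media_proxy, enabled: true
    config :pleroma, :media_preview_proxy,
      enabled: true,
      thumbnail_max_width: 600,
      thumbnail_max_height: 600,
      image_quality: 85,
      min_content_length: 100 * 1024
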
### Purge cache strategy
#### Pleroma.Web.MediaProxy.Invalidation.Script
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index c39e24919..00ec79a2a 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -99,7 +99,7 @@ defmodule Pleroma.Application do
{Oban, Config.get(Oban)}
] ++
task_children(@env) ++
- streamer_child(@env) ++
+ dont_run_in_test(@env) ++
chat_child(@env, chat_enabled?()) ++
[
Pleroma.Web.Endpoint,
@@ -188,16 +188,17 @@ defmodule Pleroma.Application do
defp chat_enabled?, do: Config.get([:chat, :enabled])
- defp streamer_child(env) when env in [:test, :benchmark], do: []
+ defp dont_run_in_test(env) when env in [:test, :benchmark], do: []
- defp streamer_child(_) do
+ defp dont_run_in_test(_) do
[
{Registry,
[
name: Pleroma.Web.Streamer.registry(),
keys: :duplicate,
partitions: System.schedulers_online()
- ]}
+ ]},
+ Pleroma.Web.FedSockets.Supervisor
]
end
diff --git a/lib/pleroma/helpers/media_helper.ex b/lib/pleroma/helpers/media_helper.ex
new file mode 100644
index 000000000..b6f35a24b
--- /dev/null
+++ b/lib/pleroma/helpers/media_helper.ex
@@ -0,0 +1,150 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Helpers.MediaHelper do
+ @moduledoc """
+ Handles common media-related operations.
+ """
+
+ alias Pleroma.HTTP
+
+ def image_resize(url, options) do
+ with executable when is_binary(executable) <- System.find_executable("convert"),
+ {:ok, args} <- prepare_image_resize_args(options),
+ {:ok, env} <- HTTP.get(url, [], pool: :media),
+ {:ok, fifo_path} <- mkfifo() do
+ args = List.flatten([fifo_path, args])
+ run_fifo(fifo_path, env, executable, args)
+ else
+ nil -> {:error, {:convert, :command_not_found}}
+ {:error, _} = error -> error
+ end
+ end
+
+ defp prepare_image_resize_args(
+ %{max_width: max_width, max_height: max_height, format: "png"} = options
+ ) do
+ quality = options[:quality] || 85
+ resize = Enum.join([max_width, "x", max_height, ">"])
+
+ args = [
+ "-resize",
+ resize,
+ "-quality",
+ to_string(quality),
+ "png:-"
+ ]
+
+ {:ok, args}
+ end
+
+ defp prepare_image_resize_args(%{max_width: max_width, max_height: max_height} = options) do
+ quality = options[:quality] || 85
+ resize = Enum.join([max_width, "x", max_height, ">"])
+
+ args = [
+ "-interlace",
+ "Plane",
+ "-resize",
+ resize,
+ "-quality",
+ to_string(quality),
+ "jpg:-"
+ ]
+
+ {:ok, args}
+ end
+
+ defp prepare_image_resize_args(_), do: {:error, :missing_options}
+
+ # Note: video thumbnail is intentionally not resized (always has original dimensions)
+ def video_framegrab(url) do
+ with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
+ {:ok, env} <- HTTP.get(url, [], pool: :media),
+ {:ok, fifo_path} <- mkfifo(),
+ args = [
+ "-y",
+ "-i",
+ fifo_path,
+ "-vframes",
+ "1",
+ "-f",
+ "mjpeg",
+ "-loglevel",
+ "error",
+ "-"
+ ] do
+ run_fifo(fifo_path, env, executable, args)
+ else
+ nil -> {:error, {:ffmpeg, :command_not_found}}
+ {:error, _} = error -> error
+ end
+ end
+
+ defp run_fifo(fifo_path, env, executable, args) do
+ pid =
+ Port.open({:spawn_executable, executable}, [
+ :use_stdio,
+ :stream,
+ :exit_status,
+ :binary,
+ args: args
+ ])
+
+ fifo = Port.open(to_charlist(fifo_path), [:eof, :binary, :stream, :out])
+ fix = Pleroma.Helpers.QtFastStart.fix(env.body)
+ true = Port.command(fifo, fix)
+ :erlang.port_close(fifo)
+ loop_recv(pid)
+ after
+ File.rm(fifo_path)
+ end
+
+ defp mkfifo do
+ path = Path.join(System.tmp_dir!(), "pleroma-media-preview-pipe-#{Ecto.UUID.generate()}")
+
+ case System.cmd("mkfifo", [path]) do
+ {_, 0} ->
+ spawn(fifo_guard(path))
+ {:ok, path}
+
+ {_, err} ->
+ {:error, {:fifo_failed, err}}
+ end
+ end
+
+ defp fifo_guard(path) do
+ pid = self()
+
+ fn ->
+ ref = Process.monitor(pid)
+
+ receive do
+ {:DOWN, ^ref, :process, ^pid, _} ->
+ File.rm(path)
+ end
+ end
+ end
+
+ defp loop_recv(pid) do
+ loop_recv(pid, <<>>)
+ end
+
+ defp loop_recv(pid, acc) do
+ receive do
+ {^pid, {:data, data}} ->
+ loop_recv(pid, acc <> data)
+
+ {^pid, {:exit_status, 0}} ->
+ {:ok, acc}
+
+ {^pid, {:exit_status, status}} ->
+ {:error, status}
+ after
+ 5000 ->
+ :erlang.port_close(pid)
+ {:error, :timeout}
+ end
+ end
+end
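
A hedged usage sketch of the new helper. The URLs are placeholders, the options map carries the keys `prepare_image_resize_args/1` pattern-matches on, and ImageMagick's `convert` plus `ffmpeg` are assumed to be on the PATH (the helper returns `{:error, {:convert | :ffmpeg, :command_not_found}}` otherwise):

    # Illustrative callers, not part of this diff
    {:ok, png_thumbnail} =
      Pleroma.Helpers.MediaHelper.image_resize(
        "https://example.com/media/photo.jpg",
        %{max_width: 600, max_height: 600, quality: 85, format: "png"}
      )

    # Video preview: first frame as MJPEG, original dimensions (see the note above)
    {:ok, jpeg_frame} =
      Pleroma.Helpers.MediaHelper.video_framegrab("https://example.com/media/clip.mp4")
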
diff --git a/lib/pleroma/helpers/qt_fast_start.ex b/lib/pleroma/helpers/qt_fast_start.ex
new file mode 100644
index 000000000..bb93224b5
--- /dev/null
+++ b/lib/pleroma/helpers/qt_fast_start.ex
@@ -0,0 +1,131 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Helpers.QtFastStart do
+ @moduledoc """
+ (WIP) Converts a "slow start" (data before metadata) mov/mp4 file to a "fast start" one (metadata before data).
+ """
+
+ # TODO: Cleanup and optimizations
+ # Inspirations: https://www.ffmpeg.org/doxygen/3.4/qt-faststart_8c_source.html
+ # https://github.com/danielgtaylor/qtfaststart/blob/master/qtfaststart/processor.py
+ # ISO/IEC 14496-12:2015, ISO/IEC 15444-12:2015
+ # Paracetamol
+
+ def fix(<<0x00, 0x00, 0x00, _, 0x66, 0x74, 0x79, 0x70, _::bits>> = binary) do
+ index = fix(binary, 0, nil, nil, [])
+
+ case index do
+ :abort -> binary
+ [{"ftyp", _, _, _, _}, {"mdat", _, _, _, _} | _] -> faststart(index)
+ [{"ftyp", _, _, _, _}, {"free", _, _, _, _}, {"mdat", _, _, _, _} | _] -> faststart(index)
+ _ -> binary
+ end
+ end
+
+ def fix(binary) do
+ binary
+ end
+
+ # MOOV has been seen before MDAT - abort
+ defp fix(<<_::bits>>, _, true, false, _) do
+ :abort
+ end
+
+ defp fix(
+ <<size::integer-big-size(32), fourcc::bits-size(32), rest::bits>>,
+ pos,
+ got_moov,
+ got_mdat,
+ acc
+ ) do
+ full_size = (size - 8) * 8
+ <<data::bits-size(full_size), rest::bits>> = rest
+
+ acc = [
+ {fourcc, pos, pos + size, size,
+ <<size::integer-big-size(32), fourcc::bits-size(32), data::bits>>}
+ | acc
+ ]
+
+ fix(rest, pos + size, got_moov || fourcc == "moov", got_mdat || fourcc == "mdat", acc)
+ end
+
+ defp fix(<<>>, _pos, _, _, acc) do
+ :lists.reverse(acc)
+ end
+
+ defp faststart(index) do
+ {{_ftyp, _, _, _, ftyp}, index} = List.keytake(index, "ftyp", 0)
+
+ # Skip re-writing the free fourcc as it's kind of useless.
+ # Why stream useless bytes when you can do without?
+ {free_size, index} =
+ case List.keytake(index, "free", 0) do
+ {{_, _, _, size, _}, index} -> {size, index}
+ _ -> {0, index}
+ end
+
+ {{_moov, _, _, moov_size, moov}, index} = List.keytake(index, "moov", 0)
+ offset = -free_size + moov_size
+ rest = for {_, _, _, _, data} <- index, do: data, into: []
+ <<moov_head::bits-size(64), moov_data::bits>> = moov
+ [ftyp, moov_head, fix_moov(moov_data, offset, []), rest]
+ end
+
+ defp fix_moov(
+ <<size::integer-big-size(32), fourcc::bits-size(32), rest::bits>>,
+ offset,
+ acc
+ ) do
+ full_size = (size - 8) * 8
+ <<data::bits-size(full_size), rest::bits>> = rest
+
+ data =
+ cond do
+ fourcc in ["trak", "mdia", "minf", "stbl"] ->
+ # These contain the stco or co64 atoms
+ [<<size::integer-big-size(32), fourcc::bits-size(32)>>, fix_moov(data, offset, [])]
+
+ fourcc in ["stco", "co64"] ->
+ # fix the damn thing
+ <<version::integer-big-size(32), count::integer-big-size(32), rest::bits>> = data
+
+ entry_size =
+ case fourcc do
+ "stco" -> 32
+ "co64" -> 64
+ end
+
+ [
+ <<size::integer-big-size(32), fourcc::bits-size(32), version::integer-big-size(32),
+ count::integer-big-size(32)>>,
+ rewrite_entries(entry_size, offset, rest, [])
+ ]
+
+ true ->
+ [<<size::integer-big-size(32), fourcc::bits-size(32)>>, data]
+ end
+
+ acc = [acc | data]
+ fix_moov(rest, offset, acc)
+ end
+
+ defp fix_moov(<<>>, _, acc), do: acc
+
+ for size <- [32, 64] do
+ defp rewrite_entries(
+ unquote(size),
+ offset,
+ <<pos::integer-big-size(unquote(size)), rest::bits>>,
+ acc
+ ) do
+ rewrite_entries(unquote(size), offset, rest, [
+ acc | <<pos + offset::integer-big-size(unquote(size))>>
+ ])
+ end
+ end
+
+ defp rewrite_entries(_, _, <<>>, acc), do: acc
+end
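
A small sketch of how `fix/1` is used (as in `run_fifo/4` above): it accepts the raw MP4/MOV binary and either returns it unchanged or returns iodata with the `moov` atom moved ahead of `mdat`. The file path is a placeholder:

    binary = File.read!("/tmp/upload.mp4")           # placeholder path
    fixed = Pleroma.Helpers.QtFastStart.fix(binary)  # returns a binary or iodata
    fast_start = IO.iodata_to_binary(fixed)          # normalizes either result
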
diff --git a/lib/pleroma/helpers/uri_helper.ex b/lib/pleroma/helpers/uri_helper.ex
index 6d205a636..f1301f055 100644
--- a/lib/pleroma/helpers/uri_helper.ex
+++ b/lib/pleroma/helpers/uri_helper.ex
@@ -3,18 +3,22 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Helpers.UriHelper do
- def append_uri_params(uri, appended_params) do
+ def modify_uri_params(uri, overridden_params, deleted_params \\ []) do
uri = URI.parse(uri)
- appended_params = for {k, v} <- appended_params, into: %{}, do: {to_string(k), v}
- existing_params = URI.query_decoder(uri.query || "") |> Enum.into(%{})
- updated_params_keys = Enum.uniq(Map.keys(existing_params) ++ Map.keys(appended_params))
+
+ existing_params = URI.query_decoder(uri.query || "") |> Map.new()
+ overridden_params = Map.new(overridden_params, fn {k, v} -> {to_string(k), v} end)
+ deleted_params = Enum.map(deleted_params, &to_string/1)
updated_params =
- for k <- updated_params_keys, do: {k, appended_params[k] || existing_params[k]}
+ existing_params
+ |> Map.merge(overridden_params)
+ |> Map.drop(deleted_params)
uri
|> Map.put(:query, URI.encode_query(updated_params))
|> URI.to_string()
+ |> String.replace_suffix("?", "")
end
def maybe_add_base("/" <> uri, base), do: Path.join([base, uri])
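
A hedged illustration of the new `modify_uri_params/3` behavior with placeholder URLs: overridden params replace existing ones, deleted params are dropped, and a now-empty query no longer leaves a trailing `?`:

    alias Pleroma.Helpers.UriHelper

    UriHelper.modify_uri_params("https://example.com/img?sig=abc&preview=true", %{"sig" => "xyz"}, ["preview"])
    #=> "https://example.com/img?sig=xyz"

    UriHelper.modify_uri_params("https://example.com/img?preview=true", %{}, [:preview])
    #=> "https://example.com/img"
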
diff --git a/lib/pleroma/instances/instance.ex b/lib/pleroma/instances/instance.ex
index 6948651c7..f0f601469 100644
--- a/lib/pleroma/instances/instance.ex
+++ b/lib/pleroma/instances/instance.ex
@@ -156,9 +156,7 @@ defmodule Pleroma.Instances.Instance do
defp scrape_favicon(%URI{} = instance_uri) do
try do
with {:ok, %Tesla.Env{body: html}} <-
- Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
- adapter: [pool: :media]
- ),
+ Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], pool: :media),
{_, [favicon_rel | _]} when is_binary(favicon_rel) <-
{:parse,
html |> Floki.parse_document!() |> Floki.attribute("link[rel=icon]", "href")},
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index 24dc7cb95..169298b34 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -12,6 +12,7 @@ defmodule Pleroma.Object.Fetcher do
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
+ alias Pleroma.Web.FedSockets
require Logger
require Pleroma.Constants
@@ -182,9 +183,47 @@ defmodule Pleroma.Object.Fetcher do
end
end
- def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
+ def fetch_and_contain_remote_object_from_id(prm, opts \\ [])
+
+ def fetch_and_contain_remote_object_from_id(%{"id" => id}, opts),
+ do: fetch_and_contain_remote_object_from_id(id, opts)
+
+ def fetch_and_contain_remote_object_from_id(id, opts) when is_binary(id) do
Logger.debug("Fetching object #{id} via AP")
+ with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")},
+ {:ok, body} <- get_object(id, opts),
+ {:ok, data} <- safe_json_decode(body),
+ :ok <- Containment.contain_origin_from_id(id, data) do
+ {:ok, data}
+ else
+ {:scheme, _} ->
+ {:error, "Unsupported URI scheme"}
+
+ {:error, e} ->
+ {:error, e}
+
+ e ->
+ {:error, e}
+ end
+ end
+
+ def fetch_and_contain_remote_object_from_id(_id, _opts),
+ do: {:error, "id must be a string"}
+
+ defp get_object(id, opts) do
+ with false <- Keyword.get(opts, :force_http, false),
+ {:ok, fedsocket} <- FedSockets.get_or_create_fed_socket(id) do
+ Logger.debug("fetching via fedsocket - #{inspect(id)}")
+ FedSockets.fetch(fedsocket, id)
+ else
+ _other ->
+ Logger.debug("fetching via http - #{inspect(id)}")
+ get_object_http(id)
+ end
+ end
+
+ defp get_object_http(id) do
date = Pleroma.Signature.signed_date()
headers =
@@ -192,20 +231,13 @@ defmodule Pleroma.Object.Fetcher do
|> maybe_date_fetch(date)
|> sign_fetch(id, date)
- Logger.debug("Fetch headers: #{inspect(headers)}")
+ case HTTP.get(id, headers) do
+ {:ok, %{body: body, status: code}} when code in 200..299 ->
+ {:ok, body}
- with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")},
- {:ok, %{body: body, status: code}} when code in 200..299 <- HTTP.get(id, headers),
- {:ok, data} <- Jason.decode(body),
- :ok <- Containment.contain_origin_from_id(id, data) do
- {:ok, data}
- else
{:ok, %{status: code}} when code in [404, 410] ->
{:error, "Object has been deleted"}
- {:scheme, _} ->
- {:error, "Unsupported URI scheme"}
-
{:error, e} ->
{:error, e}
@@ -214,8 +246,6 @@ defmodule Pleroma.Object.Fetcher do
end
end
- def fetch_and_contain_remote_object_from_id(%{"id" => id}),
- do: fetch_and_contain_remote_object_from_id(id)
-
- def fetch_and_contain_remote_object_from_id(_id), do: {:error, "id must be a string"}
+ defp safe_json_decode(nil), do: {:ok, nil}
+ defp safe_json_decode(json), do: Jason.decode(json)
end
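
A sketch of the resulting fetch behavior (object URLs are placeholders): by default `get_object/2` tries a FedSocket first and falls back to HTTP, while `force_http: true`, as used by the signature code elsewhere in this merge, skips FedSockets entirely:

    alias Pleroma.Object.Fetcher

    # FedSocket first, HTTP fallback
    {:ok, data} =
      Fetcher.fetch_and_contain_remote_object_from_id("https://remote.example/objects/1")

    # Plain HTTP fetch, bypassing any open FedSocket
    {:ok, data} =
      Fetcher.fetch_and_contain_remote_object_from_id("https://remote.example/objects/1",
        force_http: true
      )
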
diff --git a/lib/pleroma/reverse_proxy/reverse_proxy.ex b/lib/pleroma/reverse_proxy/reverse_proxy.ex
index 0de4e2309..8ae1157df 100644
--- a/lib/pleroma/reverse_proxy/reverse_proxy.ex
+++ b/lib/pleroma/reverse_proxy/reverse_proxy.ex
@@ -17,6 +17,9 @@ defmodule Pleroma.ReverseProxy do
@failed_request_ttl :timer.seconds(60)
@methods ~w(GET HEAD)
+ def max_read_duration_default, do: @max_read_duration
+ def default_cache_control_header, do: @default_cache_control_header
+
@moduledoc """
A reverse proxy.
@@ -391,6 +394,8 @@ defmodule Pleroma.ReverseProxy do
defp body_size_constraint(_, _), do: :ok
+ defp check_read_duration(nil = _duration, max), do: check_read_duration(@max_read_duration, max)
+
defp check_read_duration(duration, max)
when is_integer(duration) and is_integer(max) and max > 0 do
if duration > max do
diff --git a/lib/pleroma/signature.ex b/lib/pleroma/signature.ex
index 3aa6909d2..e388993b7 100644
--- a/lib/pleroma/signature.ex
+++ b/lib/pleroma/signature.ex
@@ -39,7 +39,7 @@ defmodule Pleroma.Signature do
def fetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
- {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
+ {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id, force_http: true) do
{:ok, public_key}
else
e ->
@@ -50,8 +50,8 @@ defmodule Pleroma.Signature do
def refetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
- {:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id),
- {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
+ {:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id, force_http: true),
+ {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id, force_http: true) do
{:ok, public_key}
else
e ->
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index cf72985fe..410c9cbac 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -1769,12 +1769,12 @@ defmodule Pleroma.User do
def html_filter_policy(_), do: Config.get([:markup, :scrub_policy])
- def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
+ def fetch_by_ap_id(ap_id, opts \\ []), do: ActivityPub.make_user_from_ap_id(ap_id, opts)
- def get_or_fetch_by_ap_id(ap_id) do
+ def get_or_fetch_by_ap_id(ap_id, opts \\ []) do
cached_user = get_cached_by_ap_id(ap_id)
- maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
+ maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id, opts)
case {cached_user, maybe_fetched_user} do
{_, {:ok, %User{} = user}} ->
@@ -1847,8 +1847,8 @@ defmodule Pleroma.User do
def public_key(_), do: {:error, "key not found"}
- def get_public_key_for_ap_id(ap_id) do
- with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
+ def get_public_key_for_ap_id(ap_id, opts \\ []) do
+ with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id, opts),
{:ok, public_key} <- public_key(user) do
{:ok, public_key}
else
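
The `opts` threading above exists so callers can force a plain HTTP (re)fetch of an actor, which is what `Pleroma.Signature` does in this merge. A hedged illustration with a placeholder AP id:

    ap_id = "https://remote.example/users/bob"   # placeholder

    {:ok, _user} = Pleroma.User.get_or_fetch_by_ap_id(ap_id)

    # Signature verification forces HTTP even if a FedSocket to the host exists
    {:ok, _public_key} = Pleroma.User.get_public_key_for_ap_id(ap_id, force_http: true)
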
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index 92fc1e422..06e8e1a7c 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -1270,10 +1270,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
def fetch_follow_information_for_user(user) do
with {:ok, following_data} <-
- Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
+ Fetcher.fetch_and_contain_remote_object_from_id(user.following_address,
+ force_http: true
+ ),
{:ok, hide_follows} <- collection_private(following_data),
{:ok, followers_data} <-
- Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
+ Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address, force_http: true),
{:ok, hide_followers} <- collection_private(followers_data) do
{:ok,
%{
@@ -1347,8 +1349,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
- def fetch_and_prepare_user_from_ap_id(ap_id) do
- with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
+ def fetch_and_prepare_user_from_ap_id(ap_id, opts \\ []) do
+ with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id, opts),
{:ok, data} <- user_data_from_user_object(data) do
{:ok, maybe_update_follow_information(data)}
else
@@ -1390,13 +1392,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
- def make_user_from_ap_id(ap_id) do
+ def make_user_from_ap_id(ap_id, opts \\ []) do
user = User.get_cached_by_ap_id(ap_id)
if user && !User.ap_enabled?(user) do
Transmogrifier.upgrade_user_from_ap_id(ap_id)
else
- with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
+ with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, opts) do
if user do
user
|> User.remote_user_changeset(data)
diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
index 98d595469..0fb05d3c4 100644
--- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
@@ -12,17 +12,21 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
- @options [
+ @adapter_options [
pool: :media,
recv_timeout: 10_000
]
def perform(:prefetch, url) do
- Logger.debug("Prefetching #{inspect(url)}")
+ # Fetching only proxiable resources
+ if MediaProxy.enabled?() and MediaProxy.url_proxiable?(url) do
+ # If preview proxy is enabled, it'll also hit media proxy (so we're caching both requests)
+ prefetch_url = MediaProxy.preview_url(url)
- url
- |> MediaProxy.url()
- |> HTTP.get([], @options)
+ Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}")
+
+ HTTP.get(prefetch_url, [], @adapter_options)
+ end
end
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
diff --git a/lib/pleroma/web/activity_pub/publisher.ex b/lib/pleroma/web/activity_pub/publisher.ex
index d88f7f3ee..9c3956683 100644
--- a/lib/pleroma/web/activity_pub/publisher.ex
+++ b/lib/pleroma/web/activity_pub/publisher.ex
@@ -13,6 +13,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.ActivityPub.Transmogrifier
+ alias Pleroma.Web.FedSockets
require Pleroma.Constants
@@ -50,15 +51,35 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = params) do
Logger.debug("Federating #{id} to #{inbox}")
- uri = URI.parse(inbox)
+ case FedSockets.get_or_create_fed_socket(inbox) do
+ {:ok, fedsocket} ->
+ Logger.debug("publishing via fedsockets - #{inspect(inbox)}")
+ FedSockets.publish(fedsocket, json)
+ _ ->
+ Logger.debug("publishing via http - #{inspect(inbox)}")
+ http_publish(inbox, actor, json, params)
+ end
+ end
+
+ def publish_one(%{actor_id: actor_id} = params) do
+ actor = User.get_cached_by_id(actor_id)
+
+ params
+ |> Map.delete(:actor_id)
+ |> Map.put(:actor, actor)
+ |> publish_one()
+ end
+
+ defp http_publish(inbox, actor, json, params) do
+ uri = %{path: path} = URI.parse(inbox)
digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())
date = Pleroma.Signature.signed_date()
signature =
Pleroma.Signature.sign(actor, %{
- "(request-target)": "post #{uri.path}",
+ "(request-target)": "post #{path}",
host: signature_host(uri),
"content-length": byte_size(json),
digest: digest,
@@ -89,15 +110,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
end
end
- def publish_one(%{actor_id: actor_id} = params) do
- actor = User.get_cached_by_id(actor_id)
-
- params
- |> Map.delete(:actor_id)
- |> Map.put(:actor, actor)
- |> publish_one()
- end
-
defp signature_host(%URI{port: port, scheme: scheme, host: host}) do
if port == URI.default_port(scheme) do
host
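
With the refactor above, a delivery first looks for a FedSocket to the inbox host and only falls back to the signed HTTP POST in `http_publish/4`. A hedged sketch of the parameters `publish_one/1` expects; in practice it is invoked through the federator's Oban jobs rather than called directly, and the URLs are placeholders:

    alias Pleroma.Web.ActivityPub.Publisher

    actor = Pleroma.User.get_cached_by_ap_id("https://local.example/users/alice")

    Publisher.publish_one(%{
      inbox: "https://remote.example/inbox",
      json: Jason.encode!(%{"type" => "Create"}),
      actor: actor,
      id: "https://local.example/activities/1"
    })
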
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index fcca014f0..aa6a69463 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -1000,7 +1000,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
def upgrade_user_from_ap_id(ap_id) do
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
- {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
+ {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id, force_http: true),
{:ok, user} <- update_user(user, data) do
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
{:ok, user}
diff --git a/lib/pleroma/web/fed_sockets/fed_registry.ex b/lib/pleroma/web/fed_sockets/fed_registry.ex
new file mode 100644
index 000000000..e00ea69c0
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fed_registry.ex
@@ -0,0 +1,185 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FedRegistry do
+ @moduledoc """
+ The FedRegistry stores the active FedSockets for quick retrieval.
+
+ The storage and retrieval portion of the FedRegistry is done in process through
+ Elixir's `Registry` module for speed and its ability to monitor for terminated processes.
+
+ Dropped connections will be caught by `Registry` and deleted. Since the next
+ message will initiate a new connection there is no reason to try and reconnect at that point.
+
+ Normally outside modules should have no need to call or use the FedRegistry themselves.
+ """
+
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ require Logger
+
+ @default_rejection_duration 15 * 60 * 1000
+ @rejections :fed_socket_rejections
+
+ @doc """
+ Retrieves a FedSocket from the Registry given its origin.
+
+ The origin is expected to be a string identifying the endpoint "example.com" or "example2.com:8080"
+
+ Will return:
+ * {:ok, fed_socket} for working FedSockets
+ * {:error, :rejected} for origins that have been tried and refused within the rejection duration interval
+ * {:error, some_reason} usually :missing for unknown origins
+ """
+ def get_fed_socket(origin) do
+ case get_registry_data(origin) do
+ {:error, reason} ->
+ {:error, reason}
+
+ {:ok, %{state: :connected} = socket_info} ->
+ {:ok, socket_info}
+ end
+ end
+
+ @doc """
+ Adds a connected FedSocket to the Registry.
+
+ Always returns {:ok, fed_socket}
+ """
+ def add_fed_socket(origin, pid \\ nil) do
+ origin
+ |> SocketInfo.build(pid)
+ |> SocketInfo.connect()
+ |> add_socket_info
+ end
+
+ defp add_socket_info(%{origin: origin, state: :connected} = socket_info) do
+ case Registry.register(FedSockets.Registry, origin, socket_info) do
+ {:ok, _owner} ->
+ clear_prior_rejection(origin)
+ Logger.debug("fedsocket added: #{inspect(origin)}")
+
+ {:ok, socket_info}
+
+ {:error, {:already_registered, _pid}} ->
+ FedSocket.close(socket_info)
+ existing_socket_info = Registry.lookup(FedSockets.Registry, origin)
+
+ {:ok, existing_socket_info}
+
+ _ ->
+ {:error, :error_adding_socket}
+ end
+ end
+
+ @doc """
+ Mark this origin as having rejected a connection attempt.
+ This will keep it from getting additional connection attempts
+ for a period of time specified in the config.
+
+ Always returns {:ok, new_reg_data}
+ """
+ def set_host_rejected(uri) do
+ new_reg_data =
+ uri
+ |> SocketInfo.origin()
+ |> get_or_create_registry_data()
+ |> set_to_rejected()
+ |> save_registry_data()
+
+ {:ok, new_reg_data}
+ end
+
+ @doc """
+ Retrieves the FedRegistryData from the Registry given its origin.
+
+ The origin is expected to be a string identifying the endpoint "example.com" or "example2.com:8080"
+
+ Will return:
+ * {:ok, fed_registry_data} for known origins
+ * {:error, :missing} for unknown origins
+ * {:error, :cache_error} indicating some low level runtime issues
+ """
+ def get_registry_data(origin) do
+ case Registry.lookup(FedSockets.Registry, origin) do
+ [] ->
+ if is_rejected?(origin) do
+ Logger.debug("previously rejected fedsocket requested")
+ {:error, :rejected}
+ else
+ {:error, :missing}
+ end
+
+ [{_pid, %{state: :connected} = socket_info}] ->
+ {:ok, socket_info}
+
+ _ ->
+ {:error, :cache_error}
+ end
+ end
+
+ @doc """
+ Retrieves a map of all sockets from the Registry. The keys are the origins and the values are the corresponding SocketInfo
+ """
+ def list_all do
+ (list_all_connected() ++ list_all_rejected())
+ |> Enum.into(%{})
+ end
+
+ defp list_all_connected do
+ FedSockets.Registry
+ |> Registry.select([{{:"$1", :_, :"$3"}, [], [{{:"$1", :"$3"}}]}])
+ end
+
+ defp list_all_rejected do
+ {:ok, keys} = Cachex.keys(@rejections)
+
+ {:ok, registry_data} =
+ Cachex.execute(@rejections, fn worker ->
+ Enum.map(keys, fn k -> {k, Cachex.get!(worker, k)} end)
+ end)
+
+ registry_data
+ end
+
+ defp clear_prior_rejection(origin),
+ do: Cachex.del(@rejections, origin)
+
+ defp is_rejected?(origin) do
+ case Cachex.get(@rejections, origin) do
+ {:ok, nil} ->
+ false
+
+ {:ok, _} ->
+ true
+ end
+ end
+
+ defp get_or_create_registry_data(origin) do
+ case get_registry_data(origin) do
+ {:error, :missing} ->
+ %SocketInfo{origin: origin}
+
+ {:ok, socket_info} ->
+ socket_info
+ end
+ end
+
+ defp save_registry_data(%SocketInfo{origin: origin, state: :connected} = socket_info) do
+ {:ok, true} = Registry.update_value(FedSockets.Registry, origin, fn _ -> socket_info end)
+ socket_info
+ end
+
+ defp save_registry_data(%SocketInfo{origin: origin, state: :rejected} = socket_info) do
+ rejection_expiration =
+ Pleroma.Config.get([:fed_sockets, :rejection_duration], @default_rejection_duration)
+
+ {:ok, true} = Cachex.put(@rejections, origin, socket_info, ttl: rejection_expiration)
+ socket_info
+ end
+
+ defp set_to_rejected(%SocketInfo{} = socket_info),
+ do: %SocketInfo{socket_info | state: :rejected}
+end
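
A hedged sketch of how the handlers use the registry, assuming the FedSockets supervisor (with its `Registry` and the `:fed_socket_rejections` Cachex cache) is running; the origin is a placeholder:

    alias Pleroma.Web.FedSockets.FedRegistry

    {:ok, _socket_info} = FedRegistry.add_fed_socket("remote.example:443", self())

    case FedRegistry.get_fed_socket("remote.example:443") do
      {:ok, %{state: :connected} = socket_info} -> socket_info
      {:error, :rejected} -> :recently_refused
      {:error, :missing} -> :no_connection_yet
    end
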
diff --git a/lib/pleroma/web/fed_sockets/fed_socket.ex b/lib/pleroma/web/fed_sockets/fed_socket.ex
new file mode 100644
index 000000000..98d64e65a
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fed_socket.ex
@@ -0,0 +1,137 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FedSocket do
+ @moduledoc """
+ The FedSocket module abstracts the actions to be taken on connections regardless of
+ whether the connection started as inbound or outbound.
+
+
+ Normally outside modules will have no need to call the FedSocket module directly.
+ """
+
+ alias Pleroma.Object
+ alias Pleroma.Object.Containment
+ alias Pleroma.User
+ alias Pleroma.Web.ActivityPub.ObjectView
+ alias Pleroma.Web.ActivityPub.UserView
+ alias Pleroma.Web.ActivityPub.Visibility
+ alias Pleroma.Web.FedSockets.FetchRegistry
+ alias Pleroma.Web.FedSockets.IngesterWorker
+ alias Pleroma.Web.FedSockets.OutgoingHandler
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ require Logger
+
+ @shake "61dd18f7-f1e6-49a4-939a-a749fcdc1103"
+
+ def connect_to_host(uri) do
+ case OutgoingHandler.start_link(uri) do
+ {:ok, pid} ->
+ {:ok, pid}
+
+ error ->
+ {:error, error}
+ end
+ end
+
+ def close(%SocketInfo{pid: socket_pid}),
+ do: Process.send(socket_pid, :close, [])
+
+ def publish(%SocketInfo{pid: socket_pid}, json) do
+ %{action: :publish, data: json}
+ |> Jason.encode!()
+ |> send_packet(socket_pid)
+ end
+
+ def fetch(%SocketInfo{pid: socket_pid}, id) do
+ fetch_uuid = FetchRegistry.register_fetch(id)
+
+ %{action: :fetch, data: id, uuid: fetch_uuid}
+ |> Jason.encode!()
+ |> send_packet(socket_pid)
+
+ wait_for_fetch_to_return(fetch_uuid, 0)
+ end
+
+ def receive_package(%SocketInfo{} = fed_socket, json) do
+ json
+ |> Jason.decode!()
+ |> process_package(fed_socket)
+ end
+
+ defp wait_for_fetch_to_return(uuid, cntr) do
+ case FetchRegistry.check_fetch(uuid) do
+ {:error, :waiting} ->
+ Process.sleep(:math.pow(cntr, 3) |> Kernel.trunc())
+ wait_for_fetch_to_return(uuid, cntr + 1)
+
+ {:error, :missing} ->
+ Logger.error("FedSocket fetch timed out - #{inspect(uuid)}")
+ {:error, :timeout}
+
+ {:ok, _fr} ->
+ FetchRegistry.pop_fetch(uuid)
+ end
+ end
+
+ defp process_package(%{"action" => "publish", "data" => data}, %{origin: origin} = _fed_socket) do
+ if Containment.contain_origin(origin, data) do
+ IngesterWorker.enqueue("ingest", %{"object" => data})
+ end
+
+ {:reply, %{"action" => "publish_reply", "status" => "processed"}}
+ end
+
+ defp process_package(%{"action" => "fetch_reply", "uuid" => uuid, "data" => data}, _fed_socket) do
+ FetchRegistry.register_fetch_received(uuid, data)
+ {:noreply, nil}
+ end
+
+ defp process_package(%{"action" => "fetch", "uuid" => uuid, "data" => ap_id}, _fed_socket) do
+ {:ok, data} = render_fetched_data(ap_id, uuid)
+ {:reply, data}
+ end
+
+ defp process_package(%{"action" => "publish_reply"}, _fed_socket) do
+ {:noreply, nil}
+ end
+
+ defp process_package(other, _fed_socket) do
+ Logger.warn("unknown json packages received #{inspect(other)}")
+ {:noreply, nil}
+ end
+
+ defp render_fetched_data(ap_id, uuid) do
+ {:ok,
+ %{
+ "action" => "fetch_reply",
+ "status" => "processed",
+ "uuid" => uuid,
+ "data" => represent_item(ap_id)
+ }}
+ end
+
+ defp represent_item(ap_id) do
+ case User.get_by_ap_id(ap_id) do
+ nil ->
+ object = Object.get_cached_by_ap_id(ap_id)
+
+ if Visibility.is_public?(object) do
+ Phoenix.View.render_to_string(ObjectView, "object.json", object: object)
+ else
+ nil
+ end
+
+ user ->
+ Phoenix.View.render_to_string(UserView, "user.json", user: user)
+ end
+ end
+
+ defp send_packet(data, socket_pid) do
+ Process.send(socket_pid, {:send, data}, [])
+ end
+
+ def shake, do: @shake
+end
diff --git a/lib/pleroma/web/fed_sockets/fed_sockets.ex b/lib/pleroma/web/fed_sockets/fed_sockets.ex
new file mode 100644
index 000000000..1fd5899c8
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fed_sockets.ex
@@ -0,0 +1,185 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets do
+ @moduledoc """
+ This documents the FedSockets framework: a framework for federating
+ ActivityPub objects between servers via persistent WebSocket connections.
+
+ FedSockets allow servers to authenticate on first contact and maintain that
+ connection, eliminating the need to authenticate every time data needs to be shared.
+
+ ## Protocol
+ FedSockets currently support 2 types of data transfer:
+ * the `publish` method, which doesn't require a response
+ * the `fetch` method, which requires a response to be sent
+
+ ### Publish
+ The publish operation sends a json encoded map of the shape:
+ %{action: :publish, data: json}
+ and accepts (but does not require) a reply of form:
+ %{"action" => "publish_reply"}
+
+ The outgoing params represent
+ * data: ActivityPub object encoded into json
+
+
+ ### Fetch
+ The fetch operation sends a json encoded map of the shape:
+ %{action: :fetch, data: id, uuid: fetch_uuid}
+ and requires a reply of form:
+ %{"action" => "fetch_reply", "uuid" => uuid, "data" => data}
+
+ The outgoing params represent
+ * id: an ActivityPub object URI
+ * uuid: a unique uuid generated by the sender
+
+ The reply params represent
+ * data: an ActivityPub object encoded into json
+ * uuid: the uuid sent along with the fetch request
+
+ ## Examples
+ Clients of FedSocket transfers shouldn't need to use any of the functions outside of this module.
+
+ A typical publish operation can be performed through the following code, and a fetch operation in a similar manner.
+
+ case FedSockets.get_or_create_fed_socket(inbox) do
+ {:ok, fedsocket} ->
+ FedSockets.publish(fedsocket, json)
+
+ _ ->
+ alternative_publish(inbox, actor, json, params)
+ end
+
+ ## Configuration
+ FedSockets have the following config settings
+
+ config :pleroma, :fed_sockets,
+ enabled: true,
+ ping_interval: :timer.seconds(15),
+ connection_duration: :timer.hours(1),
+ rejection_duration: :timer.hours(1),
+ fed_socket_fetches: [
+ default: 12_000,
+ interval: 3_000,
+ lazy: false
+ ]
+ * enabled - turn FedSockets on or off with this flag. Can be toggled at runtime.
+ * connection_duration - How long a FedSocket can sit idle before it's culled.
+ * rejection_duration - After failing to make a FedSocket connection a host will be excluded
+ from further connections for this amount of time
+ * fed_socket_fetches - Use these parameters to pass options to the Cachex queue backing the FetchRegistry
+ * fed_socket_rejections - Use these parameters to pass options to the Cachex queue backing the FedRegistry
+
+ Cachex options are
+ * default: the minimum amount of time a fetch can wait before it times out.
+ * interval: the interval between checks for timed-out entries. This plus the default represents the maximum time allowed.
+ * lazy: leave at false for consistent and fast lookups, set to true for stricter timeout enforcement.
+
+ """
+ require Logger
+
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ @doc """
+ returns a FedSocket for the given origin. Will reuse an existing one or create a new one.
+
+ address is expected to be a fully formed URL such as:
+ "http://www.example.com" or "http://www.example.com:8080"
+
+ It can and usually does include additional path parameters,
+ but these are ignored as the FedSockets are organized by host and port info alone.
+ """
+ def get_or_create_fed_socket(address) do
+ with {:cache, {:error, :missing}} <- {:cache, get_fed_socket(address)},
+ {:connect, {:ok, _pid}} <- {:connect, FedSocket.connect_to_host(address)},
+ {:cache, {:ok, fed_socket}} <- {:cache, get_fed_socket(address)} do
+ Logger.debug("fedsocket created for - #{inspect(address)}")
+ {:ok, fed_socket}
+ else
+ {:cache, {:ok, socket}} ->
+ Logger.debug("fedsocket found in cache - #{inspect(address)}")
+ {:ok, socket}
+
+ {:cache, {:error, :rejected} = e} ->
+ e
+
+ {:connect, {:error, _host}} ->
+ Logger.debug("set host rejected for - #{inspect(address)}")
+ FedRegistry.set_host_rejected(address)
+ {:error, :rejected}
+
+ {_, {:error, :disabled}} ->
+ {:error, :disabled}
+
+ {_, {:error, reason}} ->
+ Logger.warn("get_or_create_fed_socket error - #{inspect(reason)}")
+ {:error, reason}
+ end
+ end
+
+ @doc """
+ returns a FedSocket for the given origin. Will not create a new FedSocket if one does not exist.
+
+ address is expected to be a fully formed URL such as:
+ "http://www.example.com" or "http://www.example.com:8080"
+ """
+ def get_fed_socket(address) do
+ origin = SocketInfo.origin(address)
+
+ with {:config, true} <- {:config, Pleroma.Config.get([:fed_sockets, :enabled], false)},
+ {:ok, socket} <- FedRegistry.get_fed_socket(origin) do
+ {:ok, socket}
+ else
+ {:config, _} ->
+ {:error, :disabled}
+
+ {:error, :rejected} ->
+ Logger.debug("FedSocket previously rejected - #{inspect(origin)}")
+ {:error, :rejected}
+
+ {:error, reason} ->
+ {:error, reason}
+ end
+ end
+
+ @doc """
+ Sends the supplied data via the publish protocol.
+ It will not block waiting for a reply.
+ Returns :ok but this is not an indication of a successful transfer.
+
+ the data is expected to be JSON encoded binary data.
+ """
+ def publish(%SocketInfo{} = fed_socket, json) do
+ FedSocket.publish(fed_socket, json)
+ end
+
+ @doc """
+ Sends the supplied data via the fetch protocol.
+ It will block waiting for a reply or timeout.
+
+ Returns {:ok, object} where object is the requested object (or nil)
+ {:error, :timeout} in the event the message was not responded to
+
+ the id is expected to be the URI of an ActivityPub object.
+ """
+ def fetch(%SocketInfo{} = fed_socket, id) do
+ FedSocket.fetch(fed_socket, id)
+ end
+
+ @doc """
+ Disconnect all and restart FedSockets.
+ This is mainly used in development and testing but could be useful in production.
+ """
+ def reset do
+ FedRegistry
+ |> Process.whereis()
+ |> Process.exit(:testing)
+ end
+
+ def uri_for_origin(origin),
+ do: "ws://#{origin}/api/fedsocket/v1"
+end
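
As a counterpart to the publish example in the moduledoc above, a hedged fetch sketch; the object URI is a placeholder, and `fetch/2` blocks until the reply arrives or the fetch registry times the request out:

    alias Pleroma.Web.FedSockets

    ap_id = "https://remote.example/objects/abc123"

    with {:ok, fed_socket} <- FedSockets.get_or_create_fed_socket(ap_id),
         {:ok, json} <- FedSockets.fetch(fed_socket, ap_id) do
      {:ok, json}
    else
      {:error, :timeout} -> :fetch_timed_out
      _ -> :fall_back_to_http
    end
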
diff --git a/lib/pleroma/web/fed_sockets/fetch_registry.ex b/lib/pleroma/web/fed_sockets/fetch_registry.ex
new file mode 100644
index 000000000..7897f0fc6
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fetch_registry.ex
@@ -0,0 +1,151 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FetchRegistry do
+ @moduledoc """
+ The FetchRegistry acts as a broker for fetch requests and return values.
+ This allows calling processes to block while waiting for a reply.
+ It doesn't impose its own process; instead it uses `Cachex` to handle fetches in process, allowing
+ multi-threaded callers to avoid bottlenecking.
+
+ Normally outside modules will have no need to call or use the FetchRegistry themselves.
+
+ The `Cachex` parameters can be controlled from the config. Since exact timeout intervals
+ aren't necessary the following settings are used by default:
+
+ config :pleroma, :fed_sockets,
+ fed_socket_fetches: [
+ default: 12_000,
+ interval: 3_000,
+ lazy: false
+ ]
+
+ """
+
+ defmodule FetchRegistryData do
+ defstruct uuid: nil,
+ sent_json: nil,
+ received_json: nil,
+ sent_at: nil,
+ received_at: nil
+ end
+
+ alias Ecto.UUID
+
+ require Logger
+
+ @fetches :fed_socket_fetches
+
+ @doc """
+ Registers a json request with the FetchRegistry and returns the identifying UUID.
+ """
+ def register_fetch(json) do
+ %FetchRegistryData{uuid: uuid} =
+ json
+ |> new_registry_data
+ |> save_registry_data
+
+ uuid
+ end
+
+ @doc """
+ Reports on the status of a Fetch given the identifying UUID.
+
+ Will return
+ * {:ok, fetched_object} if a fetch has completed
+ * {:error, :waiting} if a fetch is still pending
+ * {:error, other_error} usually :missing to indicate a fetch that has timed out
+ """
+ def check_fetch(uuid) do
+ case get_registry_data(uuid) do
+ {:ok, %FetchRegistryData{received_at: nil}} ->
+ {:error, :waiting}
+
+ {:ok, %FetchRegistryData{} = reg_data} ->
+ {:ok, reg_data}
+
+ e ->
+ e
+ end
+ end
+
+ @doc """
+ Retrieves the response to a fetch given the identifying UUID.
+ The completed fetch will be deleted from the FetchRegistry
+
+ Will return
+ * {:ok, fetched_object} if a fetch has completed
+ * {:error, :waiting} if a fetch is still pending
+ * {:error, other_error} usually :missing to indicate a fetch that has timed out
+ """
+ def pop_fetch(uuid) do
+ case check_fetch(uuid) do
+ {:ok, %FetchRegistryData{received_json: received_json}} ->
+ delete_registry_data(uuid)
+ {:ok, received_json}
+
+ e ->
+ e
+ end
+ end
+
+ @doc """
+ This is called to register that a fetch has returned.
+ It expects the result data along with the UUID that was sent in the request.
+
+ Will return the fetched object or :error
+ """
+ def register_fetch_received(uuid, data) do
+ case get_registry_data(uuid) do
+ {:ok, %FetchRegistryData{received_at: nil} = reg_data} ->
+ reg_data
+ |> set_fetch_received(data)
+ |> save_registry_data()
+
+ {:ok, %FetchRegistryData{} = reg_data} ->
+ Logger.warn("tried to add fetched data twice - #{uuid}")
+ reg_data
+
+ {:error, _} ->
+ Logger.warn("Error adding fetch to registry - #{uuid}")
+ :error
+ end
+ end
+
+ defp new_registry_data(json) do
+ %FetchRegistryData{
+ uuid: UUID.generate(),
+ sent_json: json,
+ sent_at: :erlang.monotonic_time(:millisecond)
+ }
+ end
+
+ defp get_registry_data(origin) do
+ case Cachex.get(@fetches, origin) do
+ {:ok, nil} ->
+ {:error, :missing}
+
+ {:ok, reg_data} ->
+ {:ok, reg_data}
+
+ _ ->
+ {:error, :cache_error}
+ end
+ end
+
+ defp set_fetch_received(%FetchRegistryData{} = reg_data, data),
+ do: %FetchRegistryData{
+ reg_data
+ | received_at: :erlang.monotonic_time(:millisecond),
+ received_json: data
+ }
+
+ defp save_registry_data(%FetchRegistryData{uuid: uuid} = reg_data) do
+ {:ok, true} = Cachex.put(@fetches, uuid, reg_data)
+ reg_data
+ end
+
+ defp delete_registry_data(origin),
+ do: {:ok, true} = Cachex.del(@fetches, origin)
+end
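
A hedged walkthrough of the broker's lifecycle as `FedSocket.fetch/2` exercises it, assuming the `:fed_socket_fetches` Cachex cache is running; the JSON payloads are placeholders:

    alias Pleroma.Web.FedSockets.FetchRegistry

    # The sender registers the outgoing request and keeps the UUID
    uuid = FetchRegistry.register_fetch(~s({"action":"fetch","data":"https://remote.example/objects/1"}))

    # While no reply has been recorded the fetch is still pending
    {:error, :waiting} = FetchRegistry.check_fetch(uuid)

    # Once the reply arrives it is stored, then popped (and deleted) exactly once
    FetchRegistry.register_fetch_received(uuid, ~s({"type":"Note"}))
    {:ok, reply_json} = FetchRegistry.pop_fetch(uuid)
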
diff --git a/lib/pleroma/web/fed_sockets/incoming_handler.ex b/lib/pleroma/web/fed_sockets/incoming_handler.ex
new file mode 100644
index 000000000..49d0d9d84
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/incoming_handler.ex
@@ -0,0 +1,88 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.IncomingHandler do
+ require Logger
+
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ import HTTPSignatures, only: [validate_conn: 1, split_signature: 1]
+
+ @behaviour :cowboy_websocket
+
+ def init(req, state) do
+ shake = FedSocket.shake()
+
+ with true <- Pleroma.Config.get([:fed_sockets, :enabled]),
+ sec_protocol <- :cowboy_req.header("sec-websocket-protocol", req, nil),
+ headers = %{"(request-target)" => ^shake} <- :cowboy_req.headers(req),
+ true <- validate_conn(%{req_headers: headers}),
+ %{"keyId" => origin} <- split_signature(headers["signature"]) do
+ req =
+ if is_nil(sec_protocol) do
+ req
+ else
+ :cowboy_req.set_resp_header("sec-websocket-protocol", sec_protocol, req)
+ end
+
+ {:cowboy_websocket, req, %{origin: origin}, %{}}
+ else
+ _ ->
+ {:ok, req, state}
+ end
+ end
+
+ def websocket_init(%{origin: origin}) do
+ case FedRegistry.add_fed_socket(origin) do
+ {:ok, socket_info} ->
+ {:ok, socket_info}
+
+ e ->
+ Logger.error("FedSocket websocket_init failed - #{inspect(e)}")
+ {:error, inspect(e)}
+ end
+ end
+
+ # Use the ping to check if the connection should be expired
+ def websocket_handle(:ping, socket_info) do
+ if SocketInfo.expired?(socket_info) do
+ {:stop, socket_info}
+ else
+ {:ok, socket_info, :hibernate}
+ end
+ end
+
+ def websocket_handle({:text, data}, socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+
+ case FedSocket.receive_package(socket_info, data) do
+ {:noreply, _} ->
+ {:ok, socket_info}
+
+ {:reply, reply} ->
+ {:reply, {:text, Jason.encode!(reply)}, socket_info}
+
+ {:error, reason} ->
+ Logger.error("incoming error - receive_package: #{inspect(reason)}")
+ {:ok, socket_info}
+ end
+ end
+
+ def websocket_info({:send, message}, socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+
+ {:reply, {:text, message}, socket_info}
+ end
+
+ def websocket_info(:close, state) do
+ {:stop, state}
+ end
+
+ def websocket_info(message, state) do
+ Logger.debug("#{__MODULE__} unknown message #{inspect(message)}")
+ {:ok, state}
+ end
+end
diff --git a/lib/pleroma/web/fed_sockets/ingester_worker.ex b/lib/pleroma/web/fed_sockets/ingester_worker.ex
new file mode 100644
index 000000000..325f2a4ab
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/ingester_worker.ex
@@ -0,0 +1,33 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.IngesterWorker do
+ use Pleroma.Workers.WorkerHelper, queue: "ingestion_queue"
+ require Logger
+
+ alias Pleroma.Web.Federator
+
+ @impl Oban.Worker
+ def perform(%Job{args: %{"op" => "ingest", "object" => ingestee}}) do
+ try do
+ ingestee
+ |> Jason.decode!()
+ |> do_ingestion()
+ rescue
+ e ->
+ Logger.error("IngesterWorker error - #{inspect(e)}")
+ e
+ end
+ end
+
+ defp do_ingestion(params) do
+ case Federator.incoming_ap_doc(params) do
+ {:error, reason} ->
+ {:error, reason}
+
+ {:ok, object} ->
+ {:ok, object}
+ end
+ end
+end
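
Incoming `publish` packets are handed to this worker through the `enqueue/2` helper generated by `Pleroma.Workers.WorkerHelper`, landing in the new `ingestion_queue` Oban queue added in `config/config.exs`. A minimal sketch with a placeholder payload:

    data = ~s({"type": "Create", "object": {"type": "Note"}})  # placeholder AP document
    Pleroma.Web.FedSockets.IngesterWorker.enqueue("ingest", %{"object" => data})
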
diff --git a/lib/pleroma/web/fed_sockets/outgoing_handler.ex b/lib/pleroma/web/fed_sockets/outgoing_handler.ex
new file mode 100644
index 000000000..6ddef17fe
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/outgoing_handler.ex
@@ -0,0 +1,146 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.OutgoingHandler do
+ use GenServer
+
+ require Logger
+
+ alias Pleroma.Web.ActivityPub.InternalFetchActor
+ alias Pleroma.Web.FedSockets
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ def start_link(uri) do
+ GenServer.start_link(__MODULE__, %{uri: uri})
+ end
+
+ def init(%{uri: uri}) do
+ case initiate_connection(uri) do
+ {:ok, ws_origin, conn_pid} ->
+ FedRegistry.add_fed_socket(ws_origin, conn_pid)
+
+ {:error, reason} ->
+ Logger.debug("Outgoing connection failed - #{inspect(reason)}")
+ :ignore
+ end
+ end
+
+ def handle_info({:gun_ws, conn_pid, _ref, {:text, data}}, socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+
+ case FedSocket.receive_package(socket_info, data) do
+ {:noreply, _} ->
+ {:noreply, socket_info}
+
+ {:reply, reply} ->
+ :gun.ws_send(conn_pid, {:text, Jason.encode!(reply)})
+ {:noreply, socket_info}
+
+ {:error, reason} ->
+ Logger.error("incoming error - receive_package: #{inspect(reason)}")
+ {:noreply, socket_info}
+ end
+ end
+
+ def handle_info(:close, state) do
+ Logger.debug("Sending close frame !!!!!!!")
+ {:close, state}
+ end
+
+ def handle_info({:gun_down, _pid, _prot, :closed, _}, state) do
+ {:stop, :normal, state}
+ end
+
+ def handle_info({:send, data}, %{conn_pid: conn_pid} = socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+ :gun.ws_send(conn_pid, {:text, data})
+ {:noreply, socket_info}
+ end
+
+ def handle_info({:gun_ws, _, _, :pong}, state) do
+ {:noreply, state, :hibernate}
+ end
+
+ def handle_info(msg, state) do
+ Logger.debug("#{__MODULE__} unhandled event #{inspect(msg)}")
+ {:noreply, state}
+ end
+
+ def terminate(reason, state) do
+ Logger.debug(
+ "#{__MODULE__} terminating outgoing connection for #{inspect(state)} for #{inspect(reason)}"
+ )
+
+ {:ok, state}
+ end
+
+ def initiate_connection(uri) do
+ ws_uri =
+ uri
+ |> SocketInfo.origin()
+ |> FedSockets.uri_for_origin()
+
+ %{host: host, port: port, path: path} = URI.parse(ws_uri)
+
+ with {:ok, conn_pid} <- :gun.open(to_charlist(host), port),
+ {:ok, _} <- :gun.await_up(conn_pid),
+ reference <- :gun.get(conn_pid, to_charlist(path)),
+ {:response, :fin, 204, _} <- :gun.await(conn_pid, reference),
+ headers <- build_headers(uri),
+ ref <- :gun.ws_upgrade(conn_pid, to_charlist(path), headers, %{silence_pings: false}) do
+ receive do
+ {:gun_upgrade, ^conn_pid, ^ref, [<<"websocket">>], _} ->
+ {:ok, ws_uri, conn_pid}
+ after
+ 15_000 ->
+ Logger.debug("Fedsocket timeout connecting to #{inspect(uri)}")
+ {:error, :timeout}
+ end
+ else
+ {:response, :nofin, 404, _} ->
+ {:error, :fedsockets_not_supported}
+
+ e ->
+ Logger.debug("Fedsocket error connecting to #{inspect(uri)}")
+ {:error, e}
+ end
+ end
+
+ defp build_headers(uri) do
+ host_for_sig = uri |> URI.parse() |> host_signature()
+
+ shake = FedSocket.shake()
+ digest = "SHA-256=" <> (:crypto.hash(:sha256, shake) |> Base.encode64())
+ date = Pleroma.Signature.signed_date()
+ shake_size = byte_size(shake)
+
+ signature_opts = %{
+ "(request-target)": shake,
+ "content-length": to_charlist("#{shake_size}"),
+ date: date,
+ digest: digest,
+ host: host_for_sig
+ }
+
+ signature = Pleroma.Signature.sign(InternalFetchActor.get_actor(), signature_opts)
+
+ [
+ {'signature', to_charlist(signature)},
+ {'date', date},
+ {'digest', to_charlist(digest)},
+ {'content-length', to_charlist("#{shake_size}")},
+ {to_charlist("(request-target)"), to_charlist(shake)}
+ ]
+ end
+
+ defp host_signature(%{host: host, scheme: scheme, port: port}) do
+ if port == URI.default_port(scheme) do
+ host
+ else
+ "#{host}:#{port}"
+ end
+ end
+end
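initiate_connection/1 expects the remote to answer the plain GET on the fedsocket path with a 204 before the websocket upgrade; a 404 is taken to mean fedsockets are not supported, and anything else (or 15 seconds of silence) is treated as an error. A minimal caller sketch with an illustrative URI and payload; start_link/1 returns :ignore when init/1 gives up:

    case Pleroma.Web.FedSockets.OutgoingHandler.start_link("https://remote.example/users/alice") do
      {:ok, pid} -> send(pid, {:send, Jason.encode!(%{"action" => "ping"})})
      :ignore -> :fedsockets_unavailable
    end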
diff --git a/lib/pleroma/web/fed_sockets/socket_info.ex b/lib/pleroma/web/fed_sockets/socket_info.ex
new file mode 100644
index 000000000..d6fdffe1a
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/socket_info.ex
@@ -0,0 +1,52 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.SocketInfo do
+ defstruct origin: nil,
+ pid: nil,
+ conn_pid: nil,
+ state: :default,
+ connected_until: nil
+
+ alias Pleroma.Web.FedSockets.SocketInfo
+ @default_connection_duration 15 * 60 * 1000
+
+ def build(uri, conn_pid \\ nil) do
+ uri
+ |> build_origin()
+ |> build_pids(conn_pid)
+ |> touch()
+ end
+
+ def touch(%SocketInfo{} = socket_info),
+ do: %{socket_info | connected_until: new_ttl()}
+
+ def connect(%SocketInfo{} = socket_info),
+ do: %{socket_info | state: :connected}
+
+ def expired?(%{connected_until: connected_until}),
+ do: connected_until < :erlang.monotonic_time(:millisecond)
+
+ def origin(uri),
+ do: build_origin(uri).origin
+
+ defp build_pids(socket_info, conn_pid),
+ do: struct(socket_info, pid: self(), conn_pid: conn_pid)
+
+ defp build_origin(uri) when is_binary(uri),
+ do: uri |> URI.parse() |> build_origin
+
+ defp build_origin(%{host: host, port: nil, scheme: scheme}),
+ do: build_origin(%{host: host, port: URI.default_port(scheme)})
+
+ defp build_origin(%{host: host, port: port}),
+ do: %SocketInfo{origin: "#{host}:#{port}"}
+
+ defp new_ttl do
+ connection_duration =
+ Pleroma.Config.get([:fed_sockets, :connection_duration], @default_connection_duration)
+
+ :erlang.monotonic_time(:millisecond) + connection_duration
+ end
+end
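A quick sketch of the SocketInfo lifecycle implied above (the URI is illustrative; the TTL defaults to 15 minutes via @default_connection_duration):

    alias Pleroma.Web.FedSockets.SocketInfo

    info = SocketInfo.build("https://remote.example")
    info.origin                    #=> "remote.example:443"
    info = SocketInfo.touch(info)  # push connected_until forward
    SocketInfo.expired?(info)      #=> false until :connection_duration elapses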
diff --git a/lib/pleroma/web/fed_sockets/supervisor.ex b/lib/pleroma/web/fed_sockets/supervisor.ex
new file mode 100644
index 000000000..a5f4bebfb
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/supervisor.ex
@@ -0,0 +1,59 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.Supervisor do
+ use Supervisor
+ import Cachex.Spec
+
+ def start_link(opts) do
+ Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
+ end
+
+ def init(args) do
+ children = [
+ build_cache(:fed_socket_fetches, args),
+ build_cache(:fed_socket_rejections, args),
+ {Registry, keys: :unique, name: FedSockets.Registry, meta: [rejected: %{}]}
+ ]
+
+ opts = [strategy: :one_for_all, name: Pleroma.Web.FedSockets.Supervisor]
+ Supervisor.init(children, opts)
+ end
+
+ defp build_cache(name, args) do
+ opts = get_opts(name, args)
+
+ %{
+ id: String.to_atom("#{name}_cache"),
+ start: {Cachex, :start_link, [name, opts]},
+ type: :worker
+ }
+ end
+
+ defp get_opts(cache_name, args)
+ when cache_name in [:fed_socket_fetches, :fed_socket_rejections] do
+ default = get_opts_or_config(args, cache_name, :default, 15_000)
+ interval = get_opts_or_config(args, cache_name, :interval, 3_000)
+ lazy = get_opts_or_config(args, cache_name, :lazy, false)
+
+ [expiration: expiration(default: default, interval: interval, lazy: lazy)]
+ end
+
+ defp get_opts(name, args) do
+ Keyword.get(args, name, [])
+ end
+
+ defp get_opts_or_config(args, name, key, default) do
+ args
+ |> Keyword.get(name, [])
+ |> Keyword.get(key)
+ |> case do
+ nil ->
+ Pleroma.Config.get([:fed_sockets, name, key], default)
+
+ value ->
+ value
+ end
+ end
+end
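The cache expiration settings can come either from the supervisor's args or from the :fed_sockets application config read by get_opts_or_config/4. A hypothetical override with illustrative values:

    config :pleroma, :fed_sockets,
      fed_socket_fetches: [default: 15_000, interval: 3_000, lazy: false],
      fed_socket_rejections: [lazy: true]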
diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex
index d2a30a548..121ba1693 100644
--- a/lib/pleroma/web/mastodon_api/views/account_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/account_view.ex
@@ -181,8 +181,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
user = User.sanitize_html(user, User.html_filter_policy(opts[:for]))
display_name = user.name || user.nickname
- image = User.avatar_url(user) |> MediaProxy.url()
+ avatar = User.avatar_url(user) |> MediaProxy.url()
+ avatar_static = User.avatar_url(user) |> MediaProxy.preview_url(static: true)
header = User.banner_url(user) |> MediaProxy.url()
+ header_static = User.banner_url(user) |> MediaProxy.preview_url(static: true)
following_count =
if !user.hide_follows_count or !user.hide_follows or opts[:for] == user do
@@ -247,10 +249,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
statuses_count: user.note_count,
note: user.bio,
url: user.uri || user.ap_id,
- avatar: image,
- avatar_static: image,
+ avatar: avatar,
+ avatar_static: avatar_static,
header: header,
- header_static: header,
+ header_static: header_static,
emojis: emojis,
fields: user.fields,
bot: bot,
diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex
index 94b8dc8c6..435bcde15 100644
--- a/lib/pleroma/web/mastodon_api/views/status_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/status_view.ex
@@ -415,6 +415,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
[attachment_url | _] = attachment["url"]
media_type = attachment_url["mediaType"] || attachment_url["mimeType"] || "image"
href = attachment_url["href"] |> MediaProxy.url()
+ href_preview = attachment_url["href"] |> MediaProxy.preview_url()
type =
cond do
@@ -430,7 +431,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
id: to_string(attachment["id"] || hash_id),
url: href,
remote_url: href,
- preview_url: href,
+ preview_url: href_preview,
text_url: href,
type: type,
description: attachment["name"],
diff --git a/lib/pleroma/web/media_proxy/invalidation.ex b/lib/pleroma/web/media_proxy/invalidation.ex
index 5808861e6..4f4340478 100644
--- a/lib/pleroma/web/media_proxy/invalidation.ex
+++ b/lib/pleroma/web/media_proxy/invalidation.ex
@@ -33,6 +33,8 @@ defmodule Pleroma.Web.MediaProxy.Invalidation do
def prepare_urls(urls) do
urls
|> List.wrap()
- |> Enum.map(&MediaProxy.url/1)
+ |> Enum.map(fn url -> [MediaProxy.url(url), MediaProxy.preview_url(url)] end)
+ |> List.flatten()
+ |> Enum.uniq()
end
end
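Each media URL now expands to up to two cache entries to purge; when the preview proxy is disabled, preview_url/1 falls back to url/1 and Enum.uniq/1 collapses the pair. Roughly (host and signature segments are placeholders):

    Pleroma.Web.MediaProxy.Invalidation.prepare_urls(["https://remote.example/a.png"])
    #=> [".../proxy/<sig64>/<url64>/a.png", ".../proxy/preview/<sig64>/<url64>/a.png"]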
diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex
index e18dd8224..8656b8cad 100644
--- a/lib/pleroma/web/media_proxy/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.MediaProxy do
alias Pleroma.Config
+ alias Pleroma.Helpers.UriHelper
alias Pleroma.Upload
alias Pleroma.Web
alias Pleroma.Web.MediaProxy.Invalidation
@@ -40,27 +41,35 @@ defmodule Pleroma.Web.MediaProxy do
def url("/" <> _ = url), do: url
def url(url) do
- if disabled?() or not url_proxiable?(url) do
- url
- else
+ if enabled?() and url_proxiable?(url) do
encode_url(url)
+ else
+ url
end
end
@spec url_proxiable?(String.t()) :: boolean()
def url_proxiable?(url) do
- if local?(url) or whitelisted?(url) do
- false
+ not local?(url) and not whitelisted?(url)
+ end
+
+ def preview_url(url, preview_params \\ []) do
+ if preview_enabled?() do
+ encode_preview_url(url, preview_params)
else
- true
+ url(url)
end
end
- defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
+ def enabled?, do: Config.get([:media_proxy, :enabled], false)
+
+ # Note: the media proxy must be enabled for the media preview proxy so that all
+ # non-local, non-whitelisted URLs are loaded through it and the body size constraint is preserved.
+ def preview_enabled?, do: enabled?() and !!Config.get([:media_preview_proxy, :enabled])
- defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
+ def local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
- defp whitelisted?(url) do
+ def whitelisted?(url) do
%{host: domain} = URI.parse(url)
mediaproxy_whitelist_domains =
@@ -85,17 +94,29 @@ defmodule Pleroma.Web.MediaProxy do
defp maybe_get_domain_from_url(domain), do: domain
- def encode_url(url) do
+ defp base64_sig64(url) do
base64 = Base.url_encode64(url, @base64_opts)
sig64 =
base64
- |> signed_url
+ |> signed_url()
|> Base.url_encode64(@base64_opts)
+ {base64, sig64}
+ end
+
+ def encode_url(url) do
+ {base64, sig64} = base64_sig64(url)
+
build_url(sig64, base64, filename(url))
end
+ def encode_preview_url(url, preview_params \\ []) do
+ {base64, sig64} = base64_sig64(url)
+
+ build_preview_url(sig64, base64, filename(url), preview_params)
+ end
+
def decode_url(sig, url) do
with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
signature when signature == sig <- signed_url(url) do
@@ -113,10 +134,14 @@ defmodule Pleroma.Web.MediaProxy do
if path = URI.parse(url_or_path).path, do: Path.basename(path)
end
- def build_url(sig_base64, url_base64, filename \\ nil) do
+ def base_url do
+ Config.get([:media_proxy, :base_url], Web.base_url())
+ end
+
+ defp proxy_url(path, sig_base64, url_base64, filename) do
[
- Config.get([:media_proxy, :base_url], Web.base_url()),
- "proxy",
+ base_url(),
+ path,
sig_base64,
url_base64,
filename
@@ -124,4 +149,38 @@ defmodule Pleroma.Web.MediaProxy do
|> Enum.filter(& &1)
|> Path.join()
end
+
+ def build_url(sig_base64, url_base64, filename \\ nil) do
+ proxy_url("proxy", sig_base64, url_base64, filename)
+ end
+
+ def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
+ uri = proxy_url("proxy/preview", sig_base64, url_base64, filename)
+
+ UriHelper.modify_uri_params(uri, preview_params)
+ end
+
+ def verify_request_path_and_url(
+ %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
+ url
+ ) do
+ verify_request_path_and_url(request_path, url)
+ end
+
+ def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
+ filename = filename(url)
+
+ if filename && not basename_matches?(request_path, filename) do
+ {:wrong_filename, filename}
+ else
+ :ok
+ end
+ end
+
+ def verify_request_path_and_url(_, _), do: :ok
+
+ defp basename_matches?(path, filename) do
+ basename = Path.basename(path)
+ basename == filename or URI.decode(basename) == filename or URI.encode(basename) == filename
+ end
end
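Taken together, the encode helpers behave roughly as follows (base_url, sig64 and url64 stand in for the configured base URL and the generated signature/payload segments):

    alias Pleroma.Web.MediaProxy

    MediaProxy.url("https://remote.example/image.png")
    #=> "<base_url>/proxy/<sig64>/<url64>/image.png"          (when enabled and proxiable)

    MediaProxy.preview_url("https://remote.example/image.png")
    #=> "<base_url>/proxy/preview/<sig64>/<url64>/image.png"  (when the preview proxy is enabled)

    MediaProxy.url("/relative/path.png")
    #=> "/relative/path.png"                                   (local paths pass through unchanged)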
diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
index 9a64b0ef3..90651ed9b 100644
--- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
@@ -5,44 +5,201 @@
defmodule Pleroma.Web.MediaProxy.MediaProxyController do
use Pleroma.Web, :controller
+ alias Pleroma.Config
+ alias Pleroma.Helpers.MediaHelper
+ alias Pleroma.Helpers.UriHelper
alias Pleroma.ReverseProxy
alias Pleroma.Web.MediaProxy
+ alias Plug.Conn
- @default_proxy_opts [max_body_length: 25 * 1_048_576, http: [follow_redirect: true]]
-
- def remote(conn, %{"sig" => sig64, "url" => url64} = params) do
- with config <- Pleroma.Config.get([:media_proxy], []),
- true <- Keyword.get(config, :enabled, false),
+ def remote(conn, %{"sig" => sig64, "url" => url64}) do
+ with {_, true} <- {:enabled, MediaProxy.enabled?()},
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
{_, false} <- {:in_banned_urls, MediaProxy.in_banned_urls(url)},
- :ok <- filename_matches(params, conn.request_path, url) do
- ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
+ :ok <- MediaProxy.verify_request_path_and_url(conn, url) do
+ ReverseProxy.call(conn, url, media_proxy_opts())
else
- error when error in [false, {:in_banned_urls, true}] ->
- send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
+ {:enabled, false} ->
+ send_resp(conn, 404, Conn.Status.reason_phrase(404))
+
+ {:in_banned_urls, true} ->
+ send_resp(conn, 404, Conn.Status.reason_phrase(404))
{:error, :invalid_signature} ->
- send_resp(conn, 403, Plug.Conn.Status.reason_phrase(403))
+ send_resp(conn, 403, Conn.Status.reason_phrase(403))
{:wrong_filename, filename} ->
redirect(conn, external: MediaProxy.build_url(sig64, url64, filename))
end
end
- def filename_matches(%{"filename" => _} = _, path, url) do
- filename = MediaProxy.filename(url)
+ def preview(%Conn{} = conn, %{"sig" => sig64, "url" => url64}) do
+ with {_, true} <- {:enabled, MediaProxy.preview_enabled?()},
+ {:ok, url} <- MediaProxy.decode_url(sig64, url64),
+ :ok <- MediaProxy.verify_request_path_and_url(conn, url) do
+ handle_preview(conn, url)
+ else
+ {:enabled, false} ->
+ send_resp(conn, 404, Conn.Status.reason_phrase(404))
+
+ {:error, :invalid_signature} ->
+ send_resp(conn, 403, Conn.Status.reason_phrase(403))
+
+ {:wrong_filename, filename} ->
+ redirect(conn, external: MediaProxy.build_preview_url(sig64, url64, filename))
+ end
+ end
+
+ defp handle_preview(conn, url) do
+ media_proxy_url = MediaProxy.url(url)
+
+ with {:ok, %{status: status} = head_response} when status in 200..299 <-
+ Pleroma.HTTP.request("head", media_proxy_url, [], [], pool: :media) do
+ content_type = Tesla.get_header(head_response, "content-type")
+ content_length = Tesla.get_header(head_response, "content-length")
+ content_length = content_length && String.to_integer(content_length)
+ static = conn.params["static"] in ["true", true]
+
+ cond do
+ static and content_type == "image/gif" ->
+ handle_jpeg_preview(conn, media_proxy_url)
+
+ static ->
+ drop_static_param_and_redirect(conn)
+
+ content_type == "image/gif" ->
+ redirect(conn, external: media_proxy_url)
+
+ min_content_length_for_preview() > 0 and content_length > 0 and
+ content_length < min_content_length_for_preview() ->
+ redirect(conn, external: media_proxy_url)
+
+ true ->
+ handle_preview(content_type, conn, media_proxy_url)
+ end
+ else
+ # If the HEAD request failed, redirecting to the media proxy URI makes little sense; return an error instead.
+ {_, %{status: status}} ->
+ send_resp(conn, :failed_dependency, "Can't fetch HTTP headers (HTTP #{status}).")
+
+ {:error, :recv_response_timeout} ->
+ send_resp(conn, :failed_dependency, "HEAD request timeout.")
+
+ _ ->
+ send_resp(conn, :failed_dependency, "Can't fetch HTTP headers.")
+ end
+ end
+
+ defp handle_preview("image/png" <> _ = _content_type, conn, media_proxy_url) do
+ handle_png_preview(conn, media_proxy_url)
+ end
+
+ defp handle_preview("image/" <> _ = _content_type, conn, media_proxy_url) do
+ handle_jpeg_preview(conn, media_proxy_url)
+ end
+
+ defp handle_preview("video/" <> _ = _content_type, conn, media_proxy_url) do
+ handle_video_preview(conn, media_proxy_url)
+ end
+
+ defp handle_preview(_unsupported_content_type, conn, media_proxy_url) do
+ fallback_on_preview_error(conn, media_proxy_url)
+ end
+
+ defp handle_png_preview(conn, media_proxy_url) do
+ quality = Config.get!([:media_preview_proxy, :image_quality])
+ {thumbnail_max_width, thumbnail_max_height} = thumbnail_max_dimensions()
+
+ with {:ok, thumbnail_binary} <-
+ MediaHelper.image_resize(
+ media_proxy_url,
+ %{
+ max_width: thumbnail_max_width,
+ max_height: thumbnail_max_height,
+ quality: quality,
+ format: "png"
+ }
+ ) do
+ conn
+ |> put_preview_response_headers(["image/png", "preview.png"])
+ |> send_resp(200, thumbnail_binary)
+ else
+ _ ->
+ fallback_on_preview_error(conn, media_proxy_url)
+ end
+ end
+
+ defp handle_jpeg_preview(conn, media_proxy_url) do
+ quality = Config.get!([:media_preview_proxy, :image_quality])
+ {thumbnail_max_width, thumbnail_max_height} = thumbnail_max_dimensions()
- if filename && does_not_match(path, filename) do
- {:wrong_filename, filename}
+ with {:ok, thumbnail_binary} <-
+ MediaHelper.image_resize(
+ media_proxy_url,
+ %{max_width: thumbnail_max_width, max_height: thumbnail_max_height, quality: quality}
+ ) do
+ conn
+ |> put_preview_response_headers()
+ |> send_resp(200, thumbnail_binary)
else
- :ok
+ _ ->
+ fallback_on_preview_error(conn, media_proxy_url)
end
end
- def filename_matches(_, _, _), do: :ok
+ defp handle_video_preview(conn, media_proxy_url) do
+ with {:ok, thumbnail_binary} <-
+ MediaHelper.video_framegrab(media_proxy_url) do
+ conn
+ |> put_preview_response_headers()
+ |> send_resp(200, thumbnail_binary)
+ else
+ _ ->
+ fallback_on_preview_error(conn, media_proxy_url)
+ end
+ end
+
+ defp drop_static_param_and_redirect(conn) do
+ uri_without_static_param =
+ conn
+ |> current_url()
+ |> UriHelper.modify_uri_params(%{}, ["static"])
+
+ redirect(conn, external: uri_without_static_param)
+ end
+
+ defp fallback_on_preview_error(conn, media_proxy_url) do
+ redirect(conn, external: media_proxy_url)
+ end
+
+ defp put_preview_response_headers(
+ conn,
+ [content_type, filename] = _content_info \\ ["image/jpeg", "preview.jpg"]
+ ) do
+ conn
+ |> put_resp_header("content-type", content_type)
+ |> put_resp_header("content-disposition", "inline; filename=\"#{filename}\"")
+ |> put_resp_header("cache-control", ReverseProxy.default_cache_control_header())
+ end
+
+ defp thumbnail_max_dimensions do
+ config = media_preview_proxy_config()
+
+ thumbnail_max_width = Keyword.fetch!(config, :thumbnail_max_width)
+ thumbnail_max_height = Keyword.fetch!(config, :thumbnail_max_height)
+
+ {thumbnail_max_width, thumbnail_max_height}
+ end
+
+ defp min_content_length_for_preview do
+ Keyword.get(media_preview_proxy_config(), :min_content_length, 0)
+ end
+
+ defp media_preview_proxy_config do
+ Config.get!([:media_preview_proxy])
+ end
- defp does_not_match(path, filename) do
- basename = Path.basename(path)
- basename != filename and URI.decode(basename) != filename and URI.encode(basename) != filename
+ defp media_proxy_opts do
+ Config.get([:media_proxy, :proxy_opts], [])
end
end
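handle_preview/2 drives its decisions off the :media_preview_proxy config group; the keys below are the ones read above (Config.get!/Keyword.fetch! raise if :image_quality or the thumbnail dimensions are missing), while the values shown are illustrative only:

    config :pleroma, :media_preview_proxy,
      enabled: true,
      thumbnail_max_width: 600,
      thumbnail_max_height: 600,
      image_quality: 85,
      min_content_length: 100 * 1024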
diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex
index 2f0dfb474..8a206e019 100644
--- a/lib/pleroma/web/metadata/utils.ex
+++ b/lib/pleroma/web/metadata/utils.ex
@@ -38,7 +38,7 @@ defmodule Pleroma.Web.Metadata.Utils do
def scrub_html(content), do: content
def attachment_url(url) do
- MediaProxy.url(url)
+ MediaProxy.preview_url(url)
end
def user_name_string(user) do
diff --git a/lib/pleroma/web/oauth/oauth_controller.ex b/lib/pleroma/web/oauth/oauth_controller.ex
index 26e68be42..a4152e840 100644
--- a/lib/pleroma/web/oauth/oauth_controller.ex
+++ b/lib/pleroma/web/oauth/oauth_controller.ex
@@ -119,7 +119,7 @@ defmodule Pleroma.Web.OAuth.OAuthController do
redirect_uri = redirect_uri(conn, redirect_uri)
url_params = %{access_token: token.token}
url_params = Maps.put_if_present(url_params, :state, params["state"])
- url = UriHelper.append_uri_params(redirect_uri, url_params)
+ url = UriHelper.modify_uri_params(redirect_uri, url_params)
redirect(conn, external: url)
else
conn
@@ -161,7 +161,7 @@ defmodule Pleroma.Web.OAuth.OAuthController do
redirect_uri = redirect_uri(conn, redirect_uri)
url_params = %{code: auth.token}
url_params = Maps.put_if_present(url_params, :state, auth_attrs["state"])
- url = UriHelper.append_uri_params(redirect_uri, url_params)
+ url = UriHelper.modify_uri_params(redirect_uri, url_params)
redirect(conn, external: url)
else
conn
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index cd336b932..f924e1e91 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -680,6 +680,8 @@ defmodule Pleroma.Web.Router do
end
scope "/proxy/", Pleroma.Web.MediaProxy do
+ get("/preview/:sig/:url", MediaProxyController, :preview)
+ get("/preview/:sig/:url/:filename", MediaProxyController, :preview)
get("/:sig/:url", MediaProxyController, :remote)
get("/:sig/:url/:filename", MediaProxyController, :remote)
end
diff --git a/mix.lock b/mix.lock
index a28c47017..adb3f024a 100644
--- a/mix.lock
+++ b/mix.lock
@@ -31,6 +31,7 @@
"ecto": {:hex, :ecto, "3.4.5", "2bcd262f57b2c888b0bd7f7a28c8a48aa11dc1a2c6a858e45dd8f8426d504265", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "8c6d1d4d524559e9b7a062f0498e2c206122552d63eacff0a6567ffe7a8e8691"},
"ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
"ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"},
+ "eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"},
"elixir_make": {:hex, :elixir_make, "0.6.0", "38349f3e29aff4864352084fc736fa7fa0f2995a819a737554f7ebd28b85aaab", [:mix], [], "hexpm", "d522695b93b7f0b4c0fcb2dfe73a6b905b1c301226a5a55cb42e5b14d509e050"},
"esshd": {:hex, :esshd, "0.1.1", "d4dd4c46698093a40a56afecce8a46e246eb35463c457c246dacba2e056f31b5", [:mix], [], "hexpm", "d73e341e3009d390aa36387dc8862860bf9f874c94d9fd92ade2926376f49981"},
"eternal": {:hex, :eternal, "1.2.1", "d5b6b2499ba876c57be2581b5b999ee9bdf861c647401066d3eeed111d096bc4", [:mix], [], "hexpm", "b14f1dc204321429479c569cfbe8fb287541184ed040956c8862cb7a677b8406"},
@@ -80,6 +81,7 @@
"nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]},
"oban": {:hex, :oban, "2.0.0", "e6ce70d94dd46815ec0882a1ffb7356df9a9d5b8a40a64ce5c2536617a447379", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cf574813bd048b98a698aa587c21367d2e06842d4e1b1993dcd6a696e9e633bd"},
"open_api_spex": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", "f296ac0924ba3cf79c7a588c4c252889df4c2edd", [ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"]},
+ "p1_utils": {:hex, :p1_utils, "1.0.18", "3fe224de5b2e190d730a3c5da9d6e8540c96484cf4b4692921d1e28f0c32b01c", [:rebar3], [], "hexpm", "1fc8773a71a15553b179c986b22fbeead19b28fe486c332d4929700ffeb71f88"},
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm", "17ef63abde837ad30680ea7f857dd9e7ced9476cdd7b0394432af4bfc241b960"},
"pbkdf2_elixir": {:hex, :pbkdf2_elixir, "1.2.1", "9cbe354b58121075bd20eb83076900a3832324b7dd171a6895fab57b6bb2752c", [:mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}], "hexpm", "d3b40a4a4630f0b442f19eca891fcfeeee4c40871936fed2f68e1c4faa30481f"},
"phoenix": {:hex, :phoenix, "1.4.17", "1b1bd4cff7cfc87c94deaa7d60dd8c22e04368ab95499483c50640ef3bd838d8", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3a8e5d7a3d76d452bb5fb86e8b7bd115f737e4f8efe202a463d4aeb4a5809611"},
@@ -118,5 +120,5 @@
"unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm", "1d1848c40487cdb0b30e8ed975e34e025860c02e419cb615d255849f3427439d"},
"unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"},
"web_push_encryption": {:hex, :web_push_encryption, "0.3.0", "598b5135e696fd1404dc8d0d7c0fa2c027244a4e5d5e5a98ba267f14fdeaabc8", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.8", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "f10bdd1afe527ede694749fb77a2f22f146a51b054c7fa541c9fd920fba7c875"},
- "websocket_client": {:git, "https://github.com/jeremyong/websocket_client.git", "9a6f65d05ebf2725d62fb19262b21f1805a59fbf", []},
+ "websocket_client": {:git, "https://github.com/jeremyong/websocket_client.git", "9a6f65d05ebf2725d62fb19262b21f1805a59fbf", []}
}
diff --git a/test/fixtures/image.gif b/test/fixtures/image.gif
new file mode 100755
index 000000000..9df64778b
--- /dev/null
+++ b/test/fixtures/image.gif
Binary files differ
diff --git a/test/fixtures/image.png b/test/fixtures/image.png
new file mode 100755
index 000000000..e999e8800
--- /dev/null
+++ b/test/fixtures/image.png
Binary files differ
diff --git a/test/web/activity_pub/mrf/mediaproxy_warming_policy_test.exs b/test/web/activity_pub/mrf/mediaproxy_warming_policy_test.exs
index 313d59a66..1710c4d2a 100644
--- a/test/web/activity_pub/mrf/mediaproxy_warming_policy_test.exs
+++ b/test/web/activity_pub/mrf/mediaproxy_warming_policy_test.exs
@@ -22,6 +22,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
}
}
+ setup do: clear_config([:media_proxy, :enabled], true)
+
test "it prefetches media proxy URIs" do
with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
MediaProxyWarmingPolicy.filter(@message)
diff --git a/test/web/fed_sockets/fed_registry_test.exs b/test/web/fed_sockets/fed_registry_test.exs
new file mode 100644
index 000000000..19ac874d6
--- /dev/null
+++ b/test/web/fed_sockets/fed_registry_test.exs
@@ -0,0 +1,124 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FedRegistryTest do
+ use ExUnit.Case
+
+ alias Pleroma.Web.FedSockets
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ @good_domain "http://good.domain"
+ @good_domain_origin "good.domain:80"
+
+ setup do
+ start_supervised({Pleroma.Web.FedSockets.Supervisor, []})
+ build_test_socket(@good_domain)
+ Process.sleep(10)
+
+ :ok
+ end
+
+ describe "add_fed_socket/1 without conflicting sockets" do
+ test "can be added" do
+ Process.sleep(10)
+ assert {:ok, %SocketInfo{origin: origin}} = FedRegistry.get_fed_socket(@good_domain_origin)
+ assert origin == "good.domain:80"
+ end
+
+ test "multiple origins can be added" do
+ build_test_socket("http://anothergood.domain")
+ Process.sleep(10)
+
+ assert {:ok, %SocketInfo{origin: origin_1}} =
+ FedRegistry.get_fed_socket(@good_domain_origin)
+
+ assert {:ok, %SocketInfo{origin: origin_2}} =
+ FedRegistry.get_fed_socket("anothergood.domain:80")
+
+ assert origin_1 == "good.domain:80"
+ assert origin_2 == "anothergood.domain:80"
+ assert FedRegistry.list_all() |> Enum.count() == 2
+ end
+ end
+
+ describe "add_fed_socket/1 when duplicate sockets conflict" do
+ setup do
+ build_test_socket(@good_domain)
+ build_test_socket(@good_domain)
+ Process.sleep(10)
+ :ok
+ end
+
+ test "will be ignored" do
+ assert {:ok, %SocketInfo{origin: origin, pid: _pid_one}} =
+ FedRegistry.get_fed_socket(@good_domain_origin)
+
+ assert origin == "good.domain:80"
+
+ assert FedRegistry.list_all() |> Enum.count() == 1
+ end
+
+ test "the newer process will be closed" do
+ pid_two = build_test_socket(@good_domain)
+
+ assert {:ok, %SocketInfo{origin: origin, pid: _pid_one}} =
+ FedRegistry.get_fed_socket(@good_domain_origin)
+
+ assert origin == "good.domain:80"
+ Process.sleep(10)
+
+ refute Process.alive?(pid_two)
+
+ assert FedRegistry.list_all() |> Enum.count() == 1
+ end
+ end
+
+ describe "get_fed_socket/1" do
+ test "returns missing for unknown hosts" do
+ assert {:error, :missing} = FedRegistry.get_fed_socket("not_a_domain")
+ end
+
+ test "returns rejected for hosts previously rejected" do
+ "rejected.domain:80"
+ |> FedSockets.uri_for_origin()
+ |> FedRegistry.set_host_rejected()
+
+ assert {:error, :rejected} = FedRegistry.get_fed_socket("rejected.domain:80")
+ end
+
+ test "can retrieve a previously added SocketInfo" do
+ build_test_socket(@good_domain)
+ Process.sleep(10)
+ assert {:ok, %SocketInfo{origin: origin}} = FedRegistry.get_fed_socket(@good_domain_origin)
+ assert origin == "good.domain:80"
+ end
+
+ test "removes references to SocketInfos when the process crashes" do
+ assert {:ok, %SocketInfo{origin: origin, pid: pid}} =
+ FedRegistry.get_fed_socket(@good_domain_origin)
+
+ assert origin == "good.domain:80"
+
+ Process.exit(pid, :testing)
+ Process.sleep(100)
+ assert {:error, :missing} = FedRegistry.get_fed_socket(@good_domain_origin)
+ end
+ end
+
+ def build_test_socket(uri) do
+ Kernel.spawn(fn -> fed_socket_almost(uri) end)
+ end
+
+ def fed_socket_almost(origin) do
+ FedRegistry.add_fed_socket(origin)
+
+ receive do
+ :close ->
+ :ok
+ after
+ 5_000 -> :timeout
+ end
+ end
+end
diff --git a/test/web/fed_sockets/fetch_registry_test.exs b/test/web/fed_sockets/fetch_registry_test.exs
new file mode 100644
index 000000000..7bd2d995a
--- /dev/null
+++ b/test/web/fed_sockets/fetch_registry_test.exs
@@ -0,0 +1,67 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FetchRegistryTest do
+ use ExUnit.Case
+
+ alias Pleroma.Web.FedSockets.FetchRegistry
+ alias Pleroma.Web.FedSockets.FetchRegistry.FetchRegistryData
+
+ @json_message "hello"
+ @json_reply "hello back"
+
+ setup do
+ start_supervised(
+ {Pleroma.Web.FedSockets.Supervisor,
+ [
+ ping_interval: 8,
+ connection_duration: 15,
+ rejection_duration: 5,
+ fed_socket_fetches: [default: 10, interval: 10]
+ ]}
+ )
+
+ :ok
+ end
+
+ test "fetches can be stored" do
+ uuid = FetchRegistry.register_fetch(@json_message)
+
+ assert {:error, :waiting} = FetchRegistry.check_fetch(uuid)
+ end
+
+ test "fetches can return" do
+ uuid = FetchRegistry.register_fetch(@json_message)
+ task = Task.async(fn -> FetchRegistry.register_fetch_received(uuid, @json_reply) end)
+
+ assert {:error, :waiting} = FetchRegistry.check_fetch(uuid)
+ Task.await(task)
+
+ assert {:ok, %FetchRegistryData{received_json: received_json}} =
+ FetchRegistry.check_fetch(uuid)
+
+ assert received_json == @json_reply
+ end
+
+ test "fetches are deleted once popped from stack" do
+ uuid = FetchRegistry.register_fetch(@json_message)
+ task = Task.async(fn -> FetchRegistry.register_fetch_received(uuid, @json_reply) end)
+ Task.await(task)
+
+ assert {:ok, %FetchRegistryData{received_json: received_json}} =
+ FetchRegistry.check_fetch(uuid)
+
+ assert received_json == @json_reply
+ assert {:ok, @json_reply} = FetchRegistry.pop_fetch(uuid)
+
+ assert {:error, :missing} = FetchRegistry.check_fetch(uuid)
+ end
+
+ test "fetches can time out" do
+ uuid = FetchRegistry.register_fetch(@json_message)
+ assert {:error, :waiting} = FetchRegistry.check_fetch(uuid)
+ Process.sleep(500)
+ assert {:error, :missing} = FetchRegistry.check_fetch(uuid)
+ end
+end
diff --git a/test/web/fed_sockets/socket_info_test.exs b/test/web/fed_sockets/socket_info_test.exs
new file mode 100644
index 000000000..db3d6edcd
--- /dev/null
+++ b/test/web/fed_sockets/socket_info_test.exs
@@ -0,0 +1,118 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.SocketInfoTest do
+ use ExUnit.Case
+
+ alias Pleroma.Web.FedSockets
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ describe "uri_for_origin" do
+ test "provides the fed_socket URL given the origin information" do
+ endpoint = "example.com:4000"
+ assert FedSockets.uri_for_origin(endpoint) =~ "ws://"
+ assert FedSockets.uri_for_origin(endpoint) =~ endpoint
+ end
+ end
+
+ describe "origin" do
+ test "will provide the origin field given a url" do
+ endpoint = "example.com:4000"
+ assert SocketInfo.origin("ws://#{endpoint}") == endpoint
+ assert SocketInfo.origin("http://#{endpoint}") == endpoint
+ assert SocketInfo.origin("https://#{endpoint}") == endpoint
+ end
+
+ test "will proide the origin field given a uri" do
+ endpoint = "example.com:4000"
+ uri = URI.parse("http://#{endpoint}")
+
+ assert SocketInfo.origin(uri) == endpoint
+ end
+ end
+
+ describe "touch" do
+ test "will update the TTL" do
+ endpoint = "example.com:4000"
+ socket = SocketInfo.build("ws://#{endpoint}")
+ Process.sleep(2)
+ touched_socket = SocketInfo.touch(socket)
+
+ assert socket.connected_until < touched_socket.connected_until
+ end
+ end
+
+ describe "expired?" do
+ setup do
+ start_supervised(
+ {Pleroma.Web.FedSockets.Supervisor,
+ [
+ ping_interval: 8,
+ connection_duration: 5,
+ rejection_duration: 5,
+ fed_socket_rejections: [lazy: true]
+ ]}
+ )
+
+ :ok
+ end
+
+ test "tests if the TTL is exceeded" do
+ endpoint = "example.com:4000"
+ socket = SocketInfo.build("ws://#{endpoint}")
+ refute SocketInfo.expired?(socket)
+ Process.sleep(10)
+
+ assert SocketInfo.expired?(socket)
+ end
+ end
+
+ describe "creating outgoing connection records" do
+ test "can be passed a string" do
+ assert %{conn_pid: :pid, origin: _origin} = SocketInfo.build("example.com:4000", :pid)
+ end
+
+ test "can be passed a URI" do
+ uri = URI.parse("http://example.com:4000")
+ assert %{conn_pid: :pid, origin: origin} = SocketInfo.build(uri, :pid)
+ assert origin =~ "example.com:4000"
+ end
+
+ test "will include the port number" do
+ assert %{conn_pid: :pid, origin: origin} = SocketInfo.build("http://example.com:4000", :pid)
+
+ assert origin =~ ":4000"
+ end
+
+ test "will provide the port if missing" do
+ assert %{conn_pid: :pid, origin: "example.com:80"} =
+ SocketInfo.build("http://example.com", :pid)
+
+ assert %{conn_pid: :pid, origin: "example.com:443"} =
+ SocketInfo.build("https://example.com", :pid)
+ end
+ end
+
+ describe "creating incoming connection records" do
+ test "can be passed a string" do
+ assert %{pid: _, origin: _origin} = SocketInfo.build("example.com:4000")
+ end
+
+ test "can be passed a URI" do
+ uri = URI.parse("example.com:4000")
+ assert %{pid: _, origin: _origin} = SocketInfo.build(uri)
+ end
+
+ test "will include the port number" do
+ assert %{pid: _, origin: origin} = SocketInfo.build("http://example.com:4000")
+
+ assert origin =~ ":4000"
+ end
+
+ test "will provide the port if missing" do
+ assert %{pid: _, origin: "example.com:80"} = SocketInfo.build("http://example.com")
+ assert %{pid: _, origin: "example.com:443"} = SocketInfo.build("https://example.com")
+ end
+ end
+end
diff --git a/test/web/mastodon_api/views/account_view_test.exs b/test/web/mastodon_api/views/account_view_test.exs
index a54b765ef..a5f39b215 100644
--- a/test/web/mastodon_api/views/account_view_test.exs
+++ b/test/web/mastodon_api/views/account_view_test.exs
@@ -5,6 +5,7 @@
defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
use Pleroma.DataCase
+ alias Pleroma.Config
alias Pleroma.User
alias Pleroma.UserRelationship
alias Pleroma.Web.CommonAPI
@@ -540,8 +541,9 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
end
end
- test "uses mediaproxy urls when it's enabled" do
+ test "uses mediaproxy urls when it's enabled (regardless of media preview proxy state)" do
clear_config([:media_proxy, :enabled], true)
+ clear_config([:media_preview_proxy, :enabled])
user =
insert(:user,
@@ -550,20 +552,24 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
emoji: %{"joker_smile" => "https://evil.website/society.png"}
)
- AccountView.render("show.json", %{user: user, skip_visibility_check: true})
- |> Enum.all?(fn
- {key, url} when key in [:avatar, :avatar_static, :header, :header_static] ->
- String.starts_with?(url, Pleroma.Web.base_url())
-
- {:emojis, emojis} ->
- Enum.all?(emojis, fn %{url: url, static_url: static_url} ->
- String.starts_with?(url, Pleroma.Web.base_url()) &&
- String.starts_with?(static_url, Pleroma.Web.base_url())
- end)
-
- _ ->
- true
- end)
- |> assert()
+ with media_preview_enabled <- [false, true] do
+ Config.put([:media_preview_proxy, :enabled], media_preview_enabled)
+
+ AccountView.render("show.json", %{user: user, skip_visibility_check: true})
+ |> Enum.all?(fn
+ {key, url} when key in [:avatar, :avatar_static, :header, :header_static] ->
+ String.starts_with?(url, Pleroma.Web.base_url())
+
+ {:emojis, emojis} ->
+ Enum.all?(emojis, fn %{url: url, static_url: static_url} ->
+ String.starts_with?(url, Pleroma.Web.base_url()) &&
+ String.starts_with?(static_url, Pleroma.Web.base_url())
+ end)
+
+ _ ->
+ true
+ end)
+ |> assert()
+ end
end
end
diff --git a/test/web/media_proxy/media_proxy_controller_test.exs b/test/web/media_proxy/media_proxy_controller_test.exs
index d4db44c63..33e6873f7 100644
--- a/test/web/media_proxy/media_proxy_controller_test.exs
+++ b/test/web/media_proxy/media_proxy_controller_test.exs
@@ -8,34 +8,34 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
import Mock
alias Pleroma.Web.MediaProxy
- alias Pleroma.Web.MediaProxy.MediaProxyController
alias Plug.Conn
setup do
on_exit(fn -> Cachex.clear(:banned_urls_cache) end)
end
- test "it returns 404 when MediaProxy disabled", %{conn: conn} do
- clear_config([:media_proxy, :enabled], false)
-
- assert %Conn{
- status: 404,
- resp_body: "Not Found"
- } = get(conn, "/proxy/hhgfh/eeeee")
-
- assert %Conn{
- status: 404,
- resp_body: "Not Found"
- } = get(conn, "/proxy/hhgfh/eeee/fff")
- end
-
- describe "" do
+ describe "Media Proxy" do
setup do
clear_config([:media_proxy, :enabled], true)
clear_config([Pleroma.Web.Endpoint, :secret_key_base], "00000000000")
+
[url: MediaProxy.encode_url("https://google.fn/test.png")]
end
+ test "it returns 404 when disabled", %{conn: conn} do
+ clear_config([:media_proxy, :enabled], false)
+
+ assert %Conn{
+ status: 404,
+ resp_body: "Not Found"
+ } = get(conn, "/proxy/hhgfh/eeeee")
+
+ assert %Conn{
+ status: 404,
+ resp_body: "Not Found"
+ } = get(conn, "/proxy/hhgfh/eeee/fff")
+ end
+
test "it returns 403 for invalid signature", %{conn: conn, url: url} do
Pleroma.Config.put([Pleroma.Web.Endpoint, :secret_key_base], "000")
%{path: path} = URI.parse(url)
@@ -56,7 +56,7 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
} = get(conn, "/proxy/hhgfh/eeee/fff")
end
- test "redirects on valid url when filename is invalidated", %{conn: conn, url: url} do
+ test "redirects to valid url when filename is invalidated", %{conn: conn, url: url} do
invalid_url = String.replace(url, "test.png", "test-file.png")
response = get(conn, invalid_url)
assert response.status == 302
@@ -80,42 +80,248 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
end
end
- describe "filename_matches/3" do
- test "preserves the encoded or decoded path" do
- assert MediaProxyController.filename_matches(
- %{"filename" => "/Hello world.jpg"},
- "/Hello world.jpg",
- "http://pleroma.social/Hello world.jpg"
- ) == :ok
-
- assert MediaProxyController.filename_matches(
- %{"filename" => "/Hello%20world.jpg"},
- "/Hello%20world.jpg",
- "http://pleroma.social/Hello%20world.jpg"
- ) == :ok
-
- assert MediaProxyController.filename_matches(
- %{"filename" => "/my%2Flong%2Furl%2F2019%2F07%2FS.jpg"},
- "/my%2Flong%2Furl%2F2019%2F07%2FS.jpg",
- "http://pleroma.social/my%2Flong%2Furl%2F2019%2F07%2FS.jpg"
- ) == :ok
-
- assert MediaProxyController.filename_matches(
- %{"filename" => "/my%2Flong%2Furl%2F2019%2F07%2FS.jp"},
- "/my%2Flong%2Furl%2F2019%2F07%2FS.jp",
- "http://pleroma.social/my%2Flong%2Furl%2F2019%2F07%2FS.jpg"
- ) == {:wrong_filename, "my%2Flong%2Furl%2F2019%2F07%2FS.jpg"}
- end
-
- test "encoded url are tried to match for proxy as `conn.request_path` encodes the url" do
- # conn.request_path will return encoded url
- request_path = "/ANALYSE-DAI-_-LE-STABLECOIN-100-D%C3%89CENTRALIS%C3%89-BQ.jpg"
-
- assert MediaProxyController.filename_matches(
- true,
- request_path,
- "https://mydomain.com/uploads/2019/07/ANALYSE-DAI-_-LE-STABLECOIN-100-DÉCENTRALISÉ-BQ.jpg"
- ) == :ok
+ describe "Media Preview Proxy" do
+ setup do
+ clear_config([:media_proxy, :enabled], true)
+ clear_config([:media_preview_proxy, :enabled], true)
+ clear_config([Pleroma.Web.Endpoint, :secret_key_base], "00000000000")
+
+ original_url = "https://google.fn/test.png"
+
+ [
+ url: MediaProxy.encode_preview_url(original_url),
+ media_proxy_url: MediaProxy.encode_url(original_url)
+ ]
+ end
+
+ test "returns 404 when media proxy is disabled", %{conn: conn} do
+ clear_config([:media_proxy, :enabled], false)
+
+ assert %Conn{
+ status: 404,
+ resp_body: "Not Found"
+ } = get(conn, "/proxy/preview/hhgfh/eeeee")
+
+ assert %Conn{
+ status: 404,
+ resp_body: "Not Found"
+ } = get(conn, "/proxy/preview/hhgfh/fff")
+ end
+
+ test "returns 404 when disabled", %{conn: conn} do
+ clear_config([:media_preview_proxy, :enabled], false)
+
+ assert %Conn{
+ status: 404,
+ resp_body: "Not Found"
+ } = get(conn, "/proxy/preview/hhgfh/eeeee")
+
+ assert %Conn{
+ status: 404,
+ resp_body: "Not Found"
+ } = get(conn, "/proxy/preview/hhgfh/fff")
+ end
+
+ test "it returns 403 for invalid signature", %{conn: conn, url: url} do
+ Pleroma.Config.put([Pleroma.Web.Endpoint, :secret_key_base], "000")
+ %{path: path} = URI.parse(url)
+
+ assert %Conn{
+ status: 403,
+ resp_body: "Forbidden"
+ } = get(conn, path)
+
+ assert %Conn{
+ status: 403,
+ resp_body: "Forbidden"
+ } = get(conn, "/proxy/preview/hhgfh/eeee")
+
+ assert %Conn{
+ status: 403,
+ resp_body: "Forbidden"
+ } = get(conn, "/proxy/preview/hhgfh/eeee/fff")
+ end
+
+ test "redirects to valid url when filename is invalidated", %{conn: conn, url: url} do
+ invalid_url = String.replace(url, "test.png", "test-file.png")
+ response = get(conn, invalid_url)
+ assert response.status == 302
+ assert redirected_to(response) == url
+ end
+
+ test "responds with 424 Failed Dependency if HEAD request to media proxy fails", %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 500, body: ""}
+ end)
+
+ response = get(conn, url)
+ assert response.status == 424
+ assert response.resp_body == "Can't fetch HTTP headers (HTTP 500)."
+ end
+
+ test "redirects to media proxy URI on unsupported content type", %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "", headers: [{"content-type", "application/pdf"}]}
+ end)
+
+ response = get(conn, url)
+ assert response.status == 302
+ assert redirected_to(response) == media_proxy_url
+ end
+
+ test "with `static=true` and GIF image preview requested, responds with JPEG image", %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ # Setting a high :min_content_length to ensure this scenario is not affected by its logic
+ clear_config([:media_preview_proxy, :min_content_length], 1_000_000_000)
+
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{
+ status: 200,
+ body: "",
+ headers: [{"content-type", "image/gif"}, {"content-length", "1001718"}]
+ }
+
+ %{method: :get, url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.gif")}
+ end)
+
+ response = get(conn, url <> "?static=true")
+
+ assert response.status == 200
+ assert Conn.get_resp_header(response, "content-type") == ["image/jpeg"]
+ assert response.resp_body != ""
+ end
+
+ test "with GIF image preview requested and no `static` param, redirects to media proxy URI",
+ %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/gif"}]}
+ end)
+
+ response = get(conn, url)
+
+ assert response.status == 302
+ assert redirected_to(response) == media_proxy_url
+ end
+
+ test "with `static` param and non-GIF image preview requested, " <>
+ "redirects to media preview proxy URI without `static` param",
+ %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/jpeg"}]}
+ end)
+
+ response = get(conn, url <> "?static=true")
+
+ assert response.status == 302
+ assert redirected_to(response) == url
+ end
+
+ test "with :min_content_length setting not matched by Content-Length header, " <>
+ "redirects to media proxy URI",
+ %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ clear_config([:media_preview_proxy, :min_content_length], 100_000)
+
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{
+ status: 200,
+ body: "",
+ headers: [{"content-type", "image/gif"}, {"content-length", "5000"}]
+ }
+ end)
+
+ response = get(conn, url)
+
+ assert response.status == 302
+ assert redirected_to(response) == media_proxy_url
+ end
+
+ test "thumbnails PNG images into PNG", %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/png"}]}
+
+ %{method: :get, url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.png")}
+ end)
+
+ response = get(conn, url)
+
+ assert response.status == 200
+ assert Conn.get_resp_header(response, "content-type") == ["image/png"]
+ assert response.resp_body != ""
+ end
+
+ test "thumbnails JPEG images into JPEG", %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/jpeg"}]}
+
+ %{method: :get, url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
+ end)
+
+ response = get(conn, url)
+
+ assert response.status == 200
+ assert Conn.get_resp_header(response, "content-type") == ["image/jpeg"]
+ assert response.resp_body != ""
+ end
+
+ test "redirects to media proxy URI in case of thumbnailing error", %{
+ conn: conn,
+ url: url,
+ media_proxy_url: media_proxy_url
+ } do
+ Tesla.Mock.mock(fn
+ %{method: "head", url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "", headers: [{"content-type", "image/jpeg"}]}
+
+ %{method: :get, url: ^media_proxy_url} ->
+ %Tesla.Env{status: 200, body: "<html><body>error</body></html>"}
+ end)
+
+ response = get(conn, url)
+
+ assert response.status == 302
+ assert redirected_to(response) == media_proxy_url
end
end
end
diff --git a/test/web/media_proxy/media_proxy_test.exs b/test/web/media_proxy/media_proxy_test.exs
index 72885cfdd..0e6df826c 100644
--- a/test/web/media_proxy/media_proxy_test.exs
+++ b/test/web/media_proxy/media_proxy_test.exs
@@ -6,9 +6,16 @@ defmodule Pleroma.Web.MediaProxyTest do
use ExUnit.Case
use Pleroma.Tests.Helpers
+ alias Pleroma.Config
alias Pleroma.Web.Endpoint
alias Pleroma.Web.MediaProxy
+ defp decode_result(encoded) do
+ [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
+ {:ok, decoded} = MediaProxy.decode_url(sig, base64)
+ decoded
+ end
+
describe "when enabled" do
setup do: clear_config([:media_proxy, :enabled], true)
@@ -35,7 +42,7 @@ defmodule Pleroma.Web.MediaProxyTest do
assert String.starts_with?(
encoded,
- Pleroma.Config.get([:media_proxy, :base_url], Pleroma.Web.base_url())
+ Config.get([:media_proxy, :base_url], Pleroma.Web.base_url())
)
assert String.ends_with?(encoded, "/logo.png")
@@ -75,6 +82,64 @@ defmodule Pleroma.Web.MediaProxyTest do
assert MediaProxy.decode_url(sig, base64) == {:error, :invalid_signature}
end
+ def test_verify_request_path_and_url(request_path, url, expected_result) do
+ assert MediaProxy.verify_request_path_and_url(request_path, url) == expected_result
+
+ assert MediaProxy.verify_request_path_and_url(
+ %Plug.Conn{
+ params: %{"filename" => Path.basename(request_path)},
+ request_path: request_path
+ },
+ url
+ ) == expected_result
+ end
+
+ test "if first arg of `verify_request_path_and_url/2` is a Plug.Conn without \"filename\" " <>
+ "parameter, `verify_request_path_and_url/2` returns :ok " do
+ assert MediaProxy.verify_request_path_and_url(
+ %Plug.Conn{params: %{}, request_path: "/some/path"},
+ "https://instance.com/file.jpg"
+ ) == :ok
+
+ assert MediaProxy.verify_request_path_and_url(
+ %Plug.Conn{params: %{}, request_path: "/path/to/file.jpg"},
+ "https://instance.com/file.jpg"
+ ) == :ok
+ end
+
+ test "`verify_request_path_and_url/2` preserves the encoded or decoded path" do
+ test_verify_request_path_and_url(
+ "/Hello world.jpg",
+ "http://pleroma.social/Hello world.jpg",
+ :ok
+ )
+
+ test_verify_request_path_and_url(
+ "/Hello%20world.jpg",
+ "http://pleroma.social/Hello%20world.jpg",
+ :ok
+ )
+
+ test_verify_request_path_and_url(
+ "/my%2Flong%2Furl%2F2019%2F07%2FS.jpg",
+ "http://pleroma.social/my%2Flong%2Furl%2F2019%2F07%2FS.jpg",
+ :ok
+ )
+
+ test_verify_request_path_and_url(
+ # Note: `conn.request_path` returns encoded url
+ "/ANALYSE-DAI-_-LE-STABLECOIN-100-D%C3%89CENTRALIS%C3%89-BQ.jpg",
+ "https://mydomain.com/uploads/2019/07/ANALYSE-DAI-_-LE-STABLECOIN-100-DÉCENTRALISÉ-BQ.jpg",
+ :ok
+ )
+
+ test_verify_request_path_and_url(
+ "/my%2Flong%2Furl%2F2019%2F07%2FS",
+ "http://pleroma.social/my%2Flong%2Furl%2F2019%2F07%2FS.jpg",
+ {:wrong_filename, "my%2Flong%2Furl%2F2019%2F07%2FS.jpg"}
+ )
+ end
+
test "uses the configured base_url" do
base_url = "https://cache.pleroma.social"
clear_config([:media_proxy, :base_url], base_url)
@@ -124,12 +189,6 @@ defmodule Pleroma.Web.MediaProxyTest do
end
end
- defp decode_result(encoded) do
- [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
- {:ok, decoded} = MediaProxy.decode_url(sig, base64)
- decoded
- end
-
describe "whitelist" do
setup do: clear_config([:media_proxy, :enabled], true)
diff --git a/test/web/push/impl_test.exs b/test/web/push/impl_test.exs
index aeb5c1fbd..c7c17e156 100644
--- a/test/web/push/impl_test.exs
+++ b/test/web/push/impl_test.exs
@@ -12,7 +12,9 @@ defmodule Pleroma.Web.Push.ImplTest do
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Push.Impl
alias Pleroma.Web.Push.Subscription
+ alias Pleroma.Web.WebPushHttpClientMock
+ import Mock
import Pleroma.Factory
setup do
@@ -78,6 +80,22 @@ defmodule Pleroma.Web.Push.ImplTest do
assert Impl.push_message(@message, @sub, @api_key, %Subscription{}) == :ok
end
+ test_with_mock "uses WebPushHttpClientMock as an HTTP client", WebPushHttpClientMock,
+ post: fn _, _, _ -> {:ok, %{status_code: 200}} end do
+ Impl.push_message(@message, @sub, @api_key, %Subscription{})
+ assert_called(WebPushHttpClientMock.post("https://example.com/example/1234", :_, :_))
+ end
+
+ test_with_mock "uses Pleroma.HTTP as an HTTP client", Pleroma.HTTP,
+ post: fn _, _, _ -> {:ok, %{status_code: 200}} end do
+ client = Application.get_env(:web_push_encryption, :http_client)
+ on_exit(fn -> Application.put_env(:web_push_encryption, :http_client, client) end)
+ Application.put_env(:web_push_encryption, :http_client, Pleroma.HTTP)
+
+ Impl.push_message(@message, @sub, @api_key, %Subscription{})
+ assert_called(Pleroma.HTTP.post("https://example.com/example/1234", :_, :_))
+ end
+
@tag capture_log: true
test "fail message sending" do
assert Impl.push_message(