author    Maksim Pechnikov <parallel588@gmail.com>  2020-09-18 22:13:05 +0300
committer Maksim Pechnikov <parallel588@gmail.com>  2020-09-18 22:13:05 +0300
commit    ed3bc53fa137ec4652606cad1df5783c47afb830 (patch)
tree      ab05f72720d2118b6f22b159c80bf5de4961d7ce /lib/pleroma/web
parent    2159daa9af8967fe4b6d31e19c7048979b4cb165 (diff)
parent    6c052bd5b6cc29d321b500654bd6b098d0e6c56a (diff)
download  pleroma-ed3bc53fa137ec4652606cad1df5783c47afb830.tar.gz
Merge branch 'develop' into issue/2099
Diffstat (limited to 'lib/pleroma/web')
-rw-r--r--  lib/pleroma/web/activity_pub/activity_pub.ex | 14
-rw-r--r--  lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex | 14
-rw-r--r--  lib/pleroma/web/activity_pub/publisher.ex | 34
-rw-r--r--  lib/pleroma/web/activity_pub/transmogrifier.ex | 2
-rw-r--r--  lib/pleroma/web/fed_sockets/fed_registry.ex | 185
-rw-r--r--  lib/pleroma/web/fed_sockets/fed_socket.ex | 137
-rw-r--r--  lib/pleroma/web/fed_sockets/fed_sockets.ex | 185
-rw-r--r--  lib/pleroma/web/fed_sockets/fetch_registry.ex | 151
-rw-r--r--  lib/pleroma/web/fed_sockets/incoming_handler.ex | 88
-rw-r--r--  lib/pleroma/web/fed_sockets/ingester_worker.ex | 33
-rw-r--r--  lib/pleroma/web/fed_sockets/outgoing_handler.ex | 146
-rw-r--r--  lib/pleroma/web/fed_sockets/socket_info.ex | 52
-rw-r--r--  lib/pleroma/web/fed_sockets/supervisor.ex | 59
-rw-r--r--  lib/pleroma/web/mastodon_api/views/account_view.ex | 10
-rw-r--r--  lib/pleroma/web/mastodon_api/views/status_view.ex | 3
-rw-r--r--  lib/pleroma/web/media_proxy/invalidation.ex | 4
-rw-r--r--  lib/pleroma/web/media_proxy/media_proxy.ex | 87
-rw-r--r--  lib/pleroma/web/media_proxy/media_proxy_controller.ex | 195
-rw-r--r--  lib/pleroma/web/metadata/utils.ex | 2
-rw-r--r--  lib/pleroma/web/oauth/oauth_controller.ex | 4
-rw-r--r--  lib/pleroma/web/router.ex | 2
21 files changed, 1342 insertions, 65 deletions
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index 92fc1e422..06e8e1a7c 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -1270,10 +1270,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
def fetch_follow_information_for_user(user) do
with {:ok, following_data} <-
- Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
+ Fetcher.fetch_and_contain_remote_object_from_id(user.following_address,
+ force_http: true
+ ),
{:ok, hide_follows} <- collection_private(following_data),
{:ok, followers_data} <-
- Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
+ Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address, force_http: true),
{:ok, hide_followers} <- collection_private(followers_data) do
{:ok,
%{
@@ -1347,8 +1349,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
- def fetch_and_prepare_user_from_ap_id(ap_id) do
- with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
+ def fetch_and_prepare_user_from_ap_id(ap_id, opts \\ []) do
+ with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id, opts),
{:ok, data} <- user_data_from_user_object(data) do
{:ok, maybe_update_follow_information(data)}
else
@@ -1390,13 +1392,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
- def make_user_from_ap_id(ap_id) do
+ def make_user_from_ap_id(ap_id, opts \\ []) do
user = User.get_cached_by_ap_id(ap_id)
if user && !User.ap_enabled?(user) do
Transmogrifier.upgrade_user_from_ap_id(ap_id)
else
- with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
+ with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, opts) do
if user do
user
|> User.remote_user_changeset(data)
diff --git a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
index 98d595469..0fb05d3c4 100644
--- a/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
+++ b/lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
@@ -12,17 +12,21 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
- @options [
+ @adapter_options [
pool: :media,
recv_timeout: 10_000
]
def perform(:prefetch, url) do
- Logger.debug("Prefetching #{inspect(url)}")
+ # Fetching only proxiable resources
+ if MediaProxy.enabled?() and MediaProxy.url_proxiable?(url) do
+ # If preview proxy is enabled, it'll also hit media proxy (so we're caching both requests)
+ prefetch_url = MediaProxy.preview_url(url)
- url
- |> MediaProxy.url()
- |> HTTP.get([], @options)
+ Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}")
+
+ HTTP.get(prefetch_url, [], @adapter_options)
+ end
end
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
diff --git a/lib/pleroma/web/activity_pub/publisher.ex b/lib/pleroma/web/activity_pub/publisher.ex
index d88f7f3ee..9c3956683 100644
--- a/lib/pleroma/web/activity_pub/publisher.ex
+++ b/lib/pleroma/web/activity_pub/publisher.ex
@@ -13,6 +13,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.ActivityPub.Transmogrifier
+ alias Pleroma.Web.FedSockets
require Pleroma.Constants
@@ -50,15 +51,35 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = params) do
Logger.debug("Federating #{id} to #{inbox}")
- uri = URI.parse(inbox)
+ case FedSockets.get_or_create_fed_socket(inbox) do
+ {:ok, fedsocket} ->
+ Logger.debug("publishing via fedsockets - #{inspect(inbox)}")
+ FedSockets.publish(fedsocket, json)
+ _ ->
+ Logger.debug("publishing via http - #{inspect(inbox)}")
+ http_publish(inbox, actor, json, params)
+ end
+ end
+
+ def publish_one(%{actor_id: actor_id} = params) do
+ actor = User.get_cached_by_id(actor_id)
+
+ params
+ |> Map.delete(:actor_id)
+ |> Map.put(:actor, actor)
+ |> publish_one()
+ end
+
+ defp http_publish(inbox, actor, json, params) do
+ uri = %{path: path} = URI.parse(inbox)
digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())
date = Pleroma.Signature.signed_date()
signature =
Pleroma.Signature.sign(actor, %{
- "(request-target)": "post #{uri.path}",
+ "(request-target)": "post #{path}",
host: signature_host(uri),
"content-length": byte_size(json),
digest: digest,
@@ -89,15 +110,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
end
end
- def publish_one(%{actor_id: actor_id} = params) do
- actor = User.get_cached_by_id(actor_id)
-
- params
- |> Map.delete(:actor_id)
- |> Map.put(:actor, actor)
- |> publish_one()
- end
-
defp signature_host(%URI{port: port, scheme: scheme, host: host}) do
if port == URI.default_port(scheme) do
host
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index fcca014f0..aa6a69463 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -1000,7 +1000,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
def upgrade_user_from_ap_id(ap_id) do
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
- {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
+ {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id, force_http: true),
{:ok, user} <- update_user(user, data) do
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
{:ok, user}
diff --git a/lib/pleroma/web/fed_sockets/fed_registry.ex b/lib/pleroma/web/fed_sockets/fed_registry.ex
new file mode 100644
index 000000000..e00ea69c0
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fed_registry.ex
@@ -0,0 +1,185 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FedRegistry do
+ @moduledoc """
+ The FedRegistry stores the active FedSockets for quick retrieval.
+
+ The storage and retrieval portion of the FedRegistry is done in process through
+ Elixir's `Registry` module for speed and its ability to monitor for terminated processes.
+
+ Dropped connections will be caught by `Registry` and deleted. Since the next
+ message will initiate a new connection there is no reason to try and reconnect at that point.
+
+ Normally outside modules should have no need to call or use the FedRegistry themselves.
+ """
+
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ require Logger
+
+ @default_rejection_duration 15 * 60 * 1000
+ @rejections :fed_socket_rejections
+
+ @doc """
+ Retrieves a FedSocket from the Registry given its origin.
+
+ The origin is expected to be a string identifying the endpoint, such as "example.com" or "example2.com:8080"
+
+ Will return:
+ * {:ok, fed_socket} for working FedSockets
+ * {:error, :rejected} for origins that have been tried and refused within the rejection duration interval
+ * {:error, some_reason} usually :missing for unknown origins
+ """
+ def get_fed_socket(origin) do
+ case get_registry_data(origin) do
+ {:error, reason} ->
+ {:error, reason}
+
+ {:ok, %{state: :connected} = socket_info} ->
+ {:ok, socket_info}
+ end
+ end
+
+ @doc """
+ Adds a connected FedSocket to the Registry.
+
+ Always returns {:ok, fed_socket}
+ """
+ def add_fed_socket(origin, pid \\ nil) do
+ origin
+ |> SocketInfo.build(pid)
+ |> SocketInfo.connect()
+ |> add_socket_info
+ end
+
+ defp add_socket_info(%{origin: origin, state: :connected} = socket_info) do
+ case Registry.register(FedSockets.Registry, origin, socket_info) do
+ {:ok, _owner} ->
+ clear_prior_rejection(origin)
+ Logger.debug("fedsocket added: #{inspect(origin)}")
+
+ {:ok, socket_info}
+
+ {:error, {:already_registered, _pid}} ->
+ FedSocket.close(socket_info)
+ existing_socket_info = Registry.lookup(FedSockets.Registry, origin)
+
+ {:ok, existing_socket_info}
+
+ _ ->
+ {:error, :error_adding_socket}
+ end
+ end
+
+ @doc """
+ Mark this origin as having rejected a connection attempt.
+ This will keep it from getting additional connection attempts
+ for a period of time specified in the config.
+
+ Always returns {:ok, new_reg_data}
+ """
+ def set_host_rejected(uri) do
+ new_reg_data =
+ uri
+ |> SocketInfo.origin()
+ |> get_or_create_registry_data()
+ |> set_to_rejected()
+ |> save_registry_data()
+
+ {:ok, new_reg_data}
+ end
+
+ @doc """
+ Retrieves the FedRegistryData from the Registry given its origin.
+
+ The origin is expected to be a string identifying the endpoint, such as "example.com" or "example2.com:8080"
+
+ Will return:
+ * {:ok, fed_registry_data} for known origins
+ * {:error, :missing} for unknown origins
+ * {:error, :cache_error} indicating some low level runtime issues
+ """
+ def get_registry_data(origin) do
+ case Registry.lookup(FedSockets.Registry, origin) do
+ [] ->
+ if is_rejected?(origin) do
+ Logger.debug("previously rejected fedsocket requested")
+ {:error, :rejected}
+ else
+ {:error, :missing}
+ end
+
+ [{_pid, %{state: :connected} = socket_info}] ->
+ {:ok, socket_info}
+
+ _ ->
+ {:error, :cache_error}
+ end
+ end
+
+ @doc """
+ Retrieves a map of all sockets from the Registry. The keys are the origins and the values are the corresponding SocketInfo
+ """
+ def list_all do
+ (list_all_connected() ++ list_all_rejected())
+ |> Enum.into(%{})
+ end
+
+ defp list_all_connected do
+ FedSockets.Registry
+ |> Registry.select([{{:"$1", :_, :"$3"}, [], [{{:"$1", :"$3"}}]}])
+ end
+
+ defp list_all_rejected do
+ {:ok, keys} = Cachex.keys(@rejections)
+
+ {:ok, registry_data} =
+ Cachex.execute(@rejections, fn worker ->
+ Enum.map(keys, fn k -> {k, Cachex.get!(worker, k)} end)
+ end)
+
+ registry_data
+ end
+
+ defp clear_prior_rejection(origin),
+ do: Cachex.del(@rejections, origin)
+
+ defp is_rejected?(origin) do
+ case Cachex.get(@rejections, origin) do
+ {:ok, nil} ->
+ false
+
+ {:ok, _} ->
+ true
+ end
+ end
+
+ defp get_or_create_registry_data(origin) do
+ case get_registry_data(origin) do
+ {:error, :missing} ->
+ %SocketInfo{origin: origin}
+
+ {:ok, socket_info} ->
+ socket_info
+ end
+ end
+
+ defp save_registry_data(%SocketInfo{origin: origin, state: :connected} = socket_info) do
+ {:ok, true} = Registry.update_value(FedSockets.Registry, origin, fn _ -> socket_info end)
+ socket_info
+ end
+
+ defp save_registry_data(%SocketInfo{origin: origin, state: :rejected} = socket_info) do
+ rejection_expiration =
+ Pleroma.Config.get([:fed_sockets, :rejection_duration], @default_rejection_duration)
+
+ {:ok, true} = Cachex.put(@rejections, origin, socket_info, ttl: rejection_expiration)
+ socket_info
+ end
+
+ defp set_to_rejected(%SocketInfo{} = socket_info),
+ do: %SocketInfo{socket_info | state: :rejected}
+end
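A minimal caller sketch for the registry above, assuming `origin` is an already-normalized "host:port" string; the branches mirror the return values documented for get_fed_socket/1:

    alias Pleroma.Web.FedSockets.FedRegistry

    case FedRegistry.get_fed_socket(origin) do
      {:ok, socket_info} ->
        # A live, connected FedSocket is registered for this origin.
        {:ok, socket_info}

      {:error, :rejected} ->
        # The origin recently refused a connection and is still within the rejection TTL.
        {:error, :rejected}

      {:error, :missing} ->
        # Nothing registered yet; FedSockets.get_or_create_fed_socket/1 can open one.
        {:error, :missing}
    end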
diff --git a/lib/pleroma/web/fed_sockets/fed_socket.ex b/lib/pleroma/web/fed_sockets/fed_socket.ex
new file mode 100644
index 000000000..98d64e65a
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fed_socket.ex
@@ -0,0 +1,137 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FedSocket do
+ @moduledoc """
+ The FedSocket module abstracts the actions to be taken on connections regardless of
+ whether the connection started as inbound or outbound.
+
+
+ Normally outside modules will have no need to call the FedSocket module directly.
+ """
+
+ alias Pleroma.Object
+ alias Pleroma.Object.Containment
+ alias Pleroma.User
+ alias Pleroma.Web.ActivityPub.ObjectView
+ alias Pleroma.Web.ActivityPub.UserView
+ alias Pleroma.Web.ActivityPub.Visibility
+ alias Pleroma.Web.FedSockets.FetchRegistry
+ alias Pleroma.Web.FedSockets.IngesterWorker
+ alias Pleroma.Web.FedSockets.OutgoingHandler
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ require Logger
+
+ @shake "61dd18f7-f1e6-49a4-939a-a749fcdc1103"
+
+ def connect_to_host(uri) do
+ case OutgoingHandler.start_link(uri) do
+ {:ok, pid} ->
+ {:ok, pid}
+
+ error ->
+ {:error, error}
+ end
+ end
+
+ def close(%SocketInfo{pid: socket_pid}),
+ do: Process.send(socket_pid, :close, [])
+
+ def publish(%SocketInfo{pid: socket_pid}, json) do
+ %{action: :publish, data: json}
+ |> Jason.encode!()
+ |> send_packet(socket_pid)
+ end
+
+ def fetch(%SocketInfo{pid: socket_pid}, id) do
+ fetch_uuid = FetchRegistry.register_fetch(id)
+
+ %{action: :fetch, data: id, uuid: fetch_uuid}
+ |> Jason.encode!()
+ |> send_packet(socket_pid)
+
+ wait_for_fetch_to_return(fetch_uuid, 0)
+ end
+
+ def receive_package(%SocketInfo{} = fed_socket, json) do
+ json
+ |> Jason.decode!()
+ |> process_package(fed_socket)
+ end
+
+ defp wait_for_fetch_to_return(uuid, cntr) do
+ case FetchRegistry.check_fetch(uuid) do
+ {:error, :waiting} ->
+ Process.sleep(:math.pow(cntr, 3) |> Kernel.trunc())
+ wait_for_fetch_to_return(uuid, cntr + 1)
+
+ {:error, :missing} ->
+ Logger.error("FedSocket fetch timed out - #{inspect(uuid)}")
+ {:error, :timeout}
+
+ {:ok, _fr} ->
+ FetchRegistry.pop_fetch(uuid)
+ end
+ end
+
+ defp process_package(%{"action" => "publish", "data" => data}, %{origin: origin} = _fed_socket) do
+ if Containment.contain_origin(origin, data) do
+ IngesterWorker.enqueue("ingest", %{"object" => data})
+ end
+
+ {:reply, %{"action" => "publish_reply", "status" => "processed"}}
+ end
+
+ defp process_package(%{"action" => "fetch_reply", "uuid" => uuid, "data" => data}, _fed_socket) do
+ FetchRegistry.register_fetch_received(uuid, data)
+ {:noreply, nil}
+ end
+
+ defp process_package(%{"action" => "fetch", "uuid" => uuid, "data" => ap_id}, _fed_socket) do
+ {:ok, data} = render_fetched_data(ap_id, uuid)
+ {:reply, data}
+ end
+
+ defp process_package(%{"action" => "publish_reply"}, _fed_socket) do
+ {:noreply, nil}
+ end
+
+ defp process_package(other, _fed_socket) do
+ Logger.warn("unknown json packages received #{inspect(other)}")
+ {:noreply, nil}
+ end
+
+ defp render_fetched_data(ap_id, uuid) do
+ {:ok,
+ %{
+ "action" => "fetch_reply",
+ "status" => "processed",
+ "uuid" => uuid,
+ "data" => represent_item(ap_id)
+ }}
+ end
+
+ defp represent_item(ap_id) do
+ case User.get_by_ap_id(ap_id) do
+ nil ->
+ object = Object.get_cached_by_ap_id(ap_id)
+
+ if Visibility.is_public?(object) do
+ Phoenix.View.render_to_string(ObjectView, "object.json", object: object)
+ else
+ nil
+ end
+
+ user ->
+ Phoenix.View.render_to_string(UserView, "user.json", user: user)
+ end
+ end
+
+ defp send_packet(data, socket_pid) do
+ Process.send(socket_pid, {:send, data}, [])
+ end
+
+ def shake, do: @shake
+end
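For reference, the polling delay used by wait_for_fetch_to_return/2 above grows cubically with the attempt counter; a quick sketch of the first few sleep durations it produces:

    # Sleep durations in milliseconds for attempts 0 through 5 of the wait loop.
    Enum.map(0..5, fn cntr -> :math.pow(cntr, 3) |> Kernel.trunc() end)
    #=> [0, 1, 8, 27, 64, 125]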
diff --git a/lib/pleroma/web/fed_sockets/fed_sockets.ex b/lib/pleroma/web/fed_sockets/fed_sockets.ex
new file mode 100644
index 000000000..1fd5899c8
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fed_sockets.ex
@@ -0,0 +1,185 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets do
+ @moduledoc """
+ This documents the FedSockets framework, a framework for federating
+ ActivityPub objects between servers via persistent WebSocket connections.
+
+ FedSockets allow servers to authenticate on first contact and maintain that
+ connection, eliminating the need to authenticate every time data needs to be shared.
+
+ ## Protocol
+ FedSockets currently support 2 types of data transfer:
+ * the `publish` method, which doesn't require a response
+ * the `fetch` method, which requires a response to be sent
+
+ ### Publish
+ The publish operation sends a json encoded map of the shape:
+ %{action: :publish, data: json}
+ and accepts (but does not require) a reply of form:
+ %{"action" => "publish_reply"}
+
+ The outgoing params represent
+ * data: ActivityPub object encoded into json
+
+
+ ### Fetch
+ The fetch operation sends a json encoded map of the shape:
+ %{action: :fetch, data: id, uuid: fetch_uuid}
+ and requires a reply of form:
+ %{"action" => "fetch_reply", "uuid" => uuid, "data" => data}
+
+ The outgoing params represent
+ * id: an ActivityPub object URI
+ * uuid: a unique uuid generated by the sender
+
+ The reply params represent
+ * data: an ActivityPub object encoded into json
+ * uuid: the uuid sent along with the fetch request
+
+ ## Examples
+ Clients of FedSocket transfers shouldn't need to use any of the functions outside of this module.
+
+ A typical publish operation can be performed through the following code, and a fetch operation in a similar manner.
+
+ case FedSockets.get_or_create_fed_socket(inbox) do
+ {:ok, fedsocket} ->
+ FedSockets.publish(fedsocket, json)
+
+ _ ->
+ alternative_publish(inbox, actor, json, params)
+ end
+
+ ## Configuration
+ FedSockets have the following config settings
+
+ config :pleroma, :fed_sockets,
+ enabled: true,
+ ping_interval: :timer.seconds(15),
+ connection_duration: :timer.hours(1),
+ rejection_duration: :timer.hours(1),
+ fed_socket_fetches: [
+ default: 12_000,
+ interval: 3_000,
+ lazy: false
+ ]
+ * enabled - turn FedSockets on or off with this flag. Can be toggled at runtime.
+ * connection_duration - How long a FedSocket can sit idle before it's culled.
+ * rejection_duration - After failing to make a FedSocket connection a host will be excluded
+ from further connections for this amount of time
+ * fed_socket_fetches - Use these parameters to pass options to the Cachex queue backing the FetchRegistry
+ * fed_socket_rejections - Use these parameters to pass options to the Cachex queue backing the FedRegistry
+
+ Cachex options are
+ * default: the minimum amount of time a fetch can wait before it times out.
+ * interval: the interval between checks for timed out entries. This plus the default represent the maximum time allowed
+ * lazy: leave at false for consistent and fast lookups, set to true for stricter timeout enforcement
+
+ """
+ require Logger
+
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ @doc """
+ returns a FedSocket for the given origin. Will reuse an existing one or create a new one.
+
+ address is expected to be a fully formed URL such as:
+ "http://www.example.com" or "http://www.example.com:8080"
+
+ It can and usually does include additional path parameters,
+ but these are ignored as the FedSockets are organized by host and port info alone.
+ """
+ def get_or_create_fed_socket(address) do
+ with {:cache, {:error, :missing}} <- {:cache, get_fed_socket(address)},
+ {:connect, {:ok, _pid}} <- {:connect, FedSocket.connect_to_host(address)},
+ {:cache, {:ok, fed_socket}} <- {:cache, get_fed_socket(address)} do
+ Logger.debug("fedsocket created for - #{inspect(address)}")
+ {:ok, fed_socket}
+ else
+ {:cache, {:ok, socket}} ->
+ Logger.debug("fedsocket found in cache - #{inspect(address)}")
+ {:ok, socket}
+
+ {:cache, {:error, :rejected} = e} ->
+ e
+
+ {:connect, {:error, _host}} ->
+ Logger.debug("set host rejected for - #{inspect(address)}")
+ FedRegistry.set_host_rejected(address)
+ {:error, :rejected}
+
+ {_, {:error, :disabled}} ->
+ {:error, :disabled}
+
+ {_, {:error, reason}} ->
+ Logger.warn("get_or_create_fed_socket error - #{inspect(reason)}")
+ {:error, reason}
+ end
+ end
+
+ @doc """
+ returns a FedSocket for the given origin. Will not create a new FedSocket if one does not exist.
+
+ address is expected to be a fully formed URL such as:
+ "http://www.example.com" or "http://www.example.com:8080"
+ """
+ def get_fed_socket(address) do
+ origin = SocketInfo.origin(address)
+
+ with {:config, true} <- {:config, Pleroma.Config.get([:fed_sockets, :enabled], false)},
+ {:ok, socket} <- FedRegistry.get_fed_socket(origin) do
+ {:ok, socket}
+ else
+ {:config, _} ->
+ {:error, :disabled}
+
+ {:error, :rejected} ->
+ Logger.debug("FedSocket previously rejected - #{inspect(origin)}")
+ {:error, :rejected}
+
+ {:error, reason} ->
+ {:error, reason}
+ end
+ end
+
+ @doc """
+ Sends the supplied data via the publish protocol.
+ It will not block waiting for a reply.
+ Returns :ok but this is not an indication of a successful transfer.
+
+ the data is expected to be JSON encoded binary data.
+ """
+ def publish(%SocketInfo{} = fed_socket, json) do
+ FedSocket.publish(fed_socket, json)
+ end
+
+ @doc """
+ Sends the supplied data via the fetch protocol.
+ It will block waiting for a reply or timeout.
+
+ Returns {:ok, object} where object is the requested object (or nil)
+ {:error, :timeout} in the event the message was not responded to
+
+ the id is expected to be the URI of an ActivityPub object.
+ """
+ def fetch(%SocketInfo{} = fed_socket, id) do
+ FedSocket.fetch(fed_socket, id)
+ end
+
+ @doc """
+ Disconnect all and restart FedSockets.
+ This is mainly used in development and testing but could be useful in production.
+ """
+ def reset do
+ FedRegistry
+ |> Process.whereis()
+ |> Process.exit(:testing)
+ end
+
+ def uri_for_origin(origin),
+ do: "ws://#{origin}/api/fedsocket/v1"
+end
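The moduledoc above shows a typical publish; a fetch can be sketched the same way, assuming `inbox` is the remote inbox URL and `ap_id` is the URI of a public ActivityPub object:

    alias Pleroma.Web.FedSockets

    case FedSockets.get_or_create_fed_socket(inbox) do
      {:ok, fed_socket} ->
        # Blocks until the remote side replies or the fetch times out.
        case FedSockets.fetch(fed_socket, ap_id) do
          {:ok, object_json} -> {:ok, object_json}
          {:error, :timeout} -> {:error, :timeout}
        end

      _ ->
        # No FedSocket available (disabled, rejected, or connection failure).
        {:error, :unavailable}
    end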
diff --git a/lib/pleroma/web/fed_sockets/fetch_registry.ex b/lib/pleroma/web/fed_sockets/fetch_registry.ex
new file mode 100644
index 000000000..7897f0fc6
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/fetch_registry.ex
@@ -0,0 +1,151 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.FetchRegistry do
+ @moduledoc """
+ The FetchRegistry acts as a broker for fetch requests and return values.
+ This allows calling processes to block while waiting for a reply.
+ It doesn't impose its own process, instead using `Cachex` to handle fetches in process, allowing
+ concurrent processes to avoid bottlenecking.
+
+ Normally outside modules will have no need to call or use the FetchRegistry themselves.
+
+ The `Cachex` parameters can be controlled from the config. Since exact timeout intervals
+ aren't necessary the following settings are used by default:
+
+ config :pleroma, :fed_sockets,
+ fed_socket_fetches: [
+ default: 12_000,
+ interval: 3_000,
+ lazy: false
+ ]
+
+ """
+
+ defmodule FetchRegistryData do
+ defstruct uuid: nil,
+ sent_json: nil,
+ received_json: nil,
+ sent_at: nil,
+ received_at: nil
+ end
+
+ alias Ecto.UUID
+
+ require Logger
+
+ @fetches :fed_socket_fetches
+
+ @doc """
+ Registers a json request with the FetchRegistry and returns the identifying UUID.
+ """
+ def register_fetch(json) do
+ %FetchRegistryData{uuid: uuid} =
+ json
+ |> new_registry_data
+ |> save_registry_data
+
+ uuid
+ end
+
+ @doc """
+ Reports on the status of a Fetch given the identifying UUID.
+
+ Will return
+ * {:ok, fetched_object} if a fetch has completed
+ * {:error, :waiting} if a fetch is still pending
+ * {:error, other_error} usually :missing to indicate a fetch that has timed out
+ """
+ def check_fetch(uuid) do
+ case get_registry_data(uuid) do
+ {:ok, %FetchRegistryData{received_at: nil}} ->
+ {:error, :waiting}
+
+ {:ok, %FetchRegistryData{} = reg_data} ->
+ {:ok, reg_data}
+
+ e ->
+ e
+ end
+ end
+
+ @doc """
+ Retrieves the response to a fetch given the identifying UUID.
+ The completed fetch will be deleted from the FetchRegistry
+
+ Will return
+ * {:ok, fetched_object} if a fetch has completed
+ * {:error, :waiting} if a fetch is still pending
+ * {:error, other_error} usually :missing to indicate a fetch that has timed out
+ """
+ def pop_fetch(uuid) do
+ case check_fetch(uuid) do
+ {:ok, %FetchRegistryData{received_json: received_json}} ->
+ delete_registry_data(uuid)
+ {:ok, received_json}
+
+ e ->
+ e
+ end
+ end
+
+ @doc """
+ This is called to register that a fetch has returned.
+ It expects the result data along with the UUID that was sent in the request
+
+ Will return the fetched object or :error
+ """
+ def register_fetch_received(uuid, data) do
+ case get_registry_data(uuid) do
+ {:ok, %FetchRegistryData{received_at: nil} = reg_data} ->
+ reg_data
+ |> set_fetch_received(data)
+ |> save_registry_data()
+
+ {:ok, %FetchRegistryData{} = reg_data} ->
+ Logger.warn("tried to add fetched data twice - #{uuid}")
+ reg_data
+
+ {:error, _} ->
+ Logger.warn("Error adding fetch to registry - #{uuid}")
+ :error
+ end
+ end
+
+ defp new_registry_data(json) do
+ %FetchRegistryData{
+ uuid: UUID.generate(),
+ sent_json: json,
+ sent_at: :erlang.monotonic_time(:millisecond)
+ }
+ end
+
+ defp get_registry_data(origin) do
+ case Cachex.get(@fetches, origin) do
+ {:ok, nil} ->
+ {:error, :missing}
+
+ {:ok, reg_data} ->
+ {:ok, reg_data}
+
+ _ ->
+ {:error, :cache_error}
+ end
+ end
+
+ defp set_fetch_received(%FetchRegistryData{} = reg_data, data),
+ do: %FetchRegistryData{
+ reg_data
+ | received_at: :erlang.monotonic_time(:millisecond),
+ received_json: data
+ }
+
+ defp save_registry_data(%FetchRegistryData{uuid: uuid} = reg_data) do
+ {:ok, true} = Cachex.put(@fetches, uuid, reg_data)
+ reg_data
+ end
+
+ defp delete_registry_data(origin),
+ do: {:ok, true} = Cachex.del(@fetches, origin)
+end
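A sketch of the fetch lifecycle brokered by this registry, where `request_json` and `reply_json` are placeholder binaries for the outgoing request and the remote reply:

    alias Pleroma.Web.FedSockets.FetchRegistry

    # The sender registers the outgoing request and keeps the UUID.
    uuid = FetchRegistry.register_fetch(request_json)

    # Until a reply has been recorded, the fetch is still pending.
    {:error, :waiting} = FetchRegistry.check_fetch(uuid)

    # When the reply frame arrives, it is stored under the same UUID...
    FetchRegistry.register_fetch_received(uuid, reply_json)

    # ...and the blocked caller pops it, deleting the entry from the cache.
    {:ok, ^reply_json} = FetchRegistry.pop_fetch(uuid)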
diff --git a/lib/pleroma/web/fed_sockets/incoming_handler.ex b/lib/pleroma/web/fed_sockets/incoming_handler.ex
new file mode 100644
index 000000000..49d0d9d84
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/incoming_handler.ex
@@ -0,0 +1,88 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.IncomingHandler do
+ require Logger
+
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ import HTTPSignatures, only: [validate_conn: 1, split_signature: 1]
+
+ @behaviour :cowboy_websocket
+
+ def init(req, state) do
+ shake = FedSocket.shake()
+
+ with true <- Pleroma.Config.get([:fed_sockets, :enabled]),
+ sec_protocol <- :cowboy_req.header("sec-websocket-protocol", req, nil),
+ headers = %{"(request-target)" => ^shake} <- :cowboy_req.headers(req),
+ true <- validate_conn(%{req_headers: headers}),
+ %{"keyId" => origin} <- split_signature(headers["signature"]) do
+ req =
+ if is_nil(sec_protocol) do
+ req
+ else
+ :cowboy_req.set_resp_header("sec-websocket-protocol", sec_protocol, req)
+ end
+
+ {:cowboy_websocket, req, %{origin: origin}, %{}}
+ else
+ _ ->
+ {:ok, req, state}
+ end
+ end
+
+ def websocket_init(%{origin: origin}) do
+ case FedRegistry.add_fed_socket(origin) do
+ {:ok, socket_info} ->
+ {:ok, socket_info}
+
+ e ->
+ Logger.error("FedSocket websocket_init failed - #{inspect(e)}")
+ {:error, inspect(e)}
+ end
+ end
+
+ # Use the ping to check if the connection should be expired
+ def websocket_handle(:ping, socket_info) do
+ if SocketInfo.expired?(socket_info) do
+ {:stop, socket_info}
+ else
+ {:ok, socket_info, :hibernate}
+ end
+ end
+
+ def websocket_handle({:text, data}, socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+
+ case FedSocket.receive_package(socket_info, data) do
+ {:noreply, _} ->
+ {:ok, socket_info}
+
+ {:reply, reply} ->
+ {:reply, {:text, Jason.encode!(reply)}, socket_info}
+
+ {:error, reason} ->
+ Logger.error("incoming error - receive_package: #{inspect(reason)}")
+ {:ok, socket_info}
+ end
+ end
+
+ def websocket_info({:send, message}, socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+
+ {:reply, {:text, message}, socket_info}
+ end
+
+ def websocket_info(:close, state) do
+ {:stop, state}
+ end
+
+ def websocket_info(message, state) do
+ Logger.debug("#{__MODULE__} unknown message #{inspect(message)}")
+ {:ok, state}
+ end
+end
diff --git a/lib/pleroma/web/fed_sockets/ingester_worker.ex b/lib/pleroma/web/fed_sockets/ingester_worker.ex
new file mode 100644
index 000000000..325f2a4ab
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/ingester_worker.ex
@@ -0,0 +1,33 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.IngesterWorker do
+ use Pleroma.Workers.WorkerHelper, queue: "ingestion_queue"
+ require Logger
+
+ alias Pleroma.Web.Federator
+
+ @impl Oban.Worker
+ def perform(%Job{args: %{"op" => "ingest", "object" => ingestee}}) do
+ try do
+ ingestee
+ |> Jason.decode!()
+ |> do_ingestion()
+ rescue
+ e ->
+ Logger.error("IngesterWorker error - #{inspect(e)}")
+ e
+ end
+ end
+
+ defp do_ingestion(params) do
+ case Federator.incoming_ap_doc(params) do
+ {:error, reason} ->
+ {:error, reason}
+
+ {:ok, object} ->
+ {:ok, object}
+ end
+ end
+end
diff --git a/lib/pleroma/web/fed_sockets/outgoing_handler.ex b/lib/pleroma/web/fed_sockets/outgoing_handler.ex
new file mode 100644
index 000000000..6ddef17fe
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/outgoing_handler.ex
@@ -0,0 +1,146 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.OutgoingHandler do
+ use GenServer
+
+ require Logger
+
+ alias Pleroma.Web.ActivityPub.InternalFetchActor
+ alias Pleroma.Web.FedSockets
+ alias Pleroma.Web.FedSockets.FedRegistry
+ alias Pleroma.Web.FedSockets.FedSocket
+ alias Pleroma.Web.FedSockets.SocketInfo
+
+ def start_link(uri) do
+ GenServer.start_link(__MODULE__, %{uri: uri})
+ end
+
+ def init(%{uri: uri}) do
+ case initiate_connection(uri) do
+ {:ok, ws_origin, conn_pid} ->
+ FedRegistry.add_fed_socket(ws_origin, conn_pid)
+
+ {:error, reason} ->
+ Logger.debug("Outgoing connection failed - #{inspect(reason)}")
+ :ignore
+ end
+ end
+
+ def handle_info({:gun_ws, conn_pid, _ref, {:text, data}}, socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+
+ case FedSocket.receive_package(socket_info, data) do
+ {:noreply, _} ->
+ {:noreply, socket_info}
+
+ {:reply, reply} ->
+ :gun.ws_send(conn_pid, {:text, Jason.encode!(reply)})
+ {:noreply, socket_info}
+
+ {:error, reason} ->
+ Logger.error("incoming error - receive_package: #{inspect(reason)}")
+ {:noreply, socket_info}
+ end
+ end
+
+ def handle_info(:close, state) do
+ Logger.debug("Sending close frame !!!!!!!")
+ {:close, state}
+ end
+
+ def handle_info({:gun_down, _pid, _prot, :closed, _}, state) do
+ {:stop, :normal, state}
+ end
+
+ def handle_info({:send, data}, %{conn_pid: conn_pid} = socket_info) do
+ socket_info = SocketInfo.touch(socket_info)
+ :gun.ws_send(conn_pid, {:text, data})
+ {:noreply, socket_info}
+ end
+
+ def handle_info({:gun_ws, _, _, :pong}, state) do
+ {:noreply, state, :hibernate}
+ end
+
+ def handle_info(msg, state) do
+ Logger.debug("#{__MODULE__} unhandled event #{inspect(msg)}")
+ {:noreply, state}
+ end
+
+ def terminate(reason, state) do
+ Logger.debug(
+ "#{__MODULE__} terminating outgoing connection for #{inspect(state)} for #{inspect(reason)}"
+ )
+
+ {:ok, state}
+ end
+
+ def initiate_connection(uri) do
+ ws_uri =
+ uri
+ |> SocketInfo.origin()
+ |> FedSockets.uri_for_origin()
+
+ %{host: host, port: port, path: path} = URI.parse(ws_uri)
+
+ with {:ok, conn_pid} <- :gun.open(to_charlist(host), port),
+ {:ok, _} <- :gun.await_up(conn_pid),
+ reference <- :gun.get(conn_pid, to_charlist(path)),
+ {:response, :fin, 204, _} <- :gun.await(conn_pid, reference),
+ headers <- build_headers(uri),
+ ref <- :gun.ws_upgrade(conn_pid, to_charlist(path), headers, %{silence_pings: false}) do
+ receive do
+ {:gun_upgrade, ^conn_pid, ^ref, [<<"websocket">>], _} ->
+ {:ok, ws_uri, conn_pid}
+ after
+ 15_000 ->
+ Logger.debug("Fedsocket timeout connecting to #{inspect(uri)}")
+ {:error, :timeout}
+ end
+ else
+ {:response, :nofin, 404, _} ->
+ {:error, :fedsockets_not_supported}
+
+ e ->
+ Logger.debug("Fedsocket error connecting to #{inspect(uri)}")
+ {:error, e}
+ end
+ end
+
+ defp build_headers(uri) do
+ host_for_sig = uri |> URI.parse() |> host_signature()
+
+ shake = FedSocket.shake()
+ digest = "SHA-256=" <> (:crypto.hash(:sha256, shake) |> Base.encode64())
+ date = Pleroma.Signature.signed_date()
+ shake_size = byte_size(shake)
+
+ signature_opts = %{
+ "(request-target)": shake,
+ "content-length": to_charlist("#{shake_size}"),
+ date: date,
+ digest: digest,
+ host: host_for_sig
+ }
+
+ signature = Pleroma.Signature.sign(InternalFetchActor.get_actor(), signature_opts)
+
+ [
+ {'signature', to_charlist(signature)},
+ {'date', date},
+ {'digest', to_charlist(digest)},
+ {'content-length', to_charlist("#{shake_size}")},
+ {to_charlist("(request-target)"), to_charlist(shake)}
+ ]
+ end
+
+ defp host_signature(%{host: host, scheme: scheme, port: port}) do
+ if port == URI.default_port(scheme) do
+ host
+ else
+ "#{host}:#{port}"
+ end
+ end
+end
diff --git a/lib/pleroma/web/fed_sockets/socket_info.ex b/lib/pleroma/web/fed_sockets/socket_info.ex
new file mode 100644
index 000000000..d6fdffe1a
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/socket_info.ex
@@ -0,0 +1,52 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.SocketInfo do
+ defstruct origin: nil,
+ pid: nil,
+ conn_pid: nil,
+ state: :default,
+ connected_until: nil
+
+ alias Pleroma.Web.FedSockets.SocketInfo
+ @default_connection_duration 15 * 60 * 1000
+
+ def build(uri, conn_pid \\ nil) do
+ uri
+ |> build_origin()
+ |> build_pids(conn_pid)
+ |> touch()
+ end
+
+ def touch(%SocketInfo{} = socket_info),
+ do: %{socket_info | connected_until: new_ttl()}
+
+ def connect(%SocketInfo{} = socket_info),
+ do: %{socket_info | state: :connected}
+
+ def expired?(%{connected_until: connected_until}),
+ do: connected_until < :erlang.monotonic_time(:millisecond)
+
+ def origin(uri),
+ do: build_origin(uri).origin
+
+ defp build_pids(socket_info, conn_pid),
+ do: struct(socket_info, pid: self(), conn_pid: conn_pid)
+
+ defp build_origin(uri) when is_binary(uri),
+ do: uri |> URI.parse() |> build_origin
+
+ defp build_origin(%{host: host, port: nil, scheme: scheme}),
+ do: build_origin(%{host: host, port: URI.default_port(scheme)})
+
+ defp build_origin(%{host: host, port: port}),
+ do: %SocketInfo{origin: "#{host}:#{port}"}
+
+ defp new_ttl do
+ connection_duration =
+ Pleroma.Config.get([:fed_sockets, :connection_duration], @default_connection_duration)
+
+ :erlang.monotonic_time(:millisecond) + connection_duration
+ end
+end
diff --git a/lib/pleroma/web/fed_sockets/supervisor.ex b/lib/pleroma/web/fed_sockets/supervisor.ex
new file mode 100644
index 000000000..a5f4bebfb
--- /dev/null
+++ b/lib/pleroma/web/fed_sockets/supervisor.ex
@@ -0,0 +1,59 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.FedSockets.Supervisor do
+ use Supervisor
+ import Cachex.Spec
+
+ def start_link(opts) do
+ Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
+ end
+
+ def init(args) do
+ children = [
+ build_cache(:fed_socket_fetches, args),
+ build_cache(:fed_socket_rejections, args),
+ {Registry, keys: :unique, name: FedSockets.Registry, meta: [rejected: %{}]}
+ ]
+
+ opts = [strategy: :one_for_all, name: Pleroma.Web.Streamer.Supervisor]
+ Supervisor.init(children, opts)
+ end
+
+ defp build_cache(name, args) do
+ opts = get_opts(name, args)
+
+ %{
+ id: String.to_atom("#{name}_cache"),
+ start: {Cachex, :start_link, [name, opts]},
+ type: :worker
+ }
+ end
+
+ defp get_opts(cache_name, args)
+ when cache_name in [:fed_socket_fetches, :fed_socket_rejections] do
+ default = get_opts_or_config(args, cache_name, :default, 15_000)
+ interval = get_opts_or_config(args, cache_name, :interval, 3_000)
+ lazy = get_opts_or_config(args, cache_name, :lazy, false)
+
+ [expiration: expiration(default: default, interval: interval, lazy: lazy)]
+ end
+
+ defp get_opts(name, args) do
+ Keyword.get(args, name, [])
+ end
+
+ defp get_opts_or_config(args, name, key, default) do
+ args
+ |> Keyword.get(name, [])
+ |> Keyword.get(key)
+ |> case do
+ nil ->
+ Pleroma.Config.get([:fed_sockets, name, key], default)
+
+ value ->
+ value
+ end
+ end
+end
diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex
index d2a30a548..121ba1693 100644
--- a/lib/pleroma/web/mastodon_api/views/account_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/account_view.ex
@@ -181,8 +181,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
user = User.sanitize_html(user, User.html_filter_policy(opts[:for]))
display_name = user.name || user.nickname
- image = User.avatar_url(user) |> MediaProxy.url()
+ avatar = User.avatar_url(user) |> MediaProxy.url()
+ avatar_static = User.avatar_url(user) |> MediaProxy.preview_url(static: true)
header = User.banner_url(user) |> MediaProxy.url()
+ header_static = User.banner_url(user) |> MediaProxy.preview_url(static: true)
following_count =
if !user.hide_follows_count or !user.hide_follows or opts[:for] == user do
@@ -247,10 +249,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
statuses_count: user.note_count,
note: user.bio,
url: user.uri || user.ap_id,
- avatar: image,
- avatar_static: image,
+ avatar: avatar,
+ avatar_static: avatar_static,
header: header,
- header_static: header,
+ header_static: header_static,
emojis: emojis,
fields: user.fields,
bot: bot,
diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex
index 94b8dc8c6..435bcde15 100644
--- a/lib/pleroma/web/mastodon_api/views/status_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/status_view.ex
@@ -415,6 +415,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
[attachment_url | _] = attachment["url"]
media_type = attachment_url["mediaType"] || attachment_url["mimeType"] || "image"
href = attachment_url["href"] |> MediaProxy.url()
+ href_preview = attachment_url["href"] |> MediaProxy.preview_url()
type =
cond do
@@ -430,7 +431,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
id: to_string(attachment["id"] || hash_id),
url: href,
remote_url: href,
- preview_url: href,
+ preview_url: href_preview,
text_url: href,
type: type,
description: attachment["name"],
diff --git a/lib/pleroma/web/media_proxy/invalidation.ex b/lib/pleroma/web/media_proxy/invalidation.ex
index 5808861e6..4f4340478 100644
--- a/lib/pleroma/web/media_proxy/invalidation.ex
+++ b/lib/pleroma/web/media_proxy/invalidation.ex
@@ -33,6 +33,8 @@ defmodule Pleroma.Web.MediaProxy.Invalidation do
def prepare_urls(urls) do
urls
|> List.wrap()
- |> Enum.map(&MediaProxy.url/1)
+ |> Enum.map(fn url -> [MediaProxy.url(url), MediaProxy.preview_url(url)] end)
+ |> List.flatten()
+ |> Enum.uniq()
end
end
diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex
index e18dd8224..8656b8cad 100644
--- a/lib/pleroma/web/media_proxy/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy.ex
@@ -4,6 +4,7 @@
defmodule Pleroma.Web.MediaProxy do
alias Pleroma.Config
+ alias Pleroma.Helpers.UriHelper
alias Pleroma.Upload
alias Pleroma.Web
alias Pleroma.Web.MediaProxy.Invalidation
@@ -40,27 +41,35 @@ defmodule Pleroma.Web.MediaProxy do
def url("/" <> _ = url), do: url
def url(url) do
- if disabled?() or not url_proxiable?(url) do
- url
- else
+ if enabled?() and url_proxiable?(url) do
encode_url(url)
+ else
+ url
end
end
@spec url_proxiable?(String.t()) :: boolean()
def url_proxiable?(url) do
- if local?(url) or whitelisted?(url) do
- false
+ not local?(url) and not whitelisted?(url)
+ end
+
+ def preview_url(url, preview_params \\ []) do
+ if preview_enabled?() do
+ encode_preview_url(url, preview_params)
else
- true
+ url(url)
end
end
- defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
+ def enabled?, do: Config.get([:media_proxy, :enabled], false)
+
+ # Note: the media proxy must be enabled for the media preview proxy so that all
+ # non-local, non-whitelisted URLs are loaded through it and the body size constraint is preserved.
+ def preview_enabled?, do: enabled?() and !!Config.get([:media_preview_proxy, :enabled])
- defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
+ def local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
- defp whitelisted?(url) do
+ def whitelisted?(url) do
%{host: domain} = URI.parse(url)
mediaproxy_whitelist_domains =
@@ -85,17 +94,29 @@ defmodule Pleroma.Web.MediaProxy do
defp maybe_get_domain_from_url(domain), do: domain
- def encode_url(url) do
+ defp base64_sig64(url) do
base64 = Base.url_encode64(url, @base64_opts)
sig64 =
base64
- |> signed_url
+ |> signed_url()
|> Base.url_encode64(@base64_opts)
+ {base64, sig64}
+ end
+
+ def encode_url(url) do
+ {base64, sig64} = base64_sig64(url)
+
build_url(sig64, base64, filename(url))
end
+ def encode_preview_url(url, preview_params \\ []) do
+ {base64, sig64} = base64_sig64(url)
+
+ build_preview_url(sig64, base64, filename(url), preview_params)
+ end
+
def decode_url(sig, url) do
with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
signature when signature == sig <- signed_url(url) do
@@ -113,10 +134,14 @@ defmodule Pleroma.Web.MediaProxy do
if path = URI.parse(url_or_path).path, do: Path.basename(path)
end
- def build_url(sig_base64, url_base64, filename \\ nil) do
+ def base_url do
+ Config.get([:media_proxy, :base_url], Web.base_url())
+ end
+
+ defp proxy_url(path, sig_base64, url_base64, filename) do
[
- Config.get([:media_proxy, :base_url], Web.base_url()),
- "proxy",
+ base_url(),
+ path,
sig_base64,
url_base64,
filename
@@ -124,4 +149,38 @@ defmodule Pleroma.Web.MediaProxy do
|> Enum.filter(& &1)
|> Path.join()
end
+
+ def build_url(sig_base64, url_base64, filename \\ nil) do
+ proxy_url("proxy", sig_base64, url_base64, filename)
+ end
+
+ def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
+ uri = proxy_url("proxy/preview", sig_base64, url_base64, filename)
+
+ UriHelper.modify_uri_params(uri, preview_params)
+ end
+
+ def verify_request_path_and_url(
+ %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
+ url
+ ) do
+ verify_request_path_and_url(request_path, url)
+ end
+
+ def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
+ filename = filename(url)
+
+ if filename && not basename_matches?(request_path, filename) do
+ {:wrong_filename, filename}
+ else
+ :ok
+ end
+ end
+
+ def verify_request_path_and_url(_, _), do: :ok
+
+ defp basename_matches?(path, filename) do
+ basename = Path.basename(path)
+ basename == filename or URI.decode(basename) == filename or URI.encode(basename) == filename
+ end
end
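As an illustration of the rules above, assuming both :media_proxy and :media_preview_proxy are enabled and the URL is neither local nor whitelisted:

    alias Pleroma.Web.MediaProxy

    url = "https://remote.example/media/image.png"

    # Returns a signed media proxy URL under base_url()/proxy/...
    MediaProxy.url(url)

    # Returns a signed preview URL under base_url()/proxy/preview/... with ?static=true appended;
    # if the preview proxy were disabled, this would fall back to url/1.
    MediaProxy.preview_url(url, static: true)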
diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
index 9a64b0ef3..90651ed9b 100644
--- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
@@ -5,44 +5,201 @@
defmodule Pleroma.Web.MediaProxy.MediaProxyController do
use Pleroma.Web, :controller
+ alias Pleroma.Config
+ alias Pleroma.Helpers.MediaHelper
+ alias Pleroma.Helpers.UriHelper
alias Pleroma.ReverseProxy
alias Pleroma.Web.MediaProxy
+ alias Plug.Conn
- @default_proxy_opts [max_body_length: 25 * 1_048_576, http: [follow_redirect: true]]
-
- def remote(conn, %{"sig" => sig64, "url" => url64} = params) do
- with config <- Pleroma.Config.get([:media_proxy], []),
- true <- Keyword.get(config, :enabled, false),
+ def remote(conn, %{"sig" => sig64, "url" => url64}) do
+ with {_, true} <- {:enabled, MediaProxy.enabled?()},
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
{_, false} <- {:in_banned_urls, MediaProxy.in_banned_urls(url)},
- :ok <- filename_matches(params, conn.request_path, url) do
- ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
+ :ok <- MediaProxy.verify_request_path_and_url(conn, url) do
+ ReverseProxy.call(conn, url, media_proxy_opts())
else
- error when error in [false, {:in_banned_urls, true}] ->
- send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
+ {:enabled, false} ->
+ send_resp(conn, 404, Conn.Status.reason_phrase(404))
+
+ {:in_banned_urls, true} ->
+ send_resp(conn, 404, Conn.Status.reason_phrase(404))
{:error, :invalid_signature} ->
- send_resp(conn, 403, Plug.Conn.Status.reason_phrase(403))
+ send_resp(conn, 403, Conn.Status.reason_phrase(403))
{:wrong_filename, filename} ->
redirect(conn, external: MediaProxy.build_url(sig64, url64, filename))
end
end
- def filename_matches(%{"filename" => _} = _, path, url) do
- filename = MediaProxy.filename(url)
+ def preview(%Conn{} = conn, %{"sig" => sig64, "url" => url64}) do
+ with {_, true} <- {:enabled, MediaProxy.preview_enabled?()},
+ {:ok, url} <- MediaProxy.decode_url(sig64, url64),
+ :ok <- MediaProxy.verify_request_path_and_url(conn, url) do
+ handle_preview(conn, url)
+ else
+ {:enabled, false} ->
+ send_resp(conn, 404, Conn.Status.reason_phrase(404))
+
+ {:error, :invalid_signature} ->
+ send_resp(conn, 403, Conn.Status.reason_phrase(403))
+
+ {:wrong_filename, filename} ->
+ redirect(conn, external: MediaProxy.build_preview_url(sig64, url64, filename))
+ end
+ end
+
+ defp handle_preview(conn, url) do
+ media_proxy_url = MediaProxy.url(url)
+
+ with {:ok, %{status: status} = head_response} when status in 200..299 <-
+ Pleroma.HTTP.request("head", media_proxy_url, [], [], pool: :media) do
+ content_type = Tesla.get_header(head_response, "content-type")
+ content_length = Tesla.get_header(head_response, "content-length")
+ content_length = content_length && String.to_integer(content_length)
+ static = conn.params["static"] in ["true", true]
+
+ cond do
+ static and content_type == "image/gif" ->
+ handle_jpeg_preview(conn, media_proxy_url)
+
+ static ->
+ drop_static_param_and_redirect(conn)
+
+ content_type == "image/gif" ->
+ redirect(conn, external: media_proxy_url)
+
+ min_content_length_for_preview() > 0 and content_length > 0 and
+ content_length < min_content_length_for_preview() ->
+ redirect(conn, external: media_proxy_url)
+
+ true ->
+ handle_preview(content_type, conn, media_proxy_url)
+ end
+ else
+ # If HEAD failed, redirecting to media proxy URI doesn't make much sense; returning an error
+ {_, %{status: status}} ->
+ send_resp(conn, :failed_dependency, "Can't fetch HTTP headers (HTTP #{status}).")
+
+ {:error, :recv_response_timeout} ->
+ send_resp(conn, :failed_dependency, "HEAD request timeout.")
+
+ _ ->
+ send_resp(conn, :failed_dependency, "Can't fetch HTTP headers.")
+ end
+ end
+
+ defp handle_preview("image/png" <> _ = _content_type, conn, media_proxy_url) do
+ handle_png_preview(conn, media_proxy_url)
+ end
+
+ defp handle_preview("image/" <> _ = _content_type, conn, media_proxy_url) do
+ handle_jpeg_preview(conn, media_proxy_url)
+ end
+
+ defp handle_preview("video/" <> _ = _content_type, conn, media_proxy_url) do
+ handle_video_preview(conn, media_proxy_url)
+ end
+
+ defp handle_preview(_unsupported_content_type, conn, media_proxy_url) do
+ fallback_on_preview_error(conn, media_proxy_url)
+ end
+
+ defp handle_png_preview(conn, media_proxy_url) do
+ quality = Config.get!([:media_preview_proxy, :image_quality])
+ {thumbnail_max_width, thumbnail_max_height} = thumbnail_max_dimensions()
+
+ with {:ok, thumbnail_binary} <-
+ MediaHelper.image_resize(
+ media_proxy_url,
+ %{
+ max_width: thumbnail_max_width,
+ max_height: thumbnail_max_height,
+ quality: quality,
+ format: "png"
+ }
+ ) do
+ conn
+ |> put_preview_response_headers(["image/png", "preview.png"])
+ |> send_resp(200, thumbnail_binary)
+ else
+ _ ->
+ fallback_on_preview_error(conn, media_proxy_url)
+ end
+ end
+
+ defp handle_jpeg_preview(conn, media_proxy_url) do
+ quality = Config.get!([:media_preview_proxy, :image_quality])
+ {thumbnail_max_width, thumbnail_max_height} = thumbnail_max_dimensions()
- if filename && does_not_match(path, filename) do
- {:wrong_filename, filename}
+ with {:ok, thumbnail_binary} <-
+ MediaHelper.image_resize(
+ media_proxy_url,
+ %{max_width: thumbnail_max_width, max_height: thumbnail_max_height, quality: quality}
+ ) do
+ conn
+ |> put_preview_response_headers()
+ |> send_resp(200, thumbnail_binary)
else
- :ok
+ _ ->
+ fallback_on_preview_error(conn, media_proxy_url)
end
end
- def filename_matches(_, _, _), do: :ok
+ defp handle_video_preview(conn, media_proxy_url) do
+ with {:ok, thumbnail_binary} <-
+ MediaHelper.video_framegrab(media_proxy_url) do
+ conn
+ |> put_preview_response_headers()
+ |> send_resp(200, thumbnail_binary)
+ else
+ _ ->
+ fallback_on_preview_error(conn, media_proxy_url)
+ end
+ end
+
+ defp drop_static_param_and_redirect(conn) do
+ uri_without_static_param =
+ conn
+ |> current_url()
+ |> UriHelper.modify_uri_params(%{}, ["static"])
+
+ redirect(conn, external: uri_without_static_param)
+ end
+
+ defp fallback_on_preview_error(conn, media_proxy_url) do
+ redirect(conn, external: media_proxy_url)
+ end
+
+ defp put_preview_response_headers(
+ conn,
+ [content_type, filename] = _content_info \\ ["image/jpeg", "preview.jpg"]
+ ) do
+ conn
+ |> put_resp_header("content-type", content_type)
+ |> put_resp_header("content-disposition", "inline; filename=\"#{filename}\"")
+ |> put_resp_header("cache-control", ReverseProxy.default_cache_control_header())
+ end
+
+ defp thumbnail_max_dimensions do
+ config = media_preview_proxy_config()
+
+ thumbnail_max_width = Keyword.fetch!(config, :thumbnail_max_width)
+ thumbnail_max_height = Keyword.fetch!(config, :thumbnail_max_height)
+
+ {thumbnail_max_width, thumbnail_max_height}
+ end
+
+ defp min_content_length_for_preview do
+ Keyword.get(media_preview_proxy_config(), :min_content_length, 0)
+ end
+
+ defp media_preview_proxy_config do
+ Config.get!([:media_preview_proxy])
+ end
- defp does_not_match(path, filename) do
- basename = Path.basename(path)
- basename != filename and URI.decode(basename) != filename and URI.encode(basename) != filename
+ defp media_proxy_opts do
+ Config.get([:media_proxy, :proxy_opts], [])
end
end
diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex
index 2f0dfb474..8a206e019 100644
--- a/lib/pleroma/web/metadata/utils.ex
+++ b/lib/pleroma/web/metadata/utils.ex
@@ -38,7 +38,7 @@ defmodule Pleroma.Web.Metadata.Utils do
def scrub_html(content), do: content
def attachment_url(url) do
- MediaProxy.url(url)
+ MediaProxy.preview_url(url)
end
def user_name_string(user) do
diff --git a/lib/pleroma/web/oauth/oauth_controller.ex b/lib/pleroma/web/oauth/oauth_controller.ex
index 26e68be42..a4152e840 100644
--- a/lib/pleroma/web/oauth/oauth_controller.ex
+++ b/lib/pleroma/web/oauth/oauth_controller.ex
@@ -119,7 +119,7 @@ defmodule Pleroma.Web.OAuth.OAuthController do
redirect_uri = redirect_uri(conn, redirect_uri)
url_params = %{access_token: token.token}
url_params = Maps.put_if_present(url_params, :state, params["state"])
- url = UriHelper.append_uri_params(redirect_uri, url_params)
+ url = UriHelper.modify_uri_params(redirect_uri, url_params)
redirect(conn, external: url)
else
conn
@@ -161,7 +161,7 @@ defmodule Pleroma.Web.OAuth.OAuthController do
redirect_uri = redirect_uri(conn, redirect_uri)
url_params = %{code: auth.token}
url_params = Maps.put_if_present(url_params, :state, auth_attrs["state"])
- url = UriHelper.append_uri_params(redirect_uri, url_params)
+ url = UriHelper.modify_uri_params(redirect_uri, url_params)
redirect(conn, external: url)
else
conn
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index cd336b932..f924e1e91 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -680,6 +680,8 @@ defmodule Pleroma.Web.Router do
end
scope "/proxy/", Pleroma.Web.MediaProxy do
+ get("/preview/:sig/:url", MediaProxyController, :preview)
+ get("/preview/:sig/:url/:filename", MediaProxyController, :preview)
get("/:sig/:url", MediaProxyController, :remote)
get("/:sig/:url/:filename", MediaProxyController, :remote)
end
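The new routes above are the targets of MediaProxy.build_preview_url/4; a rough sketch of the resulting paths, with sig64, url64 and the filename as placeholders:

    # GET /proxy/preview/:sig/:url            -> MediaProxyController.preview/2
    # GET /proxy/preview/:sig/:url/:filename  -> MediaProxyController.preview/2
    MediaProxy.build_preview_url(sig64, url64, "preview.png", static: true)
    # roughly: "#{MediaProxy.base_url()}/proxy/preview/#{sig64}/#{url64}/preview.png?static=true"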