From 0374df1d12a4c28fac72be9b9c0545d318c10385 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov Date: Tue, 23 Jun 2020 21:10:32 +0300 Subject: other files consistency --- lib/pleroma/captcha.ex | 102 ++++ lib/pleroma/captcha/captcha.ex | 102 ---- lib/pleroma/captcha/captcha_service.ex | 37 -- lib/pleroma/captcha/service.ex | 37 ++ lib/pleroma/config/config_db.ex | 382 -------------- lib/pleroma/config_db.ex | 382 ++++++++++++++ .../conversation/participation/recipient_ship.ex | 34 ++ .../conversation/participation_recipient_ship.ex | 34 -- lib/pleroma/gun.ex | 31 ++ lib/pleroma/gun/gun.ex | 31 -- lib/pleroma/http.ex | 110 ++++ lib/pleroma/http/http.ex | 110 ---- lib/pleroma/reverse_proxy.ex | 432 ++++++++++++++++ lib/pleroma/reverse_proxy/reverse_proxy.ex | 432 ---------------- lib/pleroma/web/common_api.ex | 573 +++++++++++++++++++++ lib/pleroma/web/common_api/common_api.ex | 573 --------------------- lib/pleroma/web/fallback/redirect_controller.ex | 108 ++++ lib/pleroma/web/fallback_redirect_controller.ex | 108 ---- lib/pleroma/web/federator.ex | 111 ++++ lib/pleroma/web/federator/federator.ex | 111 ---- lib/pleroma/web/feed/user_controller.ex | 3 +- lib/pleroma/web/media_proxy.ex | 186 +++++++ lib/pleroma/web/media_proxy/invalidation/http.ex | 40 ++ lib/pleroma/web/media_proxy/invalidation/script.ex | 43 ++ lib/pleroma/web/media_proxy/invalidations/http.ex | 40 -- .../web/media_proxy/invalidations/script.ex | 43 -- lib/pleroma/web/media_proxy/media_proxy.ex | 186 ------- lib/pleroma/web/metadata/providers/open_graph.ex | 119 +++++ lib/pleroma/web/metadata/providers/opengraph.ex | 119 ----- lib/pleroma/web/o_status/o_status_controller.ex | 2 +- lib/pleroma/web/router.ex | 2 +- test/pleroma/web/feed/user_controller_test.exs | 2 +- 32 files changed, 2312 insertions(+), 2313 deletions(-) create mode 100644 lib/pleroma/captcha.ex delete mode 100644 lib/pleroma/captcha/captcha.ex delete mode 100644 lib/pleroma/captcha/captcha_service.ex create mode 100644 lib/pleroma/captcha/service.ex delete mode 100644 lib/pleroma/config/config_db.ex create mode 100644 lib/pleroma/config_db.ex create mode 100644 lib/pleroma/conversation/participation/recipient_ship.ex delete mode 100644 lib/pleroma/conversation/participation_recipient_ship.ex create mode 100644 lib/pleroma/gun.ex delete mode 100644 lib/pleroma/gun/gun.ex create mode 100644 lib/pleroma/http.ex delete mode 100644 lib/pleroma/http/http.ex create mode 100644 lib/pleroma/reverse_proxy.ex delete mode 100644 lib/pleroma/reverse_proxy/reverse_proxy.ex create mode 100644 lib/pleroma/web/common_api.ex delete mode 100644 lib/pleroma/web/common_api/common_api.ex create mode 100644 lib/pleroma/web/fallback/redirect_controller.ex delete mode 100644 lib/pleroma/web/fallback_redirect_controller.ex create mode 100644 lib/pleroma/web/federator.ex delete mode 100644 lib/pleroma/web/federator/federator.ex create mode 100644 lib/pleroma/web/media_proxy.ex create mode 100644 lib/pleroma/web/media_proxy/invalidation/http.ex create mode 100644 lib/pleroma/web/media_proxy/invalidation/script.ex delete mode 100644 lib/pleroma/web/media_proxy/invalidations/http.ex delete mode 100644 lib/pleroma/web/media_proxy/invalidations/script.ex delete mode 100644 lib/pleroma/web/media_proxy/media_proxy.ex create mode 100644 lib/pleroma/web/metadata/providers/open_graph.ex delete mode 100644 lib/pleroma/web/metadata/providers/opengraph.ex diff --git a/lib/pleroma/captcha.ex b/lib/pleroma/captcha.ex new file mode 100644 index 000000000..6ab754b6f --- /dev/null +++ 
b/lib/pleroma/captcha.ex @@ -0,0 +1,102 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Captcha do + alias Calendar.DateTime + alias Plug.Crypto.KeyGenerator + alias Plug.Crypto.MessageEncryptor + + @doc """ + Ask the configured captcha service for a new captcha + """ + def new do + if not enabled?() do + %{type: :none} + else + new_captcha = method().new() + + # This makes the salt a little different for the two keys + {secret, sign_secret} = secret_pair(new_captcha[:token]) + + # Basically copy what Phoenix.Token does here: add the time to + # the actual data and make it a binary, then encrypt it + encrypted_captcha_answer = + %{ + at: DateTime.now_utc(), + answer_data: new_captcha[:answer_data] + } + |> :erlang.term_to_binary() + |> MessageEncryptor.encrypt(secret, sign_secret) + + # Replace the answer with the encrypted answer + %{new_captcha | answer_data: encrypted_captcha_answer} + end + end + + @doc """ + Ask the configured captcha service to validate the captcha + """ + def validate(token, captcha, answer_data) do + with {:ok, %{at: at, answer_data: answer_md5}} <- validate_answer_data(token, answer_data), + :ok <- validate_expiration(at), + :ok <- validate_usage(token), + :ok <- method().validate(token, captcha, answer_md5), + {:ok, _} <- mark_captcha_as_used(token) do + :ok + end + end + + def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled], false) + + defp seconds_valid, do: Pleroma.Config.get!([__MODULE__, :seconds_valid]) + + defp secret_pair(token) do + secret_key_base = Pleroma.Config.get!([Pleroma.Web.Endpoint, :secret_key_base]) + secret = KeyGenerator.generate(secret_key_base, token <> "_encrypt") + sign_secret = KeyGenerator.generate(secret_key_base, token <> "_sign") + + {secret, sign_secret} + end + + defp validate_answer_data(token, answer_data) do + {secret, sign_secret} = secret_pair(token) + + with false <- is_nil(answer_data), + {:ok, data} <- MessageEncryptor.decrypt(answer_data, secret, sign_secret), + %{at: at, answer_data: answer_md5} <- :erlang.binary_to_term(data) do + {:ok, %{at: at, answer_data: answer_md5}} + else + _ -> {:error, :invalid_answer_data} + end + end + + defp validate_expiration(created_at) do + # If the time found is less than (current_time-seconds_valid) then the time has already passed + # Later we check that the time found is more than the presumed invalidation time, which means + # that the data is still valid and the captcha can be checked + + valid_if_after = DateTime.subtract!(DateTime.now_utc(), seconds_valid()) + + if DateTime.before?(created_at, valid_if_after) do + {:error, :expired} + else + :ok + end + end + + defp validate_usage(token) do + if is_nil(Cachex.get!(:used_captcha_cache, token)) do + :ok + else + {:error, :already_used} + end + end + + defp mark_captcha_as_used(token) do + ttl = seconds_valid() |> :timer.seconds() + Cachex.put(:used_captcha_cache, token, true, ttl: ttl) + end + + defp method, do: Pleroma.Config.get!([__MODULE__, :method]) +end diff --git a/lib/pleroma/captcha/captcha.ex b/lib/pleroma/captcha/captcha.ex deleted file mode 100644 index 6ab754b6f..000000000 --- a/lib/pleroma/captcha/captcha.ex +++ /dev/null @@ -1,102 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Captcha do - alias Calendar.DateTime - alias Plug.Crypto.KeyGenerator - alias Plug.Crypto.MessageEncryptor - - @doc
""" - Ask the configured captcha service for a new captcha - """ - def new do - if not enabled?() do - %{type: :none} - else - new_captcha = method().new() - - # This make salt a little different for two keys - {secret, sign_secret} = secret_pair(new_captcha[:token]) - - # Basically copy what Phoenix.Token does here, add the time to - # the actual data and make it a binary to then encrypt it - encrypted_captcha_answer = - %{ - at: DateTime.now_utc(), - answer_data: new_captcha[:answer_data] - } - |> :erlang.term_to_binary() - |> MessageEncryptor.encrypt(secret, sign_secret) - - # Replace the answer with the encrypted answer - %{new_captcha | answer_data: encrypted_captcha_answer} - end - end - - @doc """ - Ask the configured captcha service to validate the captcha - """ - def validate(token, captcha, answer_data) do - with {:ok, %{at: at, answer_data: answer_md5}} <- validate_answer_data(token, answer_data), - :ok <- validate_expiration(at), - :ok <- validate_usage(token), - :ok <- method().validate(token, captcha, answer_md5), - {:ok, _} <- mark_captcha_as_used(token) do - :ok - end - end - - def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled], false) - - defp seconds_valid, do: Pleroma.Config.get!([__MODULE__, :seconds_valid]) - - defp secret_pair(token) do - secret_key_base = Pleroma.Config.get!([Pleroma.Web.Endpoint, :secret_key_base]) - secret = KeyGenerator.generate(secret_key_base, token <> "_encrypt") - sign_secret = KeyGenerator.generate(secret_key_base, token <> "_sign") - - {secret, sign_secret} - end - - defp validate_answer_data(token, answer_data) do - {secret, sign_secret} = secret_pair(token) - - with false <- is_nil(answer_data), - {:ok, data} <- MessageEncryptor.decrypt(answer_data, secret, sign_secret), - %{at: at, answer_data: answer_md5} <- :erlang.binary_to_term(data) do - {:ok, %{at: at, answer_data: answer_md5}} - else - _ -> {:error, :invalid_answer_data} - end - end - - defp validate_expiration(created_at) do - # If the time found is less than (current_time-seconds_valid) then the time has already passed - # Later we check that the time found is more than the presumed invalidatation time, that means - # that the data is still valid and the captcha can be checked - - valid_if_after = DateTime.subtract!(DateTime.now_utc(), seconds_valid()) - - if DateTime.before?(created_at, valid_if_after) do - {:error, :expired} - else - :ok - end - end - - defp validate_usage(token) do - if is_nil(Cachex.get!(:used_captcha_cache, token)) do - :ok - else - {:error, :already_used} - end - end - - defp mark_captcha_as_used(token) do - ttl = seconds_valid() |> :timer.seconds() - Cachex.put(:used_captcha_cache, token, true, ttl: ttl) - end - - defp method, do: Pleroma.Config.get!([__MODULE__, :method]) -end diff --git a/lib/pleroma/captcha/captcha_service.ex b/lib/pleroma/captcha/captcha_service.ex deleted file mode 100644 index 959038cef..000000000 --- a/lib/pleroma/captcha/captcha_service.ex +++ /dev/null @@ -1,37 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Captcha.Service do - @doc """ - Request new captcha from a captcha service. - - Returns: - - Type/Name of the service, the token to identify the captcha, - the data of the answer and service-specific data to use the newly created captcha - """ - @callback new() :: %{ - type: atom(), - token: String.t(), - answer_data: any() - } - - @doc """ - Validated the provided captcha solution. 
- - Arguments: - * `token` the captcha is associated with - * `captcha` solution of the captcha to validate - * `answer_data` is the data needed to validate the answer (presumably encrypted) - - Returns: - - `:ok` if the captcha is valid, `{:error, message}` if not - """ - @callback validate( - token :: String.t(), - captcha :: String.t(), - answer_data :: any() - ) :: :ok | {:error, String.t()} -end diff --git a/lib/pleroma/captcha/service.ex b/lib/pleroma/captcha/service.ex new file mode 100644 index 000000000..959038cef --- /dev/null +++ b/lib/pleroma/captcha/service.ex @@ -0,0 +1,37 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Captcha.Service do + @doc """ + Requests a new captcha from a captcha service. + + Returns: + + Type/Name of the service, the token to identify the captcha, + the data of the answer and service-specific data to use the newly created captcha + """ + @callback new() :: %{ + type: atom(), + token: String.t(), + answer_data: any() + } + + @doc """ + Validates the provided captcha solution. + + Arguments: + * `token` the captcha is associated with + * `captcha` solution of the captcha to validate + * `answer_data` is the data needed to validate the answer (presumably encrypted) + + Returns: + + `:ok` if the captcha is valid, `{:error, message}` if not + """ + @callback validate( + token :: String.t(), + captcha :: String.t(), + answer_data :: any() + ) :: :ok | {:error, String.t()} +end diff --git a/lib/pleroma/config/config_db.ex b/lib/pleroma/config/config_db.ex deleted file mode 100644 index e5b7811aa..000000000 --- a/lib/pleroma/config/config_db.ex +++ /dev/null @@ -1,382 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.ConfigDB do - use Ecto.Schema - - import Ecto.Changeset - import Ecto.Query, only: [select: 3] - import Pleroma.Web.Gettext - - alias __MODULE__ - alias Pleroma.Repo - - @type t :: %__MODULE__{} - - @full_subkey_update [ - {:pleroma, :assets, :mascots}, - {:pleroma, :emoji, :groups}, - {:pleroma, :workers, :retries}, - {:pleroma, :mrf_subchain, :match_actor}, - {:pleroma, :mrf_keyword, :replace} - ] - - schema "config" do - field(:key, Pleroma.EctoType.Config.Atom) - field(:group, Pleroma.EctoType.Config.Atom) - field(:value, Pleroma.EctoType.Config.BinaryValue) - field(:db, {:array, :string}, virtual: true, default: []) - - timestamps() - end - - @spec get_all_as_keyword() :: keyword() - def get_all_as_keyword do - ConfigDB - |> select([c], {c.group, c.key, c.value}) - |> Repo.all() - |> Enum.reduce([], fn {group, key, value}, acc -> - Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}])) - end) - end - - @spec get_by_params(map()) :: ConfigDB.t() | nil - def get_by_params(params), do: Repo.get_by(ConfigDB, params) - - @spec changeset(ConfigDB.t(), map()) :: Changeset.t() - def changeset(config, params \\ %{}) do - config - |> cast(params, [:key, :group, :value]) - |> validate_required([:key, :group, :value]) - |> unique_constraint(:key, name: :config_group_key_index) - end - - defp create(params) do - %ConfigDB{} - |> changeset(params) - |> Repo.insert() - end - - defp update(%ConfigDB{} = config, %{value: value}) do - config - |> changeset(%{value: value}) - |> Repo.update() - end - - @spec get_db_keys(keyword(), any()) :: [String.t()] - def get_db_keys(value, key) do - keys = - if Keyword.keyword?(value) do - Keyword.keys(value)
- else - [key] - end - - Enum.map(keys, &to_json_types(&1)) - end - - @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword() - def merge_group(group, key, old_value, new_value) do - new_keys = to_mapset(new_value) - - intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list() - - merged_value = ConfigDB.merge(old_value, new_value) - - @full_subkey_update - |> Enum.map(fn - {g, k, subkey} when g == group and k == key -> - if subkey in intersect_keys, do: subkey, else: [] - - _ -> - [] - end) - |> List.flatten() - |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1])) - end - - defp to_mapset(keyword) do - keyword - |> Keyword.keys() - |> MapSet.new() - end - - @spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean() - def sub_key_full_update?(group, key, subkeys) do - Enum.any?(@full_subkey_update, fn {g, k, subkey} -> - g == group and k == key and subkey in subkeys - end) - end - - @spec merge(keyword(), keyword()) :: keyword() - def merge(config1, config2) when is_list(config1) and is_list(config2) do - Keyword.merge(config1, config2, fn _, app1, app2 -> - if Keyword.keyword?(app1) and Keyword.keyword?(app2) do - Keyword.merge(app1, app2, &deep_merge/3) - else - app2 - end - end) - end - - defp deep_merge(_key, value1, value2) do - if Keyword.keyword?(value1) and Keyword.keyword?(value2) do - Keyword.merge(value1, value2, &deep_merge/3) - else - value2 - end - end - - @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} - def update_or_create(params) do - params = Map.put(params, :value, to_elixir_types(params[:value])) - search_opts = Map.take(params, [:group, :key]) - - with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), - {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config}, - {_, true, config} <- - {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do - new_value = merge_group(config.group, config.key, config.value, params[:value]) - update(config, %{value: new_value}) - else - {reason, false, config} when reason in [:partial_update, :can_be_merged] -> - update(config, params) - - nil -> - create(params) - end - end - - defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config) - - defp only_full_update?(%ConfigDB{group: group, key: key}) do - full_key_update = [ - {:pleroma, :ecto_repos}, - {:quack, :meta}, - {:mime, :types}, - {:cors_plug, [:max_age, :methods, :expose, :headers]}, - {:swarm, :node_blacklist}, - {:logger, :backends} - ] - - Enum.any?(full_key_update, fn - {s_group, s_key} -> - group == s_group and ((is_list(s_key) and key in s_key) or key == s_key) - end) - end - - @spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} - def delete(%ConfigDB{} = config), do: Repo.delete(config) - - def delete(params) do - search_opts = Map.delete(params, :subkeys) - - with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), - {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]}, - keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)), - {_, config, new_value} when new_value != [] <- - {:partial_remove, config, Keyword.drop(config.value, keys)} do - update(config, %{value: new_value}) - else - {:partial_remove, config, []} -> - Repo.delete(config) - - {config, nil} -> - Repo.delete(config) - - nil -> - err = - dgettext("errors", "Config with params %{params} not found", params: inspect(params)) - - {:error, err} - end - end - - 
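
The merge machinery above is subtle, so a concrete sketch may help: ConfigDB.merge/2 deep-merges keyword values, while plain lists are replaced outright. The values below are made up for illustration.

    old = [federating: true, mrf_simple: [media_removal: [], media_nsfw: ["a.example"]]]
    new = [mrf_simple: [media_nsfw: ["b.example"]]]

    Pleroma.ConfigDB.merge(old, new)
    # => [federating: true, mrf_simple: [media_removal: [], media_nsfw: ["b.example"]]]
    # Both sides of :mrf_simple are keyword lists, so they are merged key by key;
    # the inner ["a.example"] is not a keyword list, so the new value wins.
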
@spec to_json_types(term()) :: map() | list() | boolean() | String.t() - def to_json_types(entity) when is_list(entity) do - Enum.map(entity, &to_json_types/1) - end - - def to_json_types(%Regex{} = entity), do: inspect(entity) - - def to_json_types(entity) when is_map(entity) do - Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end) - end - - def to_json_types({:args, args}) when is_list(args) do - arguments = - Enum.map(args, fn - arg when is_tuple(arg) -> inspect(arg) - arg -> to_json_types(arg) - end) - - %{"tuple" => [":args", arguments]} - end - - def to_json_types({:proxy_url, {type, :localhost, port}}) do - %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]} - end - - def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do - ip = - host - |> :inet_parse.ntoa() - |> to_string() - - %{ - "tuple" => [ - ":proxy_url", - %{"tuple" => [to_json_types(type), ip, port]} - ] - } - end - - def to_json_types({:proxy_url, {type, host, port}}) do - %{ - "tuple" => [ - ":proxy_url", - %{"tuple" => [to_json_types(type), to_string(host), port]} - ] - } - end - - def to_json_types({:partial_chain, entity}), - do: %{"tuple" => [":partial_chain", inspect(entity)]} - - def to_json_types(entity) when is_tuple(entity) do - value = - entity - |> Tuple.to_list() - |> to_json_types() - - %{"tuple" => value} - end - - def to_json_types(entity) when is_binary(entity), do: entity - - def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do - entity - end - - def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do - ":#{entity}" - end - - def to_json_types(entity) when is_atom(entity), do: inspect(entity) - - @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term() - def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do - arguments = - Enum.map(args, fn arg -> - if String.contains?(arg, ["{", "}"]) do - {elem, []} = Code.eval_string(arg) - elem - else - to_elixir_types(arg) - end - end) - - {:args, arguments} - end - - def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do - {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}} - end - - def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do - {partial_chain, []} = - entity - |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "") - |> Code.eval_string() - - {:partial_chain, partial_chain} - end - - def to_elixir_types(%{"tuple" => entity}) do - Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1))) - end - - def to_elixir_types(entity) when is_map(entity) do - Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end) - end - - def to_elixir_types(entity) when is_list(entity) do - Enum.map(entity, &to_elixir_types/1) - end - - def to_elixir_types(entity) when is_binary(entity) do - entity - |> String.trim() - |> string_to_elixir_types() - end - - def to_elixir_types(entity), do: entity - - @spec string_to_elixir_types(String.t()) :: - atom() | Regex.t() | module() | String.t() | no_return() - def string_to_elixir_types("~r" <> _pattern = regex) do - pattern = - ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u - - delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}] - - with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- - Regex.named_captures(pattern, regex), - {:ok, {leading, closing}} <- 
find_valid_delimiter(delimiters, pattern, regex_delimiter), - {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do - result - end - end - - def string_to_elixir_types(":" <> atom), do: String.to_atom(atom) - - def string_to_elixir_types(value) do - if module_name?(value) do - String.to_existing_atom("Elixir." <> value) - else - value - end - end - - defp parse_host("localhost"), do: :localhost - - defp parse_host(host) do - charlist = to_charlist(host) - - case :inet.parse_address(charlist) do - {:error, :einval} -> - charlist - - {:ok, ip} -> - ip - end - end - - defp find_valid_delimiter([], _string, _) do - raise(ArgumentError, message: "valid delimiter for Regex expression not found") - end - - defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter) - when is_tuple(delimiter) do - if String.contains?(pattern, closing) do - find_valid_delimiter(others, pattern, regex_delimiter) - else - {:ok, {leading, closing}} - end - end - - defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do - if String.contains?(pattern, delimiter) do - find_valid_delimiter(others, pattern, regex_delimiter) - else - {:ok, {delimiter, delimiter}} - end - end - - @spec module_name?(String.t()) :: boolean() - def module_name?(string) do - Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or - string in ["Oban", "Ueberauth", "ExSyslogger"] - end -end diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex new file mode 100644 index 000000000..e5b7811aa --- /dev/null +++ b/lib/pleroma/config_db.ex @@ -0,0 +1,382 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ConfigDB do + use Ecto.Schema + + import Ecto.Changeset + import Ecto.Query, only: [select: 3] + import Pleroma.Web.Gettext + + alias __MODULE__ + alias Pleroma.Repo + + @type t :: %__MODULE__{} + + @full_subkey_update [ + {:pleroma, :assets, :mascots}, + {:pleroma, :emoji, :groups}, + {:pleroma, :workers, :retries}, + {:pleroma, :mrf_subchain, :match_actor}, + {:pleroma, :mrf_keyword, :replace} + ] + + schema "config" do + field(:key, Pleroma.EctoType.Config.Atom) + field(:group, Pleroma.EctoType.Config.Atom) + field(:value, Pleroma.EctoType.Config.BinaryValue) + field(:db, {:array, :string}, virtual: true, default: []) + + timestamps() + end + + @spec get_all_as_keyword() :: keyword() + def get_all_as_keyword do + ConfigDB + |> select([c], {c.group, c.key, c.value}) + |> Repo.all() + |> Enum.reduce([], fn {group, key, value}, acc -> + Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}])) + end) + end + + @spec get_by_params(map()) :: ConfigDB.t() | nil + def get_by_params(params), do: Repo.get_by(ConfigDB, params) + + @spec changeset(ConfigDB.t(), map()) :: Changeset.t() + def changeset(config, params \\ %{}) do + config + |> cast(params, [:key, :group, :value]) + |> validate_required([:key, :group, :value]) + |> unique_constraint(:key, name: :config_group_key_index) + end + + defp create(params) do + %ConfigDB{} + |> changeset(params) + |> Repo.insert() + end + + defp update(%ConfigDB{} = config, %{value: value}) do + config + |> changeset(%{value: value}) + |> Repo.update() + end + + @spec get_db_keys(keyword(), any()) :: [String.t()] + def get_db_keys(value, key) do + keys = + if Keyword.keyword?(value) do + Keyword.keys(value) + else + [key] + end + + Enum.map(keys, &to_json_types(&1)) + end + + @spec 
merge_group(atom(), atom(), keyword(), keyword()) :: keyword() + def merge_group(group, key, old_value, new_value) do + new_keys = to_mapset(new_value) + + intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list() + + merged_value = ConfigDB.merge(old_value, new_value) + + @full_subkey_update + |> Enum.map(fn + {g, k, subkey} when g == group and k == key -> + if subkey in intersect_keys, do: subkey, else: [] + + _ -> + [] + end) + |> List.flatten() + |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1])) + end + + defp to_mapset(keyword) do + keyword + |> Keyword.keys() + |> MapSet.new() + end + + @spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean() + def sub_key_full_update?(group, key, subkeys) do + Enum.any?(@full_subkey_update, fn {g, k, subkey} -> + g == group and k == key and subkey in subkeys + end) + end + + @spec merge(keyword(), keyword()) :: keyword() + def merge(config1, config2) when is_list(config1) and is_list(config2) do + Keyword.merge(config1, config2, fn _, app1, app2 -> + if Keyword.keyword?(app1) and Keyword.keyword?(app2) do + Keyword.merge(app1, app2, &deep_merge/3) + else + app2 + end + end) + end + + defp deep_merge(_key, value1, value2) do + if Keyword.keyword?(value1) and Keyword.keyword?(value2) do + Keyword.merge(value1, value2, &deep_merge/3) + else + value2 + end + end + + @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} + def update_or_create(params) do + params = Map.put(params, :value, to_elixir_types(params[:value])) + search_opts = Map.take(params, [:group, :key]) + + with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), + {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config}, + {_, true, config} <- + {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do + new_value = merge_group(config.group, config.key, config.value, params[:value]) + update(config, %{value: new_value}) + else + {reason, false, config} when reason in [:partial_update, :can_be_merged] -> + update(config, params) + + nil -> + create(params) + end + end + + defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config) + + defp only_full_update?(%ConfigDB{group: group, key: key}) do + full_key_update = [ + {:pleroma, :ecto_repos}, + {:quack, :meta}, + {:mime, :types}, + {:cors_plug, [:max_age, :methods, :expose, :headers]}, + {:swarm, :node_blacklist}, + {:logger, :backends} + ] + + Enum.any?(full_key_update, fn + {s_group, s_key} -> + group == s_group and ((is_list(s_key) and key in s_key) or key == s_key) + end) + end + + @spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} + def delete(%ConfigDB{} = config), do: Repo.delete(config) + + def delete(params) do + search_opts = Map.delete(params, :subkeys) + + with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), + {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]}, + keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)), + {_, config, new_value} when new_value != [] <- + {:partial_remove, config, Keyword.drop(config.value, keys)} do + update(config, %{value: new_value}) + else + {:partial_remove, config, []} -> + Repo.delete(config) + + {config, nil} -> + Repo.delete(config) + + nil -> + err = + dgettext("errors", "Config with params %{params} not found", params: inspect(params)) + + {:error, err} + end + end + + @spec to_json_types(term()) :: map() | list() | boolean() | String.t() + def 
to_json_types(entity) when is_list(entity) do + Enum.map(entity, &to_json_types/1) + end + + def to_json_types(%Regex{} = entity), do: inspect(entity) + + def to_json_types(entity) when is_map(entity) do + Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end) + end + + def to_json_types({:args, args}) when is_list(args) do + arguments = + Enum.map(args, fn + arg when is_tuple(arg) -> inspect(arg) + arg -> to_json_types(arg) + end) + + %{"tuple" => [":args", arguments]} + end + + def to_json_types({:proxy_url, {type, :localhost, port}}) do + %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]} + end + + def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do + ip = + host + |> :inet_parse.ntoa() + |> to_string() + + %{ + "tuple" => [ + ":proxy_url", + %{"tuple" => [to_json_types(type), ip, port]} + ] + } + end + + def to_json_types({:proxy_url, {type, host, port}}) do + %{ + "tuple" => [ + ":proxy_url", + %{"tuple" => [to_json_types(type), to_string(host), port]} + ] + } + end + + def to_json_types({:partial_chain, entity}), + do: %{"tuple" => [":partial_chain", inspect(entity)]} + + def to_json_types(entity) when is_tuple(entity) do + value = + entity + |> Tuple.to_list() + |> to_json_types() + + %{"tuple" => value} + end + + def to_json_types(entity) when is_binary(entity), do: entity + + def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do + entity + end + + def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do + ":#{entity}" + end + + def to_json_types(entity) when is_atom(entity), do: inspect(entity) + + @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term() + def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do + arguments = + Enum.map(args, fn arg -> + if String.contains?(arg, ["{", "}"]) do + {elem, []} = Code.eval_string(arg) + elem + else + to_elixir_types(arg) + end + end) + + {:args, arguments} + end + + def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do + {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}} + end + + def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do + {partial_chain, []} = + entity + |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "") + |> Code.eval_string() + + {:partial_chain, partial_chain} + end + + def to_elixir_types(%{"tuple" => entity}) do + Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1))) + end + + def to_elixir_types(entity) when is_map(entity) do + Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end) + end + + def to_elixir_types(entity) when is_list(entity) do + Enum.map(entity, &to_elixir_types/1) + end + + def to_elixir_types(entity) when is_binary(entity) do + entity + |> String.trim() + |> string_to_elixir_types() + end + + def to_elixir_types(entity), do: entity + + @spec string_to_elixir_types(String.t()) :: + atom() | Regex.t() | module() | String.t() | no_return() + def string_to_elixir_types("~r" <> _pattern = regex) do + pattern = + ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u + + delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}] + + with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- + Regex.named_captures(pattern, regex), + {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter), + {result, _} <- 
Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do + result + end + end + + def string_to_elixir_types(":" <> atom), do: String.to_atom(atom) + + def string_to_elixir_types(value) do + if module_name?(value) do + String.to_existing_atom("Elixir." <> value) + else + value + end + end + + defp parse_host("localhost"), do: :localhost + + defp parse_host(host) do + charlist = to_charlist(host) + + case :inet.parse_address(charlist) do + {:error, :einval} -> + charlist + + {:ok, ip} -> + ip + end + end + + defp find_valid_delimiter([], _string, _) do + raise(ArgumentError, message: "valid delimiter for Regex expression not found") + end + + defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter) + when is_tuple(delimiter) do + if String.contains?(pattern, closing) do + find_valid_delimiter(others, pattern, regex_delimiter) + else + {:ok, {leading, closing}} + end + end + + defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do + if String.contains?(pattern, delimiter) do + find_valid_delimiter(others, pattern, regex_delimiter) + else + {:ok, {delimiter, delimiter}} + end + end + + @spec module_name?(String.t()) :: boolean() + def module_name?(string) do + Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or + string in ["Oban", "Ueberauth", "ExSyslogger"] + end +end diff --git a/lib/pleroma/conversation/participation/recipient_ship.ex b/lib/pleroma/conversation/participation/recipient_ship.ex new file mode 100644 index 000000000..de40bacac --- /dev/null +++ b/lib/pleroma/conversation/participation/recipient_ship.ex @@ -0,0 +1,34 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Conversation.Participation.RecipientShip do + use Ecto.Schema + + alias Pleroma.Conversation.Participation + alias Pleroma.Repo + alias Pleroma.User + + import Ecto.Changeset + + schema "conversation_participation_recipient_ships" do + belongs_to(:user, User, type: FlakeId.Ecto.CompatType) + belongs_to(:participation, Participation) + end + + def creation_cng(struct, params) do + struct + |> cast(params, [:user_id, :participation_id]) + |> validate_required([:user_id, :participation_id]) + end + + def create(%User{} = user, participation), do: create([user], participation) + + def create(users, participation) do + Enum.each(users, fn user -> + %__MODULE__{} + |> creation_cng(%{user_id: user.id, participation_id: participation.id}) + |> Repo.insert!() + end) + end +end diff --git a/lib/pleroma/conversation/participation_recipient_ship.ex b/lib/pleroma/conversation/participation_recipient_ship.ex deleted file mode 100644 index de40bacac..000000000 --- a/lib/pleroma/conversation/participation_recipient_ship.ex +++ /dev/null @@ -1,34 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Conversation.Participation.RecipientShip do - use Ecto.Schema - - alias Pleroma.Conversation.Participation - alias Pleroma.Repo - alias Pleroma.User - - import Ecto.Changeset - - schema "conversation_participation_recipient_ships" do - belongs_to(:user, User, type: FlakeId.Ecto.CompatType) - belongs_to(:participation, Participation) - end - - def creation_cng(struct, params) do - struct - |> cast(params, [:user_id, :participation_id]) - |> validate_required([:user_id, :participation_id]) - end - - def create(%User{} = user, 
participation), do: create([user], participation) - - def create(users, participation) do - Enum.each(users, fn user -> - %__MODULE__{} - |> creation_cng(%{user_id: user.id, participation_id: participation.id}) - |> Repo.insert!() - end) - end -end diff --git a/lib/pleroma/gun.ex b/lib/pleroma/gun.ex new file mode 100644 index 000000000..4043e4880 --- /dev/null +++ b/lib/pleroma/gun.ex @@ -0,0 +1,31 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Gun do + @callback open(charlist(), pos_integer(), map()) :: {:ok, pid()} + @callback info(pid()) :: map() + @callback close(pid()) :: :ok + @callback await_up(pid, pos_integer()) :: {:ok, atom()} | {:error, atom()} + @callback connect(pid(), map()) :: reference() + @callback await(pid(), reference()) :: {:response, :fin, 200, []} + @callback set_owner(pid(), pid()) :: :ok + + @api Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API) + + defp api, do: @api + + def open(host, port, opts), do: api().open(host, port, opts) + + def info(pid), do: api().info(pid) + + def close(pid), do: api().close(pid) + + def await_up(pid, timeout \\ 5_000), do: api().await_up(pid, timeout) + + def connect(pid, opts), do: api().connect(pid, opts) + + def await(pid, ref), do: api().await(pid, ref) + + def set_owner(pid, owner), do: api().set_owner(pid, owner) +end diff --git a/lib/pleroma/gun/gun.ex b/lib/pleroma/gun/gun.ex deleted file mode 100644 index 4043e4880..000000000 --- a/lib/pleroma/gun/gun.ex +++ /dev/null @@ -1,31 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Gun do - @callback open(charlist(), pos_integer(), map()) :: {:ok, pid()} - @callback info(pid()) :: map() - @callback close(pid()) :: :ok - @callback await_up(pid, pos_integer()) :: {:ok, atom()} | {:error, atom()} - @callback connect(pid(), map()) :: reference() - @callback await(pid(), reference()) :: {:response, :fin, 200, []} - @callback set_owner(pid(), pid()) :: :ok - - @api Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API) - - defp api, do: @api - - def open(host, port, opts), do: api().open(host, port, opts) - - def info(pid), do: api().info(pid) - - def close(pid), do: api().close(pid) - - def await_up(pid, timeout \\ 5_000), do: api().await_up(pid, timeout) - - def connect(pid, opts), do: api().connect(pid, opts) - - def await(pid, ref), do: api().await(pid, ref) - - def set_owner(pid, owner), do: api().set_owner(pid, owner) -end diff --git a/lib/pleroma/http.ex b/lib/pleroma/http.ex new file mode 100644 index 000000000..052597191 --- /dev/null +++ b/lib/pleroma/http.ex @@ -0,0 +1,110 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.HTTP do + @moduledoc """ + Wrapper for `Tesla.request/2`. + """ + + alias Pleroma.HTTP.AdapterHelper + alias Pleroma.HTTP.Request + alias Pleroma.HTTP.RequestBuilder, as: Builder + alias Tesla.Client + alias Tesla.Env + + require Logger + + @type t :: __MODULE__ + @type method() :: :get | :post | :put | :delete | :head + + @doc """ + Performs GET request. 
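
In practice the wrapper whose docs begin here is called like this (a sketch; the URL, headers and params are illustrative):

    {:ok, %Tesla.Env{status: 200, body: body}} =
      Pleroma.HTTP.get(
        "https://example.com/api/v1/instance",
        [{"accept", "application/json"}],
        # options[:params] is turned into the query string by build_request/6:
        params: [lang: "en"]
      )
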
+ + See `Pleroma.HTTP.request/5` + """ + @spec get(Request.url() | nil, Request.headers(), keyword()) :: + nil | {:ok, Env.t()} | {:error, any()} + def get(url, headers \\ [], options \\ []) + def get(nil, _, _), do: nil + def get(url, headers, options), do: request(:get, url, "", headers, options) + + @spec head(Request.url(), Request.headers(), keyword()) :: {:ok, Env.t()} | {:error, any()} + def head(url, headers \\ [], options \\ []), do: request(:head, url, "", headers, options) + + @doc """ + Performs a POST request. + + See `Pleroma.HTTP.request/5` + """ + @spec post(Request.url(), String.t(), Request.headers(), keyword()) :: + {:ok, Env.t()} | {:error, any()} + def post(url, body, headers \\ [], options \\ []), + do: request(:post, url, body, headers, options) + + @doc """ + Builds and performs an HTTP request. + + # Arguments: + `method` - :get, :post, :put, :delete, :head + `url` - full url + `body` - request body + `headers` - a keyword list of headers, e.g. `[{"content-type", "text/plain"}]` + `options` - custom, per-request middleware or adapter options + + # Returns: + `{:ok, %Tesla.Env{}}` or `{:error, error}` + + """ + @spec request(method(), Request.url(), String.t(), Request.headers(), keyword()) :: + {:ok, Env.t()} | {:error, any()} + def request(method, url, body, headers, options) when is_binary(url) do + uri = URI.parse(url) + adapter_opts = AdapterHelper.options(uri, options || []) + + options = put_in(options[:adapter], adapter_opts) + params = options[:params] || [] + request = build_request(method, headers, options, url, body, params) + + adapter = Application.get_env(:tesla, :adapter) + + client = Tesla.client(adapter_middlewares(adapter), adapter) + + maybe_limit( + fn -> + request(client, request) + end, + adapter, + adapter_opts + ) + end + + @spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()} + def request(client, request), do: Tesla.request(client, request) + + defp build_request(method, headers, options, url, body, params) do + Builder.new() + |> Builder.method(method) + |> Builder.headers(headers) + |> Builder.opts(options) + |> Builder.url(url) + |> Builder.add_param(:body, :body, body) + |> Builder.add_param(:query, :query, params) + |> Builder.convert_to_keyword() + end + + @prefix Pleroma.Gun.ConnectionPool + defp maybe_limit(fun, Tesla.Adapter.Gun, opts) do + ConcurrentLimiter.limit(:"#{@prefix}.#{opts[:pool] || :default}", fun) + end + + defp maybe_limit(fun, _, _) do + fun.() + end + + defp adapter_middlewares(Tesla.Adapter.Gun) do + [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool] + end + + defp adapter_middlewares(_), do: [] +end diff --git a/lib/pleroma/http/http.ex b/lib/pleroma/http/http.ex deleted file mode 100644 index 052597191..000000000 --- a/lib/pleroma/http/http.ex +++ /dev/null @@ -1,110 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.HTTP do - @moduledoc """ - Wrapper for `Tesla.request/2`. - """ - - alias Pleroma.HTTP.AdapterHelper - alias Pleroma.HTTP.Request - alias Pleroma.HTTP.RequestBuilder, as: Builder - alias Tesla.Client - alias Tesla.Env - - require Logger - - @type t :: __MODULE__ - @type method() :: :get | :post | :put | :delete | :head - - @doc """ - Performs GET request.
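
Looking back at Pleroma.Gun: since `@api` is resolved via `Pleroma.Config.get/2` at compile time, the behaviour exists mainly so tests can swap in a stub. A test double might look like the following sketch (the module name and return values are invented for illustration):

    defmodule GunStub do
      @behaviour Pleroma.Gun

      @impl true
      def open(_host, _port, _opts), do: {:ok, self()}
      @impl true
      def info(_pid), do: %{origin_host: 'example.com', origin_port: 443}
      @impl true
      def close(_pid), do: :ok
      @impl true
      def await_up(_pid, _timeout), do: {:ok, :http}
      @impl true
      def connect(_pid, _opts), do: make_ref()
      @impl true
      def await(_pid, _ref), do: {:response, :fin, 200, []}
      @impl true
      def set_owner(_pid, _owner), do: :ok
    end
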
- - See `Pleroma.HTTP.request/5` - """ - @spec get(Request.url() | nil, Request.headers(), keyword()) :: - nil | {:ok, Env.t()} | {:error, any()} - def get(url, headers \\ [], options \\ []) - def get(nil, _, _), do: nil - def get(url, headers, options), do: request(:get, url, "", headers, options) - - @spec head(Request.url(), Request.headers(), keyword()) :: {:ok, Env.t()} | {:error, any()} - def head(url, headers \\ [], options \\ []), do: request(:head, url, "", headers, options) - - @doc """ - Performs a POST request. - - See `Pleroma.HTTP.request/5` - """ - @spec post(Request.url(), String.t(), Request.headers(), keyword()) :: - {:ok, Env.t()} | {:error, any()} - def post(url, body, headers \\ [], options \\ []), - do: request(:post, url, body, headers, options) - - @doc """ - Builds and performs an HTTP request. - - # Arguments: - `method` - :get, :post, :put, :delete, :head - `url` - full url - `body` - request body - `headers` - a keyword list of headers, e.g. `[{"content-type", "text/plain"}]` - `options` - custom, per-request middleware or adapter options - - # Returns: - `{:ok, %Tesla.Env{}}` or `{:error, error}` - - """ - @spec request(method(), Request.url(), String.t(), Request.headers(), keyword()) :: - {:ok, Env.t()} | {:error, any()} - def request(method, url, body, headers, options) when is_binary(url) do - uri = URI.parse(url) - adapter_opts = AdapterHelper.options(uri, options || []) - - options = put_in(options[:adapter], adapter_opts) - params = options[:params] || [] - request = build_request(method, headers, options, url, body, params) - - adapter = Application.get_env(:tesla, :adapter) - - client = Tesla.client(adapter_middlewares(adapter), adapter) - - maybe_limit( - fn -> - request(client, request) - end, - adapter, - adapter_opts - ) - end - - @spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()} - def request(client, request), do: Tesla.request(client, request) - - defp build_request(method, headers, options, url, body, params) do - Builder.new() - |> Builder.method(method) - |> Builder.headers(headers) - |> Builder.opts(options) - |> Builder.url(url) - |> Builder.add_param(:body, :body, body) - |> Builder.add_param(:query, :query, params) - |> Builder.convert_to_keyword() - end - - @prefix Pleroma.Gun.ConnectionPool - defp maybe_limit(fun, Tesla.Adapter.Gun, opts) do - ConcurrentLimiter.limit(:"#{@prefix}.#{opts[:pool] || :default}", fun) - end - - defp maybe_limit(fun, _, _) do - fun.() - end - - defp adapter_middlewares(Tesla.Adapter.Gun) do - [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool] - end - - defp adapter_middlewares(_), do: [] -end diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex new file mode 100644 index 000000000..8ae1157df --- /dev/null +++ b/lib/pleroma/reverse_proxy.ex @@ -0,0 +1,432 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ReverseProxy do + @range_headers ~w(range if-range) + @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++ + ~w(if-unmodified-since if-none-match) ++ @range_headers + @resp_cache_headers ~w(etag date last-modified) + @keep_resp_headers @resp_cache_headers ++ + ~w(content-length content-type content-disposition content-encoding) ++ + ~w(content-range accept-ranges vary) + @default_cache_control_header "public, max-age=1209600" + @valid_resp_codes [200, 206, 304] + @max_read_duration :timer.seconds(30) +
@max_body_length :infinity + @failed_request_ttl :timer.seconds(60) + @methods ~w(GET HEAD) + + def max_read_duration_default, do: @max_read_duration + def default_cache_control_header, do: @default_cache_control_header + + @moduledoc """ + A reverse proxy. + + Pleroma.ReverseProxy.call(conn, url, options) + + It is not meant to be added into a plug pipeline, but to be called from another plug or controller. + + Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes. + + Responses are chunked to the client while downloading from the upstream. + + Some request / response headers are preserved: + + * request: `#{inspect(@keep_req_headers)}` + * response: `#{inspect(@keep_resp_headers)}` + + Options: + + * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there are any HTTP + errors. Any error during body processing will not be redirected as the response is chunked. This may expose the + remote URL, clients' IPs, etc. + + * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the + specified length. It is validated with the `content-length` header and also verified when proxying. + + * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to + read from the remote upstream. + + * `failed_request_ttl` (default `#{inspect(@failed_request_ttl)}` ms): the time the failed request is cached and cannot be retried. + + * `inline_content_types`: + * `true` will not alter `content-disposition` (up to the upstream), + * `false` will add `content-disposition: attachment` to any request, + * a list of whitelisted content types + + * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is + doing content transformation (encoding, …) depending on the request. + + * `req_headers`, `resp_headers` additional headers. + + * `http`: options for [hackney](https://github.com/benoitc/hackney) or [gun](https://github.com/ninenines/gun).
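
Taken together, a typical call site for these options looks like the following sketch (the action name and values are arbitrary):

    # In some Phoenix controller action:
    def proxy(conn, %{"url" => url}) do
      Pleroma.ReverseProxy.call(conn, url,
        max_body_length: 25 * 1_048_576,
        max_read_duration: :timer.seconds(30),
        redirect_on_failure: true
      )
    end
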
+ + """ + @default_options [pool: :media] + + @inline_content_types [ + "image/gif", + "image/jpeg", + "image/jpg", + "image/png", + "image/svg+xml", + "audio/mpeg", + "audio/mp3", + "video/webm", + "video/mp4", + "video/quicktime" + ] + + require Logger + import Plug.Conn + + @type option() :: + {:keep_user_agent, boolean} + | {:max_read_duration, :timer.time() | :infinity} + | {:max_body_length, non_neg_integer() | :infinity} + | {:failed_request_ttl, :timer.time() | :infinity} + | {:http, []} + | {:req_headers, [{String.t(), String.t()}]} + | {:resp_headers, [{String.t(), String.t()}]} + | {:inline_content_types, boolean() | [String.t()]} + | {:redirect_on_failure, boolean()} + + @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t() + def call(_conn, _url, _opts \\ []) + + def call(conn = %{method: method}, url, opts) when method in @methods do + client_opts = Keyword.merge(@default_options, Keyword.get(opts, :http, [])) + + req_headers = build_req_headers(conn.req_headers, opts) + + opts = + if filename = Pleroma.Web.MediaProxy.filename(url) do + Keyword.put_new(opts, :attachment_name, filename) + else + opts + end + + with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url), + {:ok, code, headers, client} <- request(method, url, req_headers, client_opts), + :ok <- + header_length_constraint( + headers, + Keyword.get(opts, :max_body_length, @max_body_length) + ) do + response(conn, client, url, code, headers, opts) + else + {:ok, true} -> + conn + |> error_or_redirect(url, 500, "Request failed", opts) + |> halt() + + {:ok, code, headers} -> + head_response(conn, url, code, headers, opts) + |> halt() + + {:error, {:invalid_http_response, code}} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}") + track_failed_url(url, code, opts) + + conn + |> error_or_redirect( + url, + code, + "Request failed: " <> Plug.Conn.Status.reason_phrase(code), + opts + ) + |> halt() + + {:error, error} -> + Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}") + track_failed_url(url, error, opts) + + conn + |> error_or_redirect(url, 500, "Request failed", opts) + |> halt() + end + end + + def call(conn, _, _) do + conn + |> send_resp(400, Plug.Conn.Status.reason_phrase(400)) + |> halt() + end + + defp request(method, url, headers, opts) do + Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") + method = method |> String.downcase() |> String.to_existing_atom() + + case client().request(method, url, headers, "", opts) do + {:ok, code, headers, client} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers), client} + + {:ok, code, headers} when code in @valid_resp_codes -> + {:ok, code, downcase_headers(headers)} + + {:ok, code, _, _} -> + {:error, {:invalid_http_response, code}} + + {:ok, code, _} -> + {:error, {:invalid_http_response, code}} + + {:error, error} -> + {:error, error} + end + end + + defp response(conn, client, url, status, headers, opts) do + Logger.debug("#{__MODULE__} #{status} #{url} #{inspect(headers)}") + + result = + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_chunked(status) + |> chunk_reply(client, opts) + + case result do + {:ok, conn} -> + halt(conn) + + {:error, :closed, conn} -> + client().close(client) + halt(conn) + + {:error, error, conn} -> + Logger.warn( + "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}" + ) + + client().close(client) + halt(conn) + end + end + + defp chunk_reply(conn, 
client, opts) do + chunk_reply(conn, client, opts, 0, 0) + end + + defp chunk_reply(conn, client, opts, sent_so_far, duration) do + with {:ok, duration} <- + check_read_duration( + duration, + Keyword.get(opts, :max_read_duration, @max_read_duration) + ), + {:ok, data, client} <- client().stream_body(client), + {:ok, duration} <- increase_read_duration(duration), + sent_so_far = sent_so_far + byte_size(data), + :ok <- + body_size_constraint( + sent_so_far, + Keyword.get(opts, :max_body_length, @max_body_length) + ), + {:ok, conn} <- chunk(conn, data) do + chunk_reply(conn, client, opts, sent_so_far, duration) + else + :done -> {:ok, conn} + {:error, error} -> {:error, error, conn} + end + end + + defp head_response(conn, url, code, headers, opts) do + Logger.debug("#{__MODULE__} #{code} #{url} #{inspect(headers)}") + + conn + |> put_resp_headers(build_resp_headers(headers, opts)) + |> send_resp(code, "") + end + + defp error_or_redirect(conn, url, code, body, opts) do + if Keyword.get(opts, :redirect_on_failure, false) do + conn + |> Phoenix.Controller.redirect(external: url) + |> halt() + else + conn + |> send_resp(code, body) + |> halt + end + end + + defp downcase_headers(headers) do + Enum.map(headers, fn {k, v} -> + {String.downcase(k), v} + end) + end + + defp get_content_type(headers) do + {_, content_type} = + List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"}) + + [content_type | _] = String.split(content_type, ";") + content_type + end + + defp put_resp_headers(conn, headers) do + Enum.reduce(headers, conn, fn {k, v}, conn -> + put_resp_header(conn, k, v) + end) + end + + defp build_req_headers(headers, opts) do + headers + |> downcase_headers() + |> Enum.filter(fn {k, _} -> k in @keep_req_headers end) + |> build_req_range_or_encoding_header(opts) + |> build_req_user_agent_header(opts) + |> Keyword.merge(Keyword.get(opts, :req_headers, [])) + end + + # Disable content-encoding if any @range_headers are requested (see #1823). + defp build_req_range_or_encoding_header(headers, _opts) do + range? = Enum.any?(headers, fn {header, _} -> Enum.member?(@range_headers, header) end) + + if range? && List.keymember?(headers, "accept-encoding", 0) do + List.keydelete(headers, "accept-encoding", 0) + else + headers + end + end + + defp build_req_user_agent_header(headers, opts) do + if Keyword.get(opts, :keep_user_agent, false) do + List.keystore( + headers, + "user-agent", + 0, + {"user-agent", Pleroma.Application.user_agent()} + ) + else + headers + end + end + + defp build_resp_headers(headers, opts) do + headers + |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end) + |> build_resp_cache_headers(opts) + |> build_resp_content_disposition_header(opts) + |> Keyword.merge(Keyword.get(opts, :resp_headers, [])) + end + + defp build_resp_cache_headers(headers, _opts) do + has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end) + + cond do + has_cache? -> + # There's caching header present but no cache-control -- we need to set our own + # as Plug defaults to "max-age=0, private, must-revalidate" + List.keystore( + headers, + "cache-control", + 0, + {"cache-control", @default_cache_control_header} + ) + + true -> + List.keystore( + headers, + "cache-control", + 0, + {"cache-control", @default_cache_control_header} + ) + end + end + + defp build_resp_content_disposition_header(headers, opts) do + opt = Keyword.get(opts, :inline_content_types, @inline_content_types) + + content_type = get_content_type(headers) + + attachment? 
= + cond do + is_list(opt) && !Enum.member?(opt, content_type) -> true + opt == false -> true + true -> false + end + + if attachment? do + name = + try do + {{"content-disposition", content_disposition_string}, _} = + List.keytake(headers, "content-disposition", 0) + + [name | _] = + Regex.run( + ~r/filename="((?:[^"\\]|\\.)*)"/u, + content_disposition_string || "", + capture: :all_but_first + ) + + name + rescue + MatchError -> Keyword.get(opts, :attachment_name, "attachment") + end + + disposition = "attachment; filename=\"#{name}\"" + + List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) + else + headers + end + end + + defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do + with {_, size} <- List.keyfind(headers, "content-length", 0), + {size, _} <- Integer.parse(size), + true <- size <= limit do + :ok + else + false -> + {:error, :body_too_large} + + _ -> + :ok + end + end + + defp header_length_constraint(_, _), do: :ok + + defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do + {:error, :body_too_large} + end + + defp body_size_constraint(_, _), do: :ok + + defp check_read_duration(nil = _duration, max), do: check_read_duration(@max_read_duration, max) + + defp check_read_duration(duration, max) + when is_integer(duration) and is_integer(max) and max > 0 do + if duration > max do + {:error, :read_duration_exceeded} + else + {:ok, {duration, :erlang.system_time(:millisecond)}} + end + end + + defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit} + + defp increase_read_duration({previous_duration, started}) + when is_integer(previous_duration) and is_integer(started) do + duration = :erlang.system_time(:millisecond) - started + {:ok, previous_duration + duration} + end + + defp increase_read_duration(_) do + {:ok, :no_duration_limit, :no_duration_limit} + end + + defp client, do: Pleroma.ReverseProxy.Client + + defp track_failed_url(url, error, opts) do + ttl = + unless error in [:body_too_large, 400, 204] do + Keyword.get(opts, :failed_request_ttl, @failed_request_ttl) + else + nil + end + + Cachex.put(:failed_proxy_url_cache, url, true, ttl: ttl) + end +end diff --git a/lib/pleroma/reverse_proxy/reverse_proxy.ex b/lib/pleroma/reverse_proxy/reverse_proxy.ex deleted file mode 100644 index 8ae1157df..000000000 --- a/lib/pleroma/reverse_proxy/reverse_proxy.ex +++ /dev/null @@ -1,432 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.ReverseProxy do - @range_headers ~w(range if-range) - @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++ - ~w(if-unmodified-since if-none-match) ++ @range_headers - @resp_cache_headers ~w(etag date last-modified) - @keep_resp_headers @resp_cache_headers ++ - ~w(content-length content-type content-disposition content-encoding) ++ - ~w(content-range accept-ranges vary) - @default_cache_control_header "public, max-age=1209600" - @valid_resp_codes [200, 206, 304] - @max_read_duration :timer.seconds(30) - @max_body_length :infinity - @failed_request_ttl :timer.seconds(60) - @methods ~w(GET HEAD) - - def max_read_duration_default, do: @max_read_duration - def default_cache_control_header, do: @default_cache_control_header - - @moduledoc """ - A reverse proxy. 
- - Pleroma.ReverseProxy.call(conn, url, options) - - It is not meant to be added into a plug pipeline, but to be called from another plug or controller. - - Supports `#{inspect(@methods)}` HTTP methods, and only allows `#{inspect(@valid_resp_codes)}` status codes. - - Responses are chunked to the client while downloading from the upstream. - - Some request / responses headers are preserved: - - * request: `#{inspect(@keep_req_headers)}` - * response: `#{inspect(@keep_resp_headers)}` - - Options: - - * `redirect_on_failure` (default `false`). Redirects the client to the real remote URL if there's any HTTP - errors. Any error during body processing will not be redirected as the response is chunked. This may expose - remote URL, clients IPs, …. - - * `max_body_length` (default `#{inspect(@max_body_length)}`): limits the content length to be approximately the - specified length. It is validated with the `content-length` header and also verified when proxying. - - * `max_read_duration` (default `#{inspect(@max_read_duration)}` ms): the total time the connection is allowed to - read from the remote upstream. - - * `failed_request_ttl` (default `#{inspect(@failed_request_ttl)}` ms): the time the failed request is cached and cannot be retried. - - * `inline_content_types`: - * `true` will not alter `content-disposition` (up to the upstream), - * `false` will add `content-disposition: attachment` to any request, - * a list of whitelisted content types - - * `keep_user_agent` will forward the client's user-agent to the upstream. This may be useful if the upstream is - doing content transformation (encoding, …) depending on the request. - - * `req_headers`, `resp_headers` additional headers. - - * `http`: options for [hackney](https://github.com/benoitc/hackney) or [gun](https://github.com/ninenines/gun). 
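- As a usage sketch, a controller action can hand the connection straight to
- the proxy (the action name and option values here are illustrative, not
- defaults):
-
-     def proxy(conn, %{"url" => url}) do
-       Pleroma.ReverseProxy.call(conn, url,
-         max_body_length: 25 * 1_048_576,
-         redirect_on_failure: true
-       )
-     end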
- - """ - @default_options [pool: :media] - - @inline_content_types [ - "image/gif", - "image/jpeg", - "image/jpg", - "image/png", - "image/svg+xml", - "audio/mpeg", - "audio/mp3", - "video/webm", - "video/mp4", - "video/quicktime" - ] - - require Logger - import Plug.Conn - - @type option() :: - {:keep_user_agent, boolean} - | {:max_read_duration, :timer.time() | :infinity} - | {:max_body_length, non_neg_integer() | :infinity} - | {:failed_request_ttl, :timer.time() | :infinity} - | {:http, []} - | {:req_headers, [{String.t(), String.t()}]} - | {:resp_headers, [{String.t(), String.t()}]} - | {:inline_content_types, boolean() | [String.t()]} - | {:redirect_on_failure, boolean()} - - @spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t() - def call(_conn, _url, _opts \\ []) - - def call(conn = %{method: method}, url, opts) when method in @methods do - client_opts = Keyword.merge(@default_options, Keyword.get(opts, :http, [])) - - req_headers = build_req_headers(conn.req_headers, opts) - - opts = - if filename = Pleroma.Web.MediaProxy.filename(url) do - Keyword.put_new(opts, :attachment_name, filename) - else - opts - end - - with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url), - {:ok, code, headers, client} <- request(method, url, req_headers, client_opts), - :ok <- - header_length_constraint( - headers, - Keyword.get(opts, :max_body_length, @max_body_length) - ) do - response(conn, client, url, code, headers, opts) - else - {:ok, true} -> - conn - |> error_or_redirect(url, 500, "Request failed", opts) - |> halt() - - {:ok, code, headers} -> - head_response(conn, url, code, headers, opts) - |> halt() - - {:error, {:invalid_http_response, code}} -> - Logger.error("#{__MODULE__}: request to #{inspect(url)} failed with HTTP status #{code}") - track_failed_url(url, code, opts) - - conn - |> error_or_redirect( - url, - code, - "Request failed: " <> Plug.Conn.Status.reason_phrase(code), - opts - ) - |> halt() - - {:error, error} -> - Logger.error("#{__MODULE__}: request to #{inspect(url)} failed: #{inspect(error)}") - track_failed_url(url, error, opts) - - conn - |> error_or_redirect(url, 500, "Request failed", opts) - |> halt() - end - end - - def call(conn, _, _) do - conn - |> send_resp(400, Plug.Conn.Status.reason_phrase(400)) - |> halt() - end - - defp request(method, url, headers, opts) do - Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") - method = method |> String.downcase() |> String.to_existing_atom() - - case client().request(method, url, headers, "", opts) do - {:ok, code, headers, client} when code in @valid_resp_codes -> - {:ok, code, downcase_headers(headers), client} - - {:ok, code, headers} when code in @valid_resp_codes -> - {:ok, code, downcase_headers(headers)} - - {:ok, code, _, _} -> - {:error, {:invalid_http_response, code}} - - {:ok, code, _} -> - {:error, {:invalid_http_response, code}} - - {:error, error} -> - {:error, error} - end - end - - defp response(conn, client, url, status, headers, opts) do - Logger.debug("#{__MODULE__} #{status} #{url} #{inspect(headers)}") - - result = - conn - |> put_resp_headers(build_resp_headers(headers, opts)) - |> send_chunked(status) - |> chunk_reply(client, opts) - - case result do - {:ok, conn} -> - halt(conn) - - {:error, :closed, conn} -> - client().close(client) - halt(conn) - - {:error, error, conn} -> - Logger.warn( - "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}" - ) - - client().close(client) - halt(conn) - end - end - - defp chunk_reply(conn, 
client, opts) do - chunk_reply(conn, client, opts, 0, 0) - end - - defp chunk_reply(conn, client, opts, sent_so_far, duration) do - with {:ok, duration} <- - check_read_duration( - duration, - Keyword.get(opts, :max_read_duration, @max_read_duration) - ), - {:ok, data, client} <- client().stream_body(client), - {:ok, duration} <- increase_read_duration(duration), - sent_so_far = sent_so_far + byte_size(data), - :ok <- - body_size_constraint( - sent_so_far, - Keyword.get(opts, :max_body_length, @max_body_length) - ), - {:ok, conn} <- chunk(conn, data) do - chunk_reply(conn, client, opts, sent_so_far, duration) - else - :done -> {:ok, conn} - {:error, error} -> {:error, error, conn} - end - end - - defp head_response(conn, url, code, headers, opts) do - Logger.debug("#{__MODULE__} #{code} #{url} #{inspect(headers)}") - - conn - |> put_resp_headers(build_resp_headers(headers, opts)) - |> send_resp(code, "") - end - - defp error_or_redirect(conn, url, code, body, opts) do - if Keyword.get(opts, :redirect_on_failure, false) do - conn - |> Phoenix.Controller.redirect(external: url) - |> halt() - else - conn - |> send_resp(code, body) - |> halt - end - end - - defp downcase_headers(headers) do - Enum.map(headers, fn {k, v} -> - {String.downcase(k), v} - end) - end - - defp get_content_type(headers) do - {_, content_type} = - List.keyfind(headers, "content-type", 0, {"content-type", "application/octet-stream"}) - - [content_type | _] = String.split(content_type, ";") - content_type - end - - defp put_resp_headers(conn, headers) do - Enum.reduce(headers, conn, fn {k, v}, conn -> - put_resp_header(conn, k, v) - end) - end - - defp build_req_headers(headers, opts) do - headers - |> downcase_headers() - |> Enum.filter(fn {k, _} -> k in @keep_req_headers end) - |> build_req_range_or_encoding_header(opts) - |> build_req_user_agent_header(opts) - |> Keyword.merge(Keyword.get(opts, :req_headers, [])) - end - - # Disable content-encoding if any @range_headers are requested (see #1823). - defp build_req_range_or_encoding_header(headers, _opts) do - range? = Enum.any?(headers, fn {header, _} -> Enum.member?(@range_headers, header) end) - - if range? && List.keymember?(headers, "accept-encoding", 0) do - List.keydelete(headers, "accept-encoding", 0) - else - headers - end - end - - defp build_req_user_agent_header(headers, opts) do - if Keyword.get(opts, :keep_user_agent, false) do - List.keystore( - headers, - "user-agent", - 0, - {"user-agent", Pleroma.Application.user_agent()} - ) - else - headers - end - end - - defp build_resp_headers(headers, opts) do - headers - |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end) - |> build_resp_cache_headers(opts) - |> build_resp_content_disposition_header(opts) - |> Keyword.merge(Keyword.get(opts, :resp_headers, [])) - end - - defp build_resp_cache_headers(headers, _opts) do - has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end) - - cond do - has_cache? -> - # There's caching header present but no cache-control -- we need to set our own - # as Plug defaults to "max-age=0, private, must-revalidate" - List.keystore( - headers, - "cache-control", - 0, - {"cache-control", @default_cache_control_header} - ) - - true -> - List.keystore( - headers, - "cache-control", - 0, - {"cache-control", @default_cache_control_header} - ) - end - end - - defp build_resp_content_disposition_header(headers, opts) do - opt = Keyword.get(opts, :inline_content_types, @inline_content_types) - - content_type = get_content_type(headers) - - attachment? 
= - cond do - is_list(opt) && !Enum.member?(opt, content_type) -> true - opt == false -> true - true -> false - end - - if attachment? do - name = - try do - {{"content-disposition", content_disposition_string}, _} = - List.keytake(headers, "content-disposition", 0) - - [name | _] = - Regex.run( - ~r/filename="((?:[^"\\]|\\.)*)"/u, - content_disposition_string || "", - capture: :all_but_first - ) - - name - rescue - MatchError -> Keyword.get(opts, :attachment_name, "attachment") - end - - disposition = "attachment; filename=\"#{name}\"" - - List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) - else - headers - end - end - - defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do - with {_, size} <- List.keyfind(headers, "content-length", 0), - {size, _} <- Integer.parse(size), - true <- size <= limit do - :ok - else - false -> - {:error, :body_too_large} - - _ -> - :ok - end - end - - defp header_length_constraint(_, _), do: :ok - - defp body_size_constraint(size, limit) when is_integer(limit) and limit > 0 and size >= limit do - {:error, :body_too_large} - end - - defp body_size_constraint(_, _), do: :ok - - defp check_read_duration(nil = _duration, max), do: check_read_duration(@max_read_duration, max) - - defp check_read_duration(duration, max) - when is_integer(duration) and is_integer(max) and max > 0 do - if duration > max do - {:error, :read_duration_exceeded} - else - {:ok, {duration, :erlang.system_time(:millisecond)}} - end - end - - defp check_read_duration(_, _), do: {:ok, :no_duration_limit, :no_duration_limit} - - defp increase_read_duration({previous_duration, started}) - when is_integer(previous_duration) and is_integer(started) do - duration = :erlang.system_time(:millisecond) - started - {:ok, previous_duration + duration} - end - - defp increase_read_duration(_) do - {:ok, :no_duration_limit, :no_duration_limit} - end - - defp client, do: Pleroma.ReverseProxy.Client - - defp track_failed_url(url, error, opts) do - ttl = - unless error in [:body_too_large, 400, 204] do - Keyword.get(opts, :failed_request_ttl, @failed_request_ttl) - else - nil - end - - Cachex.put(:failed_proxy_url_cache, url, true, ttl: ttl) - end -end diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex new file mode 100644 index 000000000..60a50b027 --- /dev/null +++ b/lib/pleroma/web/common_api.ex @@ -0,0 +1,573 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.CommonAPI do + alias Pleroma.Activity + alias Pleroma.Conversation.Participation + alias Pleroma.Formatter + alias Pleroma.Object + alias Pleroma.ThreadMute + alias Pleroma.User + alias Pleroma.UserRelationship + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.ActivityPub.Builder + alias Pleroma.Web.ActivityPub.Pipeline + alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.ActivityPub.Visibility + + import Pleroma.Web.Gettext + import Pleroma.Web.CommonAPI.Utils + + require Pleroma.Constants + require Logger + + def block(blocker, blocked) do + with {:ok, block_data, _} <- Builder.block(blocker, blocked), + {:ok, block, _} <- Pipeline.common_pipeline(block_data, local: true) do + {:ok, block} + end + end + + def post_chat_message(%User{} = user, %User{} = recipient, content, opts \\ []) do + with maybe_attachment <- opts[:media_id] && Object.get_by_id(opts[:media_id]), + :ok <- validate_chat_content_length(content, 
!!maybe_attachment), + {_, {:ok, chat_message_data, _meta}} <- + {:build_object, + Builder.chat_message( + user, + recipient.ap_id, + content |> format_chat_content, + attachment: maybe_attachment + )}, + {_, {:ok, create_activity_data, _meta}} <- + {:build_create_activity, Builder.create(user, chat_message_data, [recipient.ap_id])}, + {_, {:ok, %Activity{} = activity, _meta}} <- + {:common_pipeline, + Pipeline.common_pipeline(create_activity_data, + local: true + )} do + {:ok, activity} + else + {:common_pipeline, {:reject, _} = e} -> e + e -> e + end + end + + defp format_chat_content(nil), do: nil + + defp format_chat_content(content) do + {text, _, _} = + content + |> Formatter.html_escape("text/plain") + |> Formatter.linkify() + |> (fn {text, mentions, tags} -> + {String.replace(text, ~r/\r?\n/, "
<br>
"), mentions, tags} + end).() + + text + end + + defp validate_chat_content_length(_, true), do: :ok + defp validate_chat_content_length(nil, false), do: {:error, :no_content} + + defp validate_chat_content_length(content, _) do + if String.length(content) <= Pleroma.Config.get([:instance, :chat_limit]) do + :ok + else + {:error, :content_too_long} + end + end + + def unblock(blocker, blocked) do + with {_, %Activity{} = block} <- {:fetch_block, Utils.fetch_latest_block(blocker, blocked)}, + {:ok, unblock_data, _} <- Builder.undo(blocker, block), + {:ok, unblock, _} <- Pipeline.common_pipeline(unblock_data, local: true) do + {:ok, unblock} + else + {:fetch_block, nil} -> + if User.blocks?(blocker, blocked) do + User.unblock(blocker, blocked) + {:ok, :no_activity} + else + {:error, :not_blocking} + end + + e -> + e + end + end + + def follow(follower, followed) do + timeout = Pleroma.Config.get([:activitypub, :follow_handshake_timeout]) + + with {:ok, follow_data, _} <- Builder.follow(follower, followed), + {:ok, activity, _} <- Pipeline.common_pipeline(follow_data, local: true), + {:ok, follower, followed} <- User.wait_and_refresh(timeout, follower, followed) do + if activity.data["state"] == "reject" do + {:error, :rejected} + else + {:ok, follower, followed, activity} + end + end + end + + def unfollow(follower, unfollowed) do + with {:ok, follower, _follow_activity} <- User.unfollow(follower, unfollowed), + {:ok, _activity} <- ActivityPub.unfollow(follower, unfollowed), + {:ok, _subscription} <- User.unsubscribe(follower, unfollowed) do + {:ok, follower} + end + end + + def accept_follow_request(follower, followed) do + with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), + {:ok, accept_data, _} <- Builder.accept(followed, follow_activity), + {:ok, _activity, _} <- Pipeline.common_pipeline(accept_data, local: true) do + {:ok, follower} + end + end + + def reject_follow_request(follower, followed) do + with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), + {:ok, reject_data, _} <- Builder.reject(followed, follow_activity), + {:ok, _activity, _} <- Pipeline.common_pipeline(reject_data, local: true) do + {:ok, follower} + end + end + + def delete(activity_id, user) do + with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <- + {:find_activity, Activity.get_by_id(activity_id)}, + {_, %Object{} = object, _} <- + {:find_object, Object.normalize(activity, false), activity}, + true <- User.superuser?(user) || user.ap_id == object.data["actor"], + {:ok, delete_data, _} <- Builder.delete(user, object.data["id"]), + {:ok, delete, _} <- Pipeline.common_pipeline(delete_data, local: true) do + {:ok, delete} + else + {:find_activity, _} -> + {:error, :not_found} + + {:find_object, nil, %Activity{data: %{"actor" => actor, "object" => object}}} -> + # We have the create activity, but not the object, it was probably pruned. + # Insert a tombstone and try again + with {:ok, tombstone_data, _} <- Builder.tombstone(actor, object), + {:ok, _tombstone} <- Object.create(tombstone_data) do + delete(activity_id, user) + else + _ -> + Logger.error( + "Could not insert tombstone for missing object on deletion. Object is #{object}." 
+ ) + + {:error, dgettext("errors", "Could not delete")} + end + + _ -> + {:error, dgettext("errors", "Could not delete")} + end + end + + def repeat(id, user, params \\ %{}) do + with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id), + object = %Object{} <- Object.normalize(activity, false), + {_, nil} <- {:existing_announce, Utils.get_existing_announce(user.ap_id, object)}, + public = public_announce?(object, params), + {:ok, announce, _} <- Builder.announce(user, object, public: public), + {:ok, activity, _} <- Pipeline.common_pipeline(announce, local: true) do + {:ok, activity} + else + {:existing_announce, %Activity{} = announce} -> + {:ok, announce} + + _ -> + {:error, :not_found} + end + end + + def unrepeat(id, user) do + with {_, %Activity{data: %{"type" => "Create"}} = activity} <- + {:find_activity, Activity.get_by_id(id)}, + %Object{} = note <- Object.normalize(activity, false), + %Activity{} = announce <- Utils.get_existing_announce(user.ap_id, note), + {:ok, undo, _} <- Builder.undo(user, announce), + {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do + {:ok, activity} + else + {:find_activity, _} -> {:error, :not_found} + _ -> {:error, dgettext("errors", "Could not unrepeat")} + end + end + + @spec favorite(User.t(), binary()) :: {:ok, Activity.t() | :already_liked} | {:error, any()} + def favorite(%User{} = user, id) do + case favorite_helper(user, id) do + {:ok, _} = res -> + res + + {:error, :not_found} = res -> + res + + {:error, e} -> + Logger.error("Could not favorite #{id}. Error: #{inspect(e, pretty: true)}") + {:error, dgettext("errors", "Could not favorite")} + end + end + + def favorite_helper(user, id) do + with {_, %Activity{object: object}} <- {:find_object, Activity.get_by_id_with_object(id)}, + {_, {:ok, like_object, meta}} <- {:build_object, Builder.like(user, object)}, + {_, {:ok, %Activity{} = activity, _meta}} <- + {:common_pipeline, + Pipeline.common_pipeline(like_object, Keyword.put(meta, :local, true))} do + {:ok, activity} + else + {:find_object, _} -> + {:error, :not_found} + + {:common_pipeline, + { + :error, + { + :validate_object, + { + :error, + changeset + } + } + }} = e -> + if {:object, {"already liked by this actor", []}} in changeset.errors do + {:ok, :already_liked} + else + {:error, e} + end + + e -> + {:error, e} + end + end + + def unfavorite(id, user) do + with {_, %Activity{data: %{"type" => "Create"}} = activity} <- + {:find_activity, Activity.get_by_id(id)}, + %Object{} = note <- Object.normalize(activity, false), + %Activity{} = like <- Utils.get_existing_like(user.ap_id, note), + {:ok, undo, _} <- Builder.undo(user, like), + {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do + {:ok, activity} + else + {:find_activity, _} -> {:error, :not_found} + _ -> {:error, dgettext("errors", "Could not unfavorite")} + end + end + + def react_with_emoji(id, user, emoji) do + with %Activity{} = activity <- Activity.get_by_id(id), + object <- Object.normalize(activity), + {:ok, emoji_react, _} <- Builder.emoji_react(user, object, emoji), + {:ok, activity, _} <- Pipeline.common_pipeline(emoji_react, local: true) do + {:ok, activity} + else + _ -> + {:error, dgettext("errors", "Could not add reaction emoji")} + end + end + + def unreact_with_emoji(id, user, emoji) do + with %Activity{} = reaction_activity <- Utils.get_latest_reaction(id, user, emoji), + {:ok, undo, _} <- Builder.undo(user, reaction_activity), + {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do + {:ok, 
activity} + else + _ -> + {:error, dgettext("errors", "Could not remove reaction emoji")} + end + end + + def vote(user, %{data: %{"type" => "Question"}} = object, choices) do + with :ok <- validate_not_author(object, user), + :ok <- validate_existing_votes(user, object), + {:ok, options, choices} <- normalize_and_validate_choices(choices, object) do + answer_activities = + Enum.map(choices, fn index -> + {:ok, answer_object, _meta} = + Builder.answer(user, object, Enum.at(options, index)["name"]) + + {:ok, activity_data, _meta} = Builder.create(user, answer_object, []) + + {:ok, activity, _meta} = + activity_data + |> Map.put("cc", answer_object["cc"]) + |> Map.put("context", answer_object["context"]) + |> Pipeline.common_pipeline(local: true) + + # TODO: Do preload of Pleroma.Object in Pipeline + Activity.normalize(activity.data) + end) + + object = Object.get_cached_by_ap_id(object.data["id"]) + {:ok, answer_activities, object} + end + end + + defp validate_not_author(%{data: %{"actor" => ap_id}}, %{ap_id: ap_id}), + do: {:error, dgettext("errors", "Poll's author can't vote")} + + defp validate_not_author(_, _), do: :ok + + defp validate_existing_votes(%{ap_id: ap_id}, object) do + if Utils.get_existing_votes(ap_id, object) == [] do + :ok + else + {:error, dgettext("errors", "Already voted")} + end + end + + defp get_options_and_max_count(%{data: %{"anyOf" => any_of}}) + when is_list(any_of) and any_of != [], + do: {any_of, Enum.count(any_of)} + + defp get_options_and_max_count(%{data: %{"oneOf" => one_of}}) + when is_list(one_of) and one_of != [], + do: {one_of, 1} + + defp normalize_and_validate_choices(choices, object) do + choices = Enum.map(choices, fn i -> if is_binary(i), do: String.to_integer(i), else: i end) + {options, max_count} = get_options_and_max_count(object) + count = Enum.count(options) + + with {_, true} <- {:valid_choice, Enum.all?(choices, &(&1 < count))}, + {_, true} <- {:count_check, Enum.count(choices) <= max_count} do + {:ok, options, choices} + else + {:valid_choice, _} -> {:error, dgettext("errors", "Invalid indices")} + {:count_check, _} -> {:error, dgettext("errors", "Too many choices")} + end + end + + def public_announce?(_, %{visibility: visibility}) + when visibility in ~w{public unlisted private direct}, + do: visibility in ~w(public unlisted) + + def public_announce?(object, _) do + Visibility.is_public?(object) + end + + def get_visibility(_, _, %Participation{}), do: {"direct", "direct"} + + def get_visibility(%{visibility: visibility}, in_reply_to, _) + when visibility in ~w{public unlisted private direct}, + do: {visibility, get_replied_to_visibility(in_reply_to)} + + def get_visibility(%{visibility: "list:" <> list_id}, in_reply_to, _) do + visibility = {:list, String.to_integer(list_id)} + {visibility, get_replied_to_visibility(in_reply_to)} + end + + def get_visibility(_, in_reply_to, _) when not is_nil(in_reply_to) do + visibility = get_replied_to_visibility(in_reply_to) + {visibility, visibility} + end + + def get_visibility(_, in_reply_to, _), do: {"public", get_replied_to_visibility(in_reply_to)} + + def get_replied_to_visibility(nil), do: nil + + def get_replied_to_visibility(activity) do + with %Object{} = object <- Object.normalize(activity) do + Visibility.get_visibility(object) + end + end + + def check_expiry_date({:ok, nil} = res), do: res + + def check_expiry_date({:ok, in_seconds}) do + expiry = DateTime.add(DateTime.utc_now(), in_seconds) + + if Pleroma.Workers.PurgeExpiredActivity.expires_late_enough?(expiry) do + {:ok, expiry} + 
else + {:error, "Expiry date is too soon"} + end + end + + def check_expiry_date(expiry_str) do + Ecto.Type.cast(:integer, expiry_str) + |> check_expiry_date() + end + + def listen(user, data) do + visibility = Map.get(data, :visibility, "public") + + with {to, cc} <- get_to_and_cc(user, [], nil, visibility, nil), + listen_data <- + data + |> Map.take([:album, :artist, :title, :length]) + |> Map.new(fn {key, value} -> {to_string(key), value} end) + |> Map.put("type", "Audio") + |> Map.put("to", to) + |> Map.put("cc", cc) + |> Map.put("actor", user.ap_id), + {:ok, activity} <- + ActivityPub.listen(%{ + actor: user, + to: to, + object: listen_data, + context: Utils.generate_context_id(), + additional: %{"cc" => cc} + }) do + {:ok, activity} + end + end + + def post(user, %{status: _} = data) do + with {:ok, draft} <- Pleroma.Web.CommonAPI.ActivityDraft.create(user, data) do + ActivityPub.create(draft.changes, draft.preview?) + end + end + + def pin(id, %{ap_id: user_ap_id} = user) do + with %Activity{ + actor: ^user_ap_id, + data: %{"type" => "Create"}, + object: %Object{data: %{"type" => object_type}} + } = activity <- Activity.get_by_id_with_object(id), + true <- object_type in ["Note", "Article", "Question"], + true <- Visibility.is_public?(activity), + {:ok, _user} <- User.add_pinnned_activity(user, activity) do + {:ok, activity} + else + {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} + _ -> {:error, dgettext("errors", "Could not pin")} + end + end + + def unpin(id, user) do + with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id), + {:ok, _user} <- User.remove_pinnned_activity(user, activity) do + {:ok, activity} + else + {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} + _ -> {:error, dgettext("errors", "Could not unpin")} + end + end + + def add_mute(user, activity) do + with {:ok, _} <- ThreadMute.add_mute(user.id, activity.data["context"]), + _ <- Pleroma.Notification.mark_context_as_read(user, activity.data["context"]) do + {:ok, activity} + else + {:error, _} -> {:error, dgettext("errors", "conversation is already muted")} + end + end + + def remove_mute(user, activity) do + ThreadMute.remove_mute(user.id, activity.data["context"]) + {:ok, activity} + end + + def thread_muted?(%User{id: user_id}, %{data: %{"context" => context}}) + when is_binary(context) do + ThreadMute.exists?(user_id, context) + end + + def thread_muted?(_, _), do: false + + def report(user, data) do + with {:ok, account} <- get_reported_account(data.account_id), + {:ok, {content_html, _, _}} <- make_report_content_html(data[:comment]), + {:ok, statuses} <- get_report_statuses(account, data) do + ActivityPub.flag(%{ + context: Utils.generate_context_id(), + actor: user, + account: account, + statuses: statuses, + content: content_html, + forward: Map.get(data, :forward, false) + }) + end + end + + defp get_reported_account(account_id) do + case User.get_cached_by_id(account_id) do + %User{} = account -> {:ok, account} + _ -> {:error, dgettext("errors", "Account not found")} + end + end + + def update_report_state(activity_ids, state) when is_list(activity_ids) do + case Utils.update_report_state(activity_ids, state) do + :ok -> {:ok, activity_ids} + _ -> {:error, dgettext("errors", "Could not update state")} + end + end + + def update_report_state(activity_id, state) do + with %Activity{} = activity <- Activity.get_by_id(activity_id) do + Utils.update_report_state(activity, state) + else + nil -> {:error, :not_found} + _ -> {:error, 
dgettext("errors", "Could not update state")} + end + end + + def update_activity_scope(activity_id, opts \\ %{}) do + with %Activity{} = activity <- Activity.get_by_id_with_object(activity_id), + {:ok, activity} <- toggle_sensitive(activity, opts) do + set_visibility(activity, opts) + else + nil -> {:error, :not_found} + {:error, reason} -> {:error, reason} + end + end + + defp toggle_sensitive(activity, %{sensitive: sensitive}) when sensitive in ~w(true false) do + toggle_sensitive(activity, %{sensitive: String.to_existing_atom(sensitive)}) + end + + defp toggle_sensitive(%Activity{object: object} = activity, %{sensitive: sensitive}) + when is_boolean(sensitive) do + new_data = Map.put(object.data, "sensitive", sensitive) + + {:ok, object} = + object + |> Object.change(%{data: new_data}) + |> Object.update_and_set_cache() + + {:ok, Map.put(activity, :object, object)} + end + + defp toggle_sensitive(activity, _), do: {:ok, activity} + + defp set_visibility(activity, %{visibility: visibility}) do + Utils.update_activity_visibility(activity, visibility) + end + + defp set_visibility(activity, _), do: {:ok, activity} + + def hide_reblogs(%User{} = user, %User{} = target) do + UserRelationship.create_reblog_mute(user, target) + end + + def show_reblogs(%User{} = user, %User{} = target) do + UserRelationship.delete_reblog_mute(user, target) + end + + def get_user(ap_id, fake_record_fallback \\ true) do + cond do + user = User.get_cached_by_ap_id(ap_id) -> + user + + user = User.get_by_guessed_nickname(ap_id) -> + user + + fake_record_fallback -> + # TODO: refactor (fake records is never a good idea) + User.error_user(ap_id) + + true -> + nil + end + end +end diff --git a/lib/pleroma/web/common_api/common_api.ex b/lib/pleroma/web/common_api/common_api.ex deleted file mode 100644 index 60a50b027..000000000 --- a/lib/pleroma/web/common_api/common_api.ex +++ /dev/null @@ -1,573 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.CommonAPI do - alias Pleroma.Activity - alias Pleroma.Conversation.Participation - alias Pleroma.Formatter - alias Pleroma.Object - alias Pleroma.ThreadMute - alias Pleroma.User - alias Pleroma.UserRelationship - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Builder - alias Pleroma.Web.ActivityPub.Pipeline - alias Pleroma.Web.ActivityPub.Utils - alias Pleroma.Web.ActivityPub.Visibility - - import Pleroma.Web.Gettext - import Pleroma.Web.CommonAPI.Utils - - require Pleroma.Constants - require Logger - - def block(blocker, blocked) do - with {:ok, block_data, _} <- Builder.block(blocker, blocked), - {:ok, block, _} <- Pipeline.common_pipeline(block_data, local: true) do - {:ok, block} - end - end - - def post_chat_message(%User{} = user, %User{} = recipient, content, opts \\ []) do - with maybe_attachment <- opts[:media_id] && Object.get_by_id(opts[:media_id]), - :ok <- validate_chat_content_length(content, !!maybe_attachment), - {_, {:ok, chat_message_data, _meta}} <- - {:build_object, - Builder.chat_message( - user, - recipient.ap_id, - content |> format_chat_content, - attachment: maybe_attachment - )}, - {_, {:ok, create_activity_data, _meta}} <- - {:build_create_activity, Builder.create(user, chat_message_data, [recipient.ap_id])}, - {_, {:ok, %Activity{} = activity, _meta}} <- - {:common_pipeline, - Pipeline.common_pipeline(create_activity_data, - local: true - )} do - {:ok, activity} - else - {:common_pipeline, {:reject, _} = 
e} -> e - e -> e - end - end - - defp format_chat_content(nil), do: nil - - defp format_chat_content(content) do - {text, _, _} = - content - |> Formatter.html_escape("text/plain") - |> Formatter.linkify() - |> (fn {text, mentions, tags} -> - {String.replace(text, ~r/\r?\n/, "
<br>
"), mentions, tags} - end).() - - text - end - - defp validate_chat_content_length(_, true), do: :ok - defp validate_chat_content_length(nil, false), do: {:error, :no_content} - - defp validate_chat_content_length(content, _) do - if String.length(content) <= Pleroma.Config.get([:instance, :chat_limit]) do - :ok - else - {:error, :content_too_long} - end - end - - def unblock(blocker, blocked) do - with {_, %Activity{} = block} <- {:fetch_block, Utils.fetch_latest_block(blocker, blocked)}, - {:ok, unblock_data, _} <- Builder.undo(blocker, block), - {:ok, unblock, _} <- Pipeline.common_pipeline(unblock_data, local: true) do - {:ok, unblock} - else - {:fetch_block, nil} -> - if User.blocks?(blocker, blocked) do - User.unblock(blocker, blocked) - {:ok, :no_activity} - else - {:error, :not_blocking} - end - - e -> - e - end - end - - def follow(follower, followed) do - timeout = Pleroma.Config.get([:activitypub, :follow_handshake_timeout]) - - with {:ok, follow_data, _} <- Builder.follow(follower, followed), - {:ok, activity, _} <- Pipeline.common_pipeline(follow_data, local: true), - {:ok, follower, followed} <- User.wait_and_refresh(timeout, follower, followed) do - if activity.data["state"] == "reject" do - {:error, :rejected} - else - {:ok, follower, followed, activity} - end - end - end - - def unfollow(follower, unfollowed) do - with {:ok, follower, _follow_activity} <- User.unfollow(follower, unfollowed), - {:ok, _activity} <- ActivityPub.unfollow(follower, unfollowed), - {:ok, _subscription} <- User.unsubscribe(follower, unfollowed) do - {:ok, follower} - end - end - - def accept_follow_request(follower, followed) do - with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), - {:ok, accept_data, _} <- Builder.accept(followed, follow_activity), - {:ok, _activity, _} <- Pipeline.common_pipeline(accept_data, local: true) do - {:ok, follower} - end - end - - def reject_follow_request(follower, followed) do - with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), - {:ok, reject_data, _} <- Builder.reject(followed, follow_activity), - {:ok, _activity, _} <- Pipeline.common_pipeline(reject_data, local: true) do - {:ok, follower} - end - end - - def delete(activity_id, user) do - with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <- - {:find_activity, Activity.get_by_id(activity_id)}, - {_, %Object{} = object, _} <- - {:find_object, Object.normalize(activity, false), activity}, - true <- User.superuser?(user) || user.ap_id == object.data["actor"], - {:ok, delete_data, _} <- Builder.delete(user, object.data["id"]), - {:ok, delete, _} <- Pipeline.common_pipeline(delete_data, local: true) do - {:ok, delete} - else - {:find_activity, _} -> - {:error, :not_found} - - {:find_object, nil, %Activity{data: %{"actor" => actor, "object" => object}}} -> - # We have the create activity, but not the object, it was probably pruned. - # Insert a tombstone and try again - with {:ok, tombstone_data, _} <- Builder.tombstone(actor, object), - {:ok, _tombstone} <- Object.create(tombstone_data) do - delete(activity_id, user) - else - _ -> - Logger.error( - "Could not insert tombstone for missing object on deletion. Object is #{object}." 
- ) - - {:error, dgettext("errors", "Could not delete")} - end - - _ -> - {:error, dgettext("errors", "Could not delete")} - end - end - - def repeat(id, user, params \\ %{}) do - with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id), - object = %Object{} <- Object.normalize(activity, false), - {_, nil} <- {:existing_announce, Utils.get_existing_announce(user.ap_id, object)}, - public = public_announce?(object, params), - {:ok, announce, _} <- Builder.announce(user, object, public: public), - {:ok, activity, _} <- Pipeline.common_pipeline(announce, local: true) do - {:ok, activity} - else - {:existing_announce, %Activity{} = announce} -> - {:ok, announce} - - _ -> - {:error, :not_found} - end - end - - def unrepeat(id, user) do - with {_, %Activity{data: %{"type" => "Create"}} = activity} <- - {:find_activity, Activity.get_by_id(id)}, - %Object{} = note <- Object.normalize(activity, false), - %Activity{} = announce <- Utils.get_existing_announce(user.ap_id, note), - {:ok, undo, _} <- Builder.undo(user, announce), - {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do - {:ok, activity} - else - {:find_activity, _} -> {:error, :not_found} - _ -> {:error, dgettext("errors", "Could not unrepeat")} - end - end - - @spec favorite(User.t(), binary()) :: {:ok, Activity.t() | :already_liked} | {:error, any()} - def favorite(%User{} = user, id) do - case favorite_helper(user, id) do - {:ok, _} = res -> - res - - {:error, :not_found} = res -> - res - - {:error, e} -> - Logger.error("Could not favorite #{id}. Error: #{inspect(e, pretty: true)}") - {:error, dgettext("errors", "Could not favorite")} - end - end - - def favorite_helper(user, id) do - with {_, %Activity{object: object}} <- {:find_object, Activity.get_by_id_with_object(id)}, - {_, {:ok, like_object, meta}} <- {:build_object, Builder.like(user, object)}, - {_, {:ok, %Activity{} = activity, _meta}} <- - {:common_pipeline, - Pipeline.common_pipeline(like_object, Keyword.put(meta, :local, true))} do - {:ok, activity} - else - {:find_object, _} -> - {:error, :not_found} - - {:common_pipeline, - { - :error, - { - :validate_object, - { - :error, - changeset - } - } - }} = e -> - if {:object, {"already liked by this actor", []}} in changeset.errors do - {:ok, :already_liked} - else - {:error, e} - end - - e -> - {:error, e} - end - end - - def unfavorite(id, user) do - with {_, %Activity{data: %{"type" => "Create"}} = activity} <- - {:find_activity, Activity.get_by_id(id)}, - %Object{} = note <- Object.normalize(activity, false), - %Activity{} = like <- Utils.get_existing_like(user.ap_id, note), - {:ok, undo, _} <- Builder.undo(user, like), - {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do - {:ok, activity} - else - {:find_activity, _} -> {:error, :not_found} - _ -> {:error, dgettext("errors", "Could not unfavorite")} - end - end - - def react_with_emoji(id, user, emoji) do - with %Activity{} = activity <- Activity.get_by_id(id), - object <- Object.normalize(activity), - {:ok, emoji_react, _} <- Builder.emoji_react(user, object, emoji), - {:ok, activity, _} <- Pipeline.common_pipeline(emoji_react, local: true) do - {:ok, activity} - else - _ -> - {:error, dgettext("errors", "Could not add reaction emoji")} - end - end - - def unreact_with_emoji(id, user, emoji) do - with %Activity{} = reaction_activity <- Utils.get_latest_reaction(id, user, emoji), - {:ok, undo, _} <- Builder.undo(user, reaction_activity), - {:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do - {:ok, 
activity} - else - _ -> - {:error, dgettext("errors", "Could not remove reaction emoji")} - end - end - - def vote(user, %{data: %{"type" => "Question"}} = object, choices) do - with :ok <- validate_not_author(object, user), - :ok <- validate_existing_votes(user, object), - {:ok, options, choices} <- normalize_and_validate_choices(choices, object) do - answer_activities = - Enum.map(choices, fn index -> - {:ok, answer_object, _meta} = - Builder.answer(user, object, Enum.at(options, index)["name"]) - - {:ok, activity_data, _meta} = Builder.create(user, answer_object, []) - - {:ok, activity, _meta} = - activity_data - |> Map.put("cc", answer_object["cc"]) - |> Map.put("context", answer_object["context"]) - |> Pipeline.common_pipeline(local: true) - - # TODO: Do preload of Pleroma.Object in Pipeline - Activity.normalize(activity.data) - end) - - object = Object.get_cached_by_ap_id(object.data["id"]) - {:ok, answer_activities, object} - end - end - - defp validate_not_author(%{data: %{"actor" => ap_id}}, %{ap_id: ap_id}), - do: {:error, dgettext("errors", "Poll's author can't vote")} - - defp validate_not_author(_, _), do: :ok - - defp validate_existing_votes(%{ap_id: ap_id}, object) do - if Utils.get_existing_votes(ap_id, object) == [] do - :ok - else - {:error, dgettext("errors", "Already voted")} - end - end - - defp get_options_and_max_count(%{data: %{"anyOf" => any_of}}) - when is_list(any_of) and any_of != [], - do: {any_of, Enum.count(any_of)} - - defp get_options_and_max_count(%{data: %{"oneOf" => one_of}}) - when is_list(one_of) and one_of != [], - do: {one_of, 1} - - defp normalize_and_validate_choices(choices, object) do - choices = Enum.map(choices, fn i -> if is_binary(i), do: String.to_integer(i), else: i end) - {options, max_count} = get_options_and_max_count(object) - count = Enum.count(options) - - with {_, true} <- {:valid_choice, Enum.all?(choices, &(&1 < count))}, - {_, true} <- {:count_check, Enum.count(choices) <= max_count} do - {:ok, options, choices} - else - {:valid_choice, _} -> {:error, dgettext("errors", "Invalid indices")} - {:count_check, _} -> {:error, dgettext("errors", "Too many choices")} - end - end - - def public_announce?(_, %{visibility: visibility}) - when visibility in ~w{public unlisted private direct}, - do: visibility in ~w(public unlisted) - - def public_announce?(object, _) do - Visibility.is_public?(object) - end - - def get_visibility(_, _, %Participation{}), do: {"direct", "direct"} - - def get_visibility(%{visibility: visibility}, in_reply_to, _) - when visibility in ~w{public unlisted private direct}, - do: {visibility, get_replied_to_visibility(in_reply_to)} - - def get_visibility(%{visibility: "list:" <> list_id}, in_reply_to, _) do - visibility = {:list, String.to_integer(list_id)} - {visibility, get_replied_to_visibility(in_reply_to)} - end - - def get_visibility(_, in_reply_to, _) when not is_nil(in_reply_to) do - visibility = get_replied_to_visibility(in_reply_to) - {visibility, visibility} - end - - def get_visibility(_, in_reply_to, _), do: {"public", get_replied_to_visibility(in_reply_to)} - - def get_replied_to_visibility(nil), do: nil - - def get_replied_to_visibility(activity) do - with %Object{} = object <- Object.normalize(activity) do - Visibility.get_visibility(object) - end - end - - def check_expiry_date({:ok, nil} = res), do: res - - def check_expiry_date({:ok, in_seconds}) do - expiry = DateTime.add(DateTime.utc_now(), in_seconds) - - if Pleroma.Workers.PurgeExpiredActivity.expires_late_enough?(expiry) do - {:ok, expiry} - 
else - {:error, "Expiry date is too soon"} - end - end - - def check_expiry_date(expiry_str) do - Ecto.Type.cast(:integer, expiry_str) - |> check_expiry_date() - end - - def listen(user, data) do - visibility = Map.get(data, :visibility, "public") - - with {to, cc} <- get_to_and_cc(user, [], nil, visibility, nil), - listen_data <- - data - |> Map.take([:album, :artist, :title, :length]) - |> Map.new(fn {key, value} -> {to_string(key), value} end) - |> Map.put("type", "Audio") - |> Map.put("to", to) - |> Map.put("cc", cc) - |> Map.put("actor", user.ap_id), - {:ok, activity} <- - ActivityPub.listen(%{ - actor: user, - to: to, - object: listen_data, - context: Utils.generate_context_id(), - additional: %{"cc" => cc} - }) do - {:ok, activity} - end - end - - def post(user, %{status: _} = data) do - with {:ok, draft} <- Pleroma.Web.CommonAPI.ActivityDraft.create(user, data) do - ActivityPub.create(draft.changes, draft.preview?) - end - end - - def pin(id, %{ap_id: user_ap_id} = user) do - with %Activity{ - actor: ^user_ap_id, - data: %{"type" => "Create"}, - object: %Object{data: %{"type" => object_type}} - } = activity <- Activity.get_by_id_with_object(id), - true <- object_type in ["Note", "Article", "Question"], - true <- Visibility.is_public?(activity), - {:ok, _user} <- User.add_pinnned_activity(user, activity) do - {:ok, activity} - else - {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} - _ -> {:error, dgettext("errors", "Could not pin")} - end - end - - def unpin(id, user) do - with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id), - {:ok, _user} <- User.remove_pinnned_activity(user, activity) do - {:ok, activity} - else - {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} - _ -> {:error, dgettext("errors", "Could not unpin")} - end - end - - def add_mute(user, activity) do - with {:ok, _} <- ThreadMute.add_mute(user.id, activity.data["context"]), - _ <- Pleroma.Notification.mark_context_as_read(user, activity.data["context"]) do - {:ok, activity} - else - {:error, _} -> {:error, dgettext("errors", "conversation is already muted")} - end - end - - def remove_mute(user, activity) do - ThreadMute.remove_mute(user.id, activity.data["context"]) - {:ok, activity} - end - - def thread_muted?(%User{id: user_id}, %{data: %{"context" => context}}) - when is_binary(context) do - ThreadMute.exists?(user_id, context) - end - - def thread_muted?(_, _), do: false - - def report(user, data) do - with {:ok, account} <- get_reported_account(data.account_id), - {:ok, {content_html, _, _}} <- make_report_content_html(data[:comment]), - {:ok, statuses} <- get_report_statuses(account, data) do - ActivityPub.flag(%{ - context: Utils.generate_context_id(), - actor: user, - account: account, - statuses: statuses, - content: content_html, - forward: Map.get(data, :forward, false) - }) - end - end - - defp get_reported_account(account_id) do - case User.get_cached_by_id(account_id) do - %User{} = account -> {:ok, account} - _ -> {:error, dgettext("errors", "Account not found")} - end - end - - def update_report_state(activity_ids, state) when is_list(activity_ids) do - case Utils.update_report_state(activity_ids, state) do - :ok -> {:ok, activity_ids} - _ -> {:error, dgettext("errors", "Could not update state")} - end - end - - def update_report_state(activity_id, state) do - with %Activity{} = activity <- Activity.get_by_id(activity_id) do - Utils.update_report_state(activity, state) - else - nil -> {:error, :not_found} - _ -> {:error, 
dgettext("errors", "Could not update state")} - end - end - - def update_activity_scope(activity_id, opts \\ %{}) do - with %Activity{} = activity <- Activity.get_by_id_with_object(activity_id), - {:ok, activity} <- toggle_sensitive(activity, opts) do - set_visibility(activity, opts) - else - nil -> {:error, :not_found} - {:error, reason} -> {:error, reason} - end - end - - defp toggle_sensitive(activity, %{sensitive: sensitive}) when sensitive in ~w(true false) do - toggle_sensitive(activity, %{sensitive: String.to_existing_atom(sensitive)}) - end - - defp toggle_sensitive(%Activity{object: object} = activity, %{sensitive: sensitive}) - when is_boolean(sensitive) do - new_data = Map.put(object.data, "sensitive", sensitive) - - {:ok, object} = - object - |> Object.change(%{data: new_data}) - |> Object.update_and_set_cache() - - {:ok, Map.put(activity, :object, object)} - end - - defp toggle_sensitive(activity, _), do: {:ok, activity} - - defp set_visibility(activity, %{visibility: visibility}) do - Utils.update_activity_visibility(activity, visibility) - end - - defp set_visibility(activity, _), do: {:ok, activity} - - def hide_reblogs(%User{} = user, %User{} = target) do - UserRelationship.create_reblog_mute(user, target) - end - - def show_reblogs(%User{} = user, %User{} = target) do - UserRelationship.delete_reblog_mute(user, target) - end - - def get_user(ap_id, fake_record_fallback \\ true) do - cond do - user = User.get_cached_by_ap_id(ap_id) -> - user - - user = User.get_by_guessed_nickname(ap_id) -> - user - - fake_record_fallback -> - # TODO: refactor (fake records is never a good idea) - User.error_user(ap_id) - - true -> - nil - end - end -end diff --git a/lib/pleroma/web/fallback/redirect_controller.ex b/lib/pleroma/web/fallback/redirect_controller.ex new file mode 100644 index 000000000..a7b36a34b --- /dev/null +++ b/lib/pleroma/web/fallback/redirect_controller.ex @@ -0,0 +1,108 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Fallback.RedirectController do + use Pleroma.Web, :controller + + require Logger + + alias Pleroma.User + alias Pleroma.Web.Metadata + alias Pleroma.Web.Preload + + def api_not_implemented(conn, _params) do + conn + |> put_status(404) + |> json(%{error: "Not implemented"}) + end + + def redirector(conn, _params, code \\ 200) do + conn + |> put_resp_content_type("text/html") + |> send_file(code, index_file_path()) + end + + def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id} = params) do + with %User{} = user <- User.get_cached_by_nickname_or_id(maybe_nickname_or_id) do + redirector_with_meta(conn, %{user: user}) + else + nil -> + redirector(conn, params) + end + end + + def redirector_with_meta(conn, params) do + {:ok, index_content} = File.read(index_file_path()) + + tags = build_tags(conn, params) + preloads = preload_data(conn, params) + + response = + index_content + |> String.replace("", tags <> preloads) + + conn + |> put_resp_content_type("text/html") + |> send_resp(200, response) + end + + def redirector_with_preload(conn, %{"path" => ["pleroma", "admin"]}) do + redirect(conn, to: "/pleroma/admin/") + end + + def redirector_with_preload(conn, params) do + {:ok, index_content} = File.read(index_file_path()) + preloads = preload_data(conn, params) + + response = + index_content + |> String.replace("", preloads) + + conn + |> put_resp_content_type("text/html") + |> send_resp(200, response) + end + + def 
registration_page(conn, params) do + redirector(conn, params) + end + + def empty(conn, _params) do + conn + |> put_status(204) + |> text("") + end + + defp index_file_path do + Pleroma.Plugs.InstanceStatic.file_path("index.html") + end + + defp build_tags(conn, params) do + try do + Metadata.build_tags(params) + rescue + e -> + Logger.error( + "Metadata rendering for #{conn.request_path} failed.\n" <> + Exception.format(:error, e, __STACKTRACE__) + ) + + "" + end + end + + defp preload_data(conn, params) do + try do + Preload.build_tags(conn, params) + rescue + e -> + Logger.error( + "Preloading for #{conn.request_path} failed.\n" <> + Exception.format(:error, e, __STACKTRACE__) + ) + + "" + end + end +end diff --git a/lib/pleroma/web/fallback_redirect_controller.ex b/lib/pleroma/web/fallback_redirect_controller.ex deleted file mode 100644 index 431ad5485..000000000 --- a/lib/pleroma/web/fallback_redirect_controller.ex +++ /dev/null @@ -1,108 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Fallback.RedirectController do - use Pleroma.Web, :controller - - require Logger - - alias Pleroma.User - alias Pleroma.Web.Metadata - alias Pleroma.Web.Preload - - def api_not_implemented(conn, _params) do - conn - |> put_status(404) - |> json(%{error: "Not implemented"}) - end - - def redirector(conn, _params, code \\ 200) do - conn - |> put_resp_content_type("text/html") - |> send_file(code, index_file_path()) - end - - def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id} = params) do - with %User{} = user <- User.get_cached_by_nickname_or_id(maybe_nickname_or_id) do - redirector_with_meta(conn, %{user: user}) - else - nil -> - redirector(conn, params) - end - end - - def redirector_with_meta(conn, params) do - {:ok, index_content} = File.read(index_file_path()) - - tags = build_tags(conn, params) - preloads = preload_data(conn, params) - - response = - index_content - |> String.replace("", tags <> preloads) - - conn - |> put_resp_content_type("text/html") - |> send_resp(200, response) - end - - def redirector_with_preload(conn, %{"path" => ["pleroma", "admin"]}) do - redirect(conn, to: "/pleroma/admin/") - end - - def redirector_with_preload(conn, params) do - {:ok, index_content} = File.read(index_file_path()) - preloads = preload_data(conn, params) - - response = - index_content - |> String.replace("", preloads) - - conn - |> put_resp_content_type("text/html") - |> send_resp(200, response) - end - - def registration_page(conn, params) do - redirector(conn, params) - end - - def empty(conn, _params) do - conn - |> put_status(204) - |> text("") - end - - defp index_file_path do - Pleroma.Plugs.InstanceStatic.file_path("index.html") - end - - defp build_tags(conn, params) do - try do - Metadata.build_tags(params) - rescue - e -> - Logger.error( - "Metadata rendering for #{conn.request_path} failed.\n" <> - Exception.format(:error, e, __STACKTRACE__) - ) - - "" - end - end - - defp preload_data(conn, params) do - try do - Preload.build_tags(conn, params) - rescue - e -> - Logger.error( - "Preloading for #{conn.request_path} failed.\n" <> - Exception.format(:error, e, __STACKTRACE__) - ) - - "" - end - end -end diff --git a/lib/pleroma/web/federator.ex b/lib/pleroma/web/federator.ex new file mode 100644 index 000000000..130654145 --- /dev/null +++ b/lib/pleroma/web/federator.ex @@ -0,0 +1,111 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 
Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Federator do + alias Pleroma.Activity + alias Pleroma.Object.Containment + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.Federator.Publisher + alias Pleroma.Workers.PublisherWorker + alias Pleroma.Workers.ReceiverWorker + + require Logger + + @doc """ + Returns `true` if the distance to target object does not exceed max configured value. + Serves to prevent fetching of very long threads, especially useful on smaller instances. + Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161). + Applies to fetching of both ancestor (reply-to) and child (reply) objects. + """ + # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength + def allowed_thread_distance?(distance) do + max_distance = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth]) + + if max_distance && max_distance >= 0 do + # Default depth is 0 (an object has zero distance from itself in its thread) + (distance || 0) <= max_distance + else + true + end + end + + # Client API + + def incoming_ap_doc(params) do + ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}) + end + + def publish(%{id: "pleroma:fakeid"} = activity) do + perform(:publish, activity) + end + + def publish(activity) do + PublisherWorker.enqueue("publish", %{"activity_id" => activity.id}) + end + + # Job Worker Callbacks + + @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()} + def perform(:publish_one, module, params) do + apply(module, :publish_one, [params]) + end + + def perform(:publish, activity) do + Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end) + + with %User{} = actor <- User.get_cached_by_ap_id(activity.data["actor"]), + {:ok, actor} <- User.ensure_keys_present(actor) do + Publisher.publish(actor, activity) + end + end + + def perform(:incoming_ap_doc, params) do + Logger.debug("Handling incoming AP activity") + + actor = + params + |> Map.get("actor") + |> Utils.get_ap_id() + + # NOTE: we use the actor ID to do the containment, this is fine because an + # actor shouldn't be acting on objects outside their own AP server. 
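+ # As an illustration (hypothetical values, not part of this module): for
+ # actor = "https://a.example/users/alice" and incoming
+ # params = %{"id" => "https://b.example/objects/1"}, the
+ # Containment.contain_origin_from_id/2 step below fails because the object
+ # id lives on a different host than the actor, and the document is dropped
+ # with {:error, :origin_containment_failed}.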
+ with {_, {:ok, _user}} <- {:actor, ap_enabled_actor(actor)}, + nil <- Activity.normalize(params["id"]), + {_, :ok} <- + {:correct_origin?, Containment.contain_origin_from_id(actor, params)}, + {:ok, activity} <- Transmogrifier.handle_incoming(params) do + {:ok, activity} + else + {:correct_origin?, _} -> + Logger.debug("Origin containment failure for #{params["id"]}") + {:error, :origin_containment_failed} + + %Activity{} -> + Logger.debug("Already had #{params["id"]}") + {:error, :already_present} + + {:actor, e} -> + Logger.debug("Unhandled actor #{actor}, #{inspect(e)}") + {:error, e} + + e -> + # Just drop those for now + Logger.debug(fn -> "Unhandled activity\n" <> Jason.encode!(params, pretty: true) end) + {:error, e} + end + end + + def ap_enabled_actor(id) do + user = User.get_cached_by_ap_id(id) + + if User.ap_enabled?(user) do + {:ok, user} + else + ActivityPub.make_user_from_ap_id(id) + end + end +end diff --git a/lib/pleroma/web/federator/federator.ex b/lib/pleroma/web/federator/federator.ex deleted file mode 100644 index 130654145..000000000 --- a/lib/pleroma/web/federator/federator.ex +++ /dev/null @@ -1,111 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2020 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.Federator do - alias Pleroma.Activity - alias Pleroma.Object.Containment - alias Pleroma.User - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Transmogrifier - alias Pleroma.Web.ActivityPub.Utils - alias Pleroma.Web.Federator.Publisher - alias Pleroma.Workers.PublisherWorker - alias Pleroma.Workers.ReceiverWorker - - require Logger - - @doc """ - Returns `true` if the distance to target object does not exceed max configured value. - Serves to prevent fetching of very long threads, especially useful on smaller instances. - Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161). - Applies to fetching of both ancestor (reply-to) and child (reply) objects. 
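- For example, assuming `federation_incoming_replies_max_depth` is set to 100
- (an illustrative value, not necessarily the default), a distance of 100 is
- still allowed while 101 is rejected; a `nil` distance counts as 0 and is
- therefore always allowed:
-
-     iex> Pleroma.Web.Federator.allowed_thread_distance?(nil)
-     true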
- """ - # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength - def allowed_thread_distance?(distance) do - max_distance = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth]) - - if max_distance && max_distance >= 0 do - # Default depth is 0 (an object has zero distance from itself in its thread) - (distance || 0) <= max_distance - else - true - end - end - - # Client API - - def incoming_ap_doc(params) do - ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}) - end - - def publish(%{id: "pleroma:fakeid"} = activity) do - perform(:publish, activity) - end - - def publish(activity) do - PublisherWorker.enqueue("publish", %{"activity_id" => activity.id}) - end - - # Job Worker Callbacks - - @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()} - def perform(:publish_one, module, params) do - apply(module, :publish_one, [params]) - end - - def perform(:publish, activity) do - Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end) - - with %User{} = actor <- User.get_cached_by_ap_id(activity.data["actor"]), - {:ok, actor} <- User.ensure_keys_present(actor) do - Publisher.publish(actor, activity) - end - end - - def perform(:incoming_ap_doc, params) do - Logger.debug("Handling incoming AP activity") - - actor = - params - |> Map.get("actor") - |> Utils.get_ap_id() - - # NOTE: we use the actor ID to do the containment, this is fine because an - # actor shouldn't be acting on objects outside their own AP server. - with {_, {:ok, _user}} <- {:actor, ap_enabled_actor(actor)}, - nil <- Activity.normalize(params["id"]), - {_, :ok} <- - {:correct_origin?, Containment.contain_origin_from_id(actor, params)}, - {:ok, activity} <- Transmogrifier.handle_incoming(params) do - {:ok, activity} - else - {:correct_origin?, _} -> - Logger.debug("Origin containment failure for #{params["id"]}") - {:error, :origin_containment_failed} - - %Activity{} -> - Logger.debug("Already had #{params["id"]}") - {:error, :already_present} - - {:actor, e} -> - Logger.debug("Unhandled actor #{actor}, #{inspect(e)}") - {:error, e} - - e -> - # Just drop those for now - Logger.debug(fn -> "Unhandled activity\n" <> Jason.encode!(params, pretty: true) end) - {:error, e} - end - end - - def ap_enabled_actor(id) do - user = User.get_cached_by_ap_id(id) - - if User.ap_enabled?(user) do - {:ok, user} - else - ActivityPub.make_user_from_ap_id(id) - end - end -end diff --git a/lib/pleroma/web/feed/user_controller.ex b/lib/pleroma/web/feed/user_controller.ex index 71eb1ea7e..bea07649b 100644 --- a/lib/pleroma/web/feed/user_controller.ex +++ b/lib/pleroma/web/feed/user_controller.ex @@ -5,7 +5,6 @@ defmodule Pleroma.Web.Feed.UserController do use Pleroma.Web, :controller - alias Fallback.RedirectController alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.ActivityPubController @@ -17,7 +16,7 @@ defmodule Pleroma.Web.Feed.UserController do def feed_redirect(%{assigns: %{format: "html"}} = conn, %{"nickname" => nickname}) do with {_, %User{} = user} <- {:fetch_user, User.get_cached_by_nickname_or_id(nickname)} do - RedirectController.redirector_with_meta(conn, %{user: user}) + Pleroma.Web.Fallback.RedirectController.redirector_with_meta(conn, %{user: user}) end end diff --git a/lib/pleroma/web/media_proxy.ex b/lib/pleroma/web/media_proxy.ex new file mode 100644 index 000000000..8656b8cad --- /dev/null +++ b/lib/pleroma/web/media_proxy.ex @@ -0,0 +1,186 @@ +# Pleroma: A lightweight social networking server +# Copyright © 
diff --git a/lib/pleroma/web/media_proxy.ex b/lib/pleroma/web/media_proxy.ex
new file mode 100644
index 000000000..8656b8cad
--- /dev/null
+++ b/lib/pleroma/web/media_proxy.ex
@@ -0,0 +1,186 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.MediaProxy do
+  alias Pleroma.Config
+  alias Pleroma.Helpers.UriHelper
+  alias Pleroma.Upload
+  alias Pleroma.Web
+  alias Pleroma.Web.MediaProxy.Invalidation
+
+  @base64_opts [padding: false]
+  @cache_table :banned_urls_cache
+
+  def cache_table, do: @cache_table
+
+  @spec in_banned_urls(String.t()) :: boolean()
+  def in_banned_urls(url), do: elem(Cachex.exists?(@cache_table, url(url)), 1)
+
+  def remove_from_banned_urls(urls) when is_list(urls) do
+    Cachex.execute!(@cache_table, fn cache ->
+      Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
+    end)
+  end
+
+  def remove_from_banned_urls(url) when is_binary(url) do
+    Cachex.del(@cache_table, url(url))
+  end
+
+  def put_in_banned_urls(urls) when is_list(urls) do
+    Cachex.execute!(@cache_table, fn cache ->
+      Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
+    end)
+  end
+
+  def put_in_banned_urls(url) when is_binary(url) do
+    Cachex.put(@cache_table, url(url), true)
+  end
+
+  def url(url) when is_nil(url) or url == "", do: nil
+  def url("/" <> _ = url), do: url
+
+  def url(url) do
+    if enabled?() and url_proxiable?(url) do
+      encode_url(url)
+    else
+      url
+    end
+  end
+
+  @spec url_proxiable?(String.t()) :: boolean()
+  def url_proxiable?(url) do
+    not local?(url) and not whitelisted?(url)
+  end
+
+  def preview_url(url, preview_params \\ []) do
+    if preview_enabled?() do
+      encode_preview_url(url, preview_params)
+    else
+      url(url)
+    end
+  end
+
+  def enabled?, do: Config.get([:media_proxy, :enabled], false)
+
+  # Note: media proxy must be enabled for media preview proxy in order to load all
+  # non-local non-whitelisted URLs through it and be sure that body size constraint is preserved.
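+  #
+  # A minimal config sketch for that pairing (illustrative values; the
+  # base_url host is a made-up example, not a shipped default):
+  #
+  #     config :pleroma, :media_proxy, enabled: true, base_url: "https://cache.example.tld"
+  #     config :pleroma, :media_preview_proxy, enabled: true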
+  def preview_enabled?, do: enabled?() and !!Config.get([:media_preview_proxy, :enabled])
+
+  def local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
+
+  def whitelisted?(url) do
+    %{host: domain} = URI.parse(url)
+
+    mediaproxy_whitelist_domains =
+      [:media_proxy, :whitelist]
+      |> Config.get()
+      |> Enum.map(&maybe_get_domain_from_url/1)
+
+    whitelist_domains =
+      if base_url = Config.get([Upload, :base_url]) do
+        %{host: base_domain} = URI.parse(base_url)
+        [base_domain | mediaproxy_whitelist_domains]
+      else
+        mediaproxy_whitelist_domains
+      end
+
+    domain in whitelist_domains
+  end
+
+  defp maybe_get_domain_from_url("http" <> _ = url) do
+    URI.parse(url).host
+  end
+
+  defp maybe_get_domain_from_url(domain), do: domain
+
+  defp base64_sig64(url) do
+    base64 = Base.url_encode64(url, @base64_opts)
+
+    sig64 =
+      base64
+      |> signed_url()
+      |> Base.url_encode64(@base64_opts)
+
+    {base64, sig64}
+  end
+
+  def encode_url(url) do
+    {base64, sig64} = base64_sig64(url)
+
+    build_url(sig64, base64, filename(url))
+  end
+
+  def encode_preview_url(url, preview_params \\ []) do
+    {base64, sig64} = base64_sig64(url)
+
+    build_preview_url(sig64, base64, filename(url), preview_params)
+  end
+
+  def decode_url(sig, url) do
+    with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
+         signature when signature == sig <- signed_url(url) do
+      {:ok, Base.url_decode64!(url, @base64_opts)}
+    else
+      _ -> {:error, :invalid_signature}
+    end
+  end
+
+  defp signed_url(url) do
+    :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url)
+  end
+
+  def filename(url_or_path) do
+    if path = URI.parse(url_or_path).path, do: Path.basename(path)
+  end
+
+  def base_url do
+    Config.get([:media_proxy, :base_url], Web.base_url())
+  end
+
+  defp proxy_url(path, sig_base64, url_base64, filename) do
+    [
+      base_url(),
+      path,
+      sig_base64,
+      url_base64,
+      filename
+    ]
+    |> Enum.filter(& &1)
+    |> Path.join()
+  end
+
+  def build_url(sig_base64, url_base64, filename \\ nil) do
+    proxy_url("proxy", sig_base64, url_base64, filename)
+  end
+
+  def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
+    uri = proxy_url("proxy/preview", sig_base64, url_base64, filename)
+
+    UriHelper.modify_uri_params(uri, preview_params)
+  end
+
+  def verify_request_path_and_url(
+        %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
+        url
+      ) do
+    verify_request_path_and_url(request_path, url)
+  end
+
+  def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
+    filename = filename(url)
+
+    if filename && not basename_matches?(request_path, filename) do
+      {:wrong_filename, filename}
+    else
+      :ok
+    end
+  end
+
+  def verify_request_path_and_url(_, _), do: :ok
+
+  defp basename_matches?(path, filename) do
+    basename = Path.basename(path)
+    basename == filename or URI.decode(basename) == filename or URI.encode(basename) == filename
+  end
+end
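The proxy URLs built above are self-verifying: the path carries the base64-encoded remote URL plus an HMAC over it, so no server-side state is needed to validate a request. A minimal round-trip sketch (assumes the proxy is enabled and a `secret_key_base` is configured; the remote URL is a made-up example):

    url = "https://remote.example/media/img.png"
    proxied = Pleroma.Web.MediaProxy.encode_url(url)
    # => "<base_url>/proxy/<sig64>/<base64>/img.png"

    # Split the path back apart; decode_url/2 only succeeds if the HMAC matches.
    [sig64, base64 | _] =
      proxied |> URI.parse() |> Map.get(:path) |> Path.split() |> Enum.drop(2)

    {:ok, ^url} = Pleroma.Web.MediaProxy.decode_url(sig64, base64)
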
diff --git a/lib/pleroma/web/media_proxy/invalidation/http.ex b/lib/pleroma/web/media_proxy/invalidation/http.ex
new file mode 100644
index 000000000..bb81d8888
--- /dev/null
+++ b/lib/pleroma/web/media_proxy/invalidation/http.ex
@@ -0,0 +1,40 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
+  @moduledoc false
+  @behaviour Pleroma.Web.MediaProxy.Invalidation
+
+  require Logger
+
+  @impl Pleroma.Web.MediaProxy.Invalidation
+  def purge(urls, opts \\ []) do
+    method = Keyword.get(opts, :method, :purge)
+    headers = Keyword.get(opts, :headers, [])
+    options = Keyword.get(opts, :options, [])
+
+    Logger.debug("Running cache purge: #{inspect(urls)}")
+
+    Enum.each(urls, fn url ->
+      with {:error, error} <- do_purge(method, url, headers, options) do
+        Logger.error("Error while cache purge: url - #{url}, error: #{inspect(error)}")
+      end
+    end)
+
+    {:ok, urls}
+  end
+
+  defp do_purge(method, url, headers, options) do
+    case Pleroma.HTTP.request(method, url, "", headers, options) do
+      {:ok, %{status: status} = env} when 400 <= status and status < 500 ->
+        {:error, env}
+
+      {:error, error} = error ->
+        error
+
+      _ ->
+        {:ok, "success"}
+    end
+  end
+end
diff --git a/lib/pleroma/web/media_proxy/invalidation/script.ex b/lib/pleroma/web/media_proxy/invalidation/script.ex
new file mode 100644
index 000000000..d32ffc50b
--- /dev/null
+++ b/lib/pleroma/web/media_proxy/invalidation/script.ex
@@ -0,0 +1,43 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
+  @moduledoc false
+
+  @behaviour Pleroma.Web.MediaProxy.Invalidation
+
+  require Logger
+
+  @impl Pleroma.Web.MediaProxy.Invalidation
+  def purge(urls, opts \\ []) do
+    args =
+      urls
+      |> List.wrap()
+      |> Enum.uniq()
+      |> Enum.join(" ")
+
+    opts
+    |> Keyword.get(:script_path)
+    |> do_purge([args])
+    |> handle_result(urls)
+  end
+
+  defp do_purge(script_path, args) when is_binary(script_path) do
+    path = Path.expand(script_path)
+    Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
+    System.cmd(path, args)
+  rescue
+    error -> error
+  end
+
+  defp do_purge(_, _), do: {:error, "not found script path"}
+
+  defp handle_result({_result, 0}, urls), do: {:ok, urls}
+  defp handle_result({:error, error}, urls), do: handle_result(error, urls)
+
+  defp handle_result(error, _) do
+    Logger.error("Error while cache purge: #{inspect(error)}")
+    {:error, inspect(error)}
+  end
+end
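Both providers implement the same `purge/2` callback of the `Pleroma.Web.MediaProxy.Invalidation` behaviour, so they are interchangeable from the caller's side. A minimal invocation sketch (URLs and the script path are made-up examples):

    # HTTP-based purge (PURGE is the conventional method for Varnish-style caches):
    Pleroma.Web.MediaProxy.Invalidation.Http.purge(
      ["https://cache.example.tld/proxy/sig64/base64/img.png"],
      method: :purge
    )

    # Script-based purge; the script receives the deduplicated URLs as one argument:
    Pleroma.Web.MediaProxy.Invalidation.Script.purge(
      ["https://cache.example.tld/proxy/sig64/base64/img.png"],
      script_path: "/opt/pleroma/purge_cache.sh"
    )
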
diff --git a/lib/pleroma/web/media_proxy/invalidations/http.ex b/lib/pleroma/web/media_proxy/invalidations/http.ex
deleted file mode 100644
index bb81d8888..000000000
--- a/lib/pleroma/web/media_proxy/invalidations/http.ex
+++ /dev/null
@@ -1,40 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
-  @moduledoc false
-  @behaviour Pleroma.Web.MediaProxy.Invalidation
-
-  require Logger
-
-  @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, opts \\ []) do
-    method = Keyword.get(opts, :method, :purge)
-    headers = Keyword.get(opts, :headers, [])
-    options = Keyword.get(opts, :options, [])
-
-    Logger.debug("Running cache purge: #{inspect(urls)}")
-
-    Enum.each(urls, fn url ->
-      with {:error, error} <- do_purge(method, url, headers, options) do
-        Logger.error("Error while cache purge: url - #{url}, error: #{inspect(error)}")
-      end
-    end)
-
-    {:ok, urls}
-  end
-
-  defp do_purge(method, url, headers, options) do
-    case Pleroma.HTTP.request(method, url, "", headers, options) do
-      {:ok, %{status: status} = env} when 400 <= status and status < 500 ->
-        {:error, env}
-
-      {:error, error} = error ->
-        error
-
-      _ ->
-        {:ok, "success"}
-    end
-  end
-end
diff --git a/lib/pleroma/web/media_proxy/invalidations/script.ex b/lib/pleroma/web/media_proxy/invalidations/script.ex
deleted file mode 100644
index d32ffc50b..000000000
--- a/lib/pleroma/web/media_proxy/invalidations/script.ex
+++ /dev/null
@@ -1,43 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
-  @moduledoc false
-
-  @behaviour Pleroma.Web.MediaProxy.Invalidation
-
-  require Logger
-
-  @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, opts \\ []) do
-    args =
-      urls
-      |> List.wrap()
-      |> Enum.uniq()
-      |> Enum.join(" ")
-
-    opts
-    |> Keyword.get(:script_path)
-    |> do_purge([args])
-    |> handle_result(urls)
-  end
-
-  defp do_purge(script_path, args) when is_binary(script_path) do
-    path = Path.expand(script_path)
-    Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
-    System.cmd(path, args)
-  rescue
-    error -> error
-  end
-
-  defp do_purge(_, _), do: {:error, "not found script path"}
-
-  defp handle_result({_result, 0}, urls), do: {:ok, urls}
-  defp handle_result({:error, error}, urls), do: handle_result(error, urls)
-
-  defp handle_result(error, _) do
-    Logger.error("Error while cache purge: #{inspect(error)}")
-    {:error, inspect(error)}
-  end
-end
diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex
deleted file mode 100644
index 8656b8cad..000000000
--- a/lib/pleroma/web/media_proxy/media_proxy.ex
+++ /dev/null
@@ -1,186 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.MediaProxy do
-  alias Pleroma.Config
-  alias Pleroma.Helpers.UriHelper
-  alias Pleroma.Upload
-  alias Pleroma.Web
-  alias Pleroma.Web.MediaProxy.Invalidation
-
-  @base64_opts [padding: false]
-  @cache_table :banned_urls_cache
-
-  def cache_table, do: @cache_table
-
-  @spec in_banned_urls(String.t()) :: boolean()
-  def in_banned_urls(url), do: elem(Cachex.exists?(@cache_table, url(url)), 1)
-
-  def remove_from_banned_urls(urls) when is_list(urls) do
-    Cachex.execute!(@cache_table, fn cache ->
-      Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
-    end)
-  end
-
-  def remove_from_banned_urls(url) when is_binary(url) do
-    Cachex.del(@cache_table, url(url))
-  end
-
-  def put_in_banned_urls(urls) when is_list(urls) do
-    Cachex.execute!(@cache_table, fn cache ->
-      Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
-    end)
-  end
-
-  def put_in_banned_urls(url) when is_binary(url) do
-    Cachex.put(@cache_table, url(url), true)
-  end
-
-  def url(url) when is_nil(url) or url == "", do: nil
-  def url("/" <> _ = url), do: url
-
-  def url(url) do
-    if enabled?() and url_proxiable?(url) do
-      encode_url(url)
-    else
-      url
-    end
-  end
-
-  @spec url_proxiable?(String.t()) :: boolean()
-  def url_proxiable?(url) do
-    not local?(url) and not whitelisted?(url)
-  end
-
-  def preview_url(url, preview_params \\ []) do
-    if preview_enabled?() do
-      encode_preview_url(url, preview_params)
-    else
-      url(url)
-    end
-  end
-
-  def enabled?, do: Config.get([:media_proxy, :enabled], false)
-
-  # Note: media proxy must be enabled for media preview proxy in order to load all
-  # non-local non-whitelisted URLs through it and be sure that body size constraint is preserved.
-  def preview_enabled?, do: enabled?() and !!Config.get([:media_preview_proxy, :enabled])
-
-  def local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
-
-  def whitelisted?(url) do
-    %{host: domain} = URI.parse(url)
-
-    mediaproxy_whitelist_domains =
-      [:media_proxy, :whitelist]
-      |> Config.get()
-      |> Enum.map(&maybe_get_domain_from_url/1)
-
-    whitelist_domains =
-      if base_url = Config.get([Upload, :base_url]) do
-        %{host: base_domain} = URI.parse(base_url)
-        [base_domain | mediaproxy_whitelist_domains]
-      else
-        mediaproxy_whitelist_domains
-      end
-
-    domain in whitelist_domains
-  end
-
-  defp maybe_get_domain_from_url("http" <> _ = url) do
-    URI.parse(url).host
-  end
-
-  defp maybe_get_domain_from_url(domain), do: domain
-
-  defp base64_sig64(url) do
-    base64 = Base.url_encode64(url, @base64_opts)
-
-    sig64 =
-      base64
-      |> signed_url()
-      |> Base.url_encode64(@base64_opts)
-
-    {base64, sig64}
-  end
-
-  def encode_url(url) do
-    {base64, sig64} = base64_sig64(url)
-
-    build_url(sig64, base64, filename(url))
-  end
-
-  def encode_preview_url(url, preview_params \\ []) do
-    {base64, sig64} = base64_sig64(url)
-
-    build_preview_url(sig64, base64, filename(url), preview_params)
-  end
-
-  def decode_url(sig, url) do
-    with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
-         signature when signature == sig <- signed_url(url) do
-      {:ok, Base.url_decode64!(url, @base64_opts)}
-    else
-      _ -> {:error, :invalid_signature}
-    end
-  end
-
-  defp signed_url(url) do
-    :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url)
-  end
-
-  def filename(url_or_path) do
-    if path = URI.parse(url_or_path).path, do: Path.basename(path)
-  end
-
-  def base_url do
-    Config.get([:media_proxy, :base_url], Web.base_url())
-  end
-
-  defp proxy_url(path, sig_base64, url_base64, filename) do
-    [
-      base_url(),
-      path,
-      sig_base64,
-      url_base64,
-      filename
-    ]
-    |> Enum.filter(& &1)
-    |> Path.join()
-  end
-
-  def build_url(sig_base64, url_base64, filename \\ nil) do
-    proxy_url("proxy", sig_base64, url_base64, filename)
-  end
-
-  def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
-    uri = proxy_url("proxy/preview", sig_base64, url_base64, filename)
-
-    UriHelper.modify_uri_params(uri, preview_params)
-  end
-
-  def verify_request_path_and_url(
-        %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
-        url
-      ) do
-    verify_request_path_and_url(request_path, url)
-  end
-
-  def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
-    filename = filename(url)
-
-    if filename && not basename_matches?(request_path, filename) do
-      {:wrong_filename, filename}
-    else
-      :ok
-    end
-  end
-
-  def verify_request_path_and_url(_, _), do: :ok
-
-  defp basename_matches?(path, filename) do
-    basename = Path.basename(path)
-    basename == filename or URI.decode(basename) == filename or URI.encode(basename) == filename
-  end
-end
diff --git a/lib/pleroma/web/metadata/providers/open_graph.ex b/lib/pleroma/web/metadata/providers/open_graph.ex
new file mode 100644
index 000000000..bb1b23208
--- /dev/null
+++ b/lib/pleroma/web/metadata/providers/open_graph.ex
@@ -0,0 +1,119 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
+  alias Pleroma.User
+  alias Pleroma.Web.Metadata
+  alias Pleroma.Web.Metadata.Providers.Provider
+  alias Pleroma.Web.Metadata.Utils
+
+  @behaviour Provider
+  @media_types ["image", "audio", "video"]
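+
+  # Every tag is built as a {:meta, attributes, []} tuple, which the metadata
+  # layer renders as a <meta/> element. An illustrative example (values made up):
+  #
+  #     {:meta, [property: "og:title", content: "lain"], []}
+  #     # renders as: <meta property="og:title" content="lain">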
+
+  @impl Provider
+  def build_tags(%{
+        object: object,
+        url: url,
+        user: user
+      }) do
+    attachments = build_attachments(object)
+    scrubbed_content = Utils.scrub_html_and_truncate(object)
+    # Zero width space
+    content =
+      if scrubbed_content != "" and scrubbed_content != "\u200B" do
+        ": “" <> scrubbed_content <> "”"
+      else
+        ""
+      end
+
+    # Most previews only show og:title which is inconvenient. Instagram
+    # hacks this by putting the description in the title and making the
+    # description longer prefixed by how many likes and shares the post
+    # has. Here we use the descriptive nickname in the title, and expand
+    # the full account & nickname in the description. We also use the cute^Wevil
+    # smart quotes around the status text like Instagram, too.
+    [
+      {:meta,
+       [
+         property: "og:title",
+         content: "#{user.name}" <> content
+       ], []},
+      {:meta, [property: "og:url", content: url], []},
+      {:meta,
+       [
+         property: "og:description",
+         content: "#{Utils.user_name_string(user)}" <> content
+       ], []},
+      {:meta, [property: "og:type", content: "website"], []}
+    ] ++
+      if attachments == [] or Metadata.activity_nsfw?(object) do
+        [
+          {:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))],
+           []},
+          {:meta, [property: "og:image:width", content: 150], []},
+          {:meta, [property: "og:image:height", content: 150], []}
+        ]
+      else
+        attachments
+      end
+  end
+
+  @impl Provider
+  def build_tags(%{user: user}) do
+    with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
+      [
+        {:meta,
+         [
+           property: "og:title",
+           content: Utils.user_name_string(user)
+         ], []},
+        {:meta, [property: "og:url", content: user.uri || user.ap_id], []},
+        {:meta, [property: "og:description", content: truncated_bio], []},
+        {:meta, [property: "og:type", content: "website"], []},
+        {:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))], []},
+        {:meta, [property: "og:image:width", content: 150], []},
+        {:meta, [property: "og:image:height", content: 150], []}
+      ]
+    end
+  end
+
+  defp build_attachments(%{data: %{"attachment" => attachments}}) do
+    Enum.reduce(attachments, [], fn attachment, acc ->
+      rendered_tags =
+        Enum.reduce(attachment["url"], [], fn url, acc ->
+          # TODO: Add additional properties to objects when we have the data available.
+          # Also, Whatsapp only wants JPEG or PNGs. It seems that if we add a second og:image
+          # object when a Video or GIF is attached it will display that in Whatsapp Rich Preview.
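+          #
+          # Each entry of attachment["url"] is expected to look roughly like
+          # (illustrative values):
+          #
+          #     %{"href" => "https://example.tld/media/a.png", "mediaType" => "image/png"}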
+          case Utils.fetch_media_type(@media_types, url["mediaType"]) do
+            "audio" ->
+              [
+                {:meta, [property: "og:audio", content: Utils.attachment_url(url["href"])], []}
+                | acc
+              ]
+
+            "image" ->
+              [
+                {:meta, [property: "og:image", content: Utils.attachment_url(url["href"])], []},
+                {:meta, [property: "og:image:width", content: 150], []},
+                {:meta, [property: "og:image:height", content: 150], []}
+                | acc
+              ]
+
+            "video" ->
+              [
+                {:meta, [property: "og:video", content: Utils.attachment_url(url["href"])], []}
+                | acc
+              ]
+
+            _ ->
+              acc
+          end
+        end)
+
+      acc ++ rendered_tags
+    end)
+  end
+
+  defp build_attachments(_), do: []
+end
diff --git a/lib/pleroma/web/metadata/providers/opengraph.ex b/lib/pleroma/web/metadata/providers/opengraph.ex
deleted file mode 100644
index bb1b23208..000000000
--- a/lib/pleroma/web/metadata/providers/opengraph.ex
+++ /dev/null
@@ -1,119 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
-  alias Pleroma.User
-  alias Pleroma.Web.Metadata
-  alias Pleroma.Web.Metadata.Providers.Provider
-  alias Pleroma.Web.Metadata.Utils
-
-  @behaviour Provider
-  @media_types ["image", "audio", "video"]
-
-  @impl Provider
-  def build_tags(%{
-        object: object,
-        url: url,
-        user: user
-      }) do
-    attachments = build_attachments(object)
-    scrubbed_content = Utils.scrub_html_and_truncate(object)
-    # Zero width space
-    content =
-      if scrubbed_content != "" and scrubbed_content != "\u200B" do
-        ": “" <> scrubbed_content <> "”"
-      else
-        ""
-      end
-
-    # Most previews only show og:title which is inconvenient. Instagram
-    # hacks this by putting the description in the title and making the
-    # description longer prefixed by how many likes and shares the post
-    # has. Here we use the descriptive nickname in the title, and expand
-    # the full account & nickname in the description. We also use the cute^Wevil
-    # smart quotes around the status text like Instagram, too.
-    [
-      {:meta,
-       [
-         property: "og:title",
-         content: "#{user.name}" <> content
-       ], []},
-      {:meta, [property: "og:url", content: url], []},
-      {:meta,
-       [
-         property: "og:description",
-         content: "#{Utils.user_name_string(user)}" <> content
-       ], []},
-      {:meta, [property: "og:type", content: "website"], []}
-    ] ++
-      if attachments == [] or Metadata.activity_nsfw?(object) do
-        [
-          {:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))],
-           []},
-          {:meta, [property: "og:image:width", content: 150], []},
-          {:meta, [property: "og:image:height", content: 150], []}
-        ]
-      else
-        attachments
-      end
-  end
-
-  @impl Provider
-  def build_tags(%{user: user}) do
-    with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
-      [
-        {:meta,
-         [
-           property: "og:title",
-           content: Utils.user_name_string(user)
-         ], []},
-        {:meta, [property: "og:url", content: user.uri || user.ap_id], []},
-        {:meta, [property: "og:description", content: truncated_bio], []},
-        {:meta, [property: "og:type", content: "website"], []},
-        {:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))], []},
-        {:meta, [property: "og:image:width", content: 150], []},
-        {:meta, [property: "og:image:height", content: 150], []}
-      ]
-    end
-  end
-
-  defp build_attachments(%{data: %{"attachment" => attachments}}) do
-    Enum.reduce(attachments, [], fn attachment, acc ->
-      rendered_tags =
-        Enum.reduce(attachment["url"], [], fn url, acc ->
-          # TODO: Add additional properties to objects when we have the data available.
-          # Also, Whatsapp only wants JPEG or PNGs. It seems that if we add a second og:image
-          # object when a Video or GIF is attached it will display that in Whatsapp Rich Preview.
-          case Utils.fetch_media_type(@media_types, url["mediaType"]) do
-            "audio" ->
-              [
-                {:meta, [property: "og:audio", content: Utils.attachment_url(url["href"])], []}
-                | acc
-              ]
-
-            "image" ->
-              [
-                {:meta, [property: "og:image", content: Utils.attachment_url(url["href"])], []},
-                {:meta, [property: "og:image:width", content: 150], []},
-                {:meta, [property: "og:image:height", content: 150], []}
-                | acc
-              ]
-
-            "video" ->
-              [
-                {:meta, [property: "og:video", content: Utils.attachment_url(url["href"])], []}
-                | acc
-              ]
-
-            _ ->
-              acc
-          end
-        end)
-
-      acc ++ rendered_tags
-    end)
-  end
-
-  defp build_attachments(_), do: []
-end
diff --git a/lib/pleroma/web/o_status/o_status_controller.ex b/lib/pleroma/web/o_status/o_status_controller.ex
index de1b0b3f0..9a4a350ae 100644
--- a/lib/pleroma/web/o_status/o_status_controller.ex
+++ b/lib/pleroma/web/o_status/o_status_controller.ex
@@ -5,7 +5,6 @@
 defmodule Pleroma.Web.OStatus.OStatusController do
   use Pleroma.Web, :controller
 
-  alias Fallback.RedirectController
   alias Pleroma.Activity
   alias Pleroma.Object
   alias Pleroma.Plugs.RateLimiter
@@ -13,6 +12,7 @@ defmodule Pleroma.Web.OStatus.OStatusController do
   alias Pleroma.Web.ActivityPub.ActivityPubController
   alias Pleroma.Web.ActivityPub.Visibility
   alias Pleroma.Web.Endpoint
+  alias Pleroma.Web.Fallback.RedirectController
   alias Pleroma.Web.Metadata.PlayerView
   alias Pleroma.Web.Router
 
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index e22b31b4c..48bb834b9 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -737,7 +737,7 @@ defmodule Pleroma.Web.Router do
     get("/check_password", MongooseIMController, :check_password)
   end
 
-  scope "/", Fallback do
+  scope "/", Pleroma.Web.Fallback do
     get("/registration/:token", RedirectController, :registration_page)
     get("/:maybe_nickname_or_id", RedirectController, :redirector_with_meta)
     get("/api*path", RedirectController, :api_not_implemented)
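Phoenix prefixes every controller in a scope with the scope's alias, so the routes above resolve against the relocated namespace. A minimal sketch of the resolution (illustrative):

    scope "/", Pleroma.Web.Fallback do
      # RedirectController here resolves to Pleroma.Web.Fallback.RedirectController
      get("/registration/:token", RedirectController, :registration_page)
    end
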
get("/:maybe_nickname_or_id", RedirectController, :redirector_with_meta) get("/api*path", RedirectController, :api_not_implemented) diff --git a/test/pleroma/web/feed/user_controller_test.exs b/test/pleroma/web/feed/user_controller_test.exs index 9a5610baa..a5dc0894b 100644 --- a/test/pleroma/web/feed/user_controller_test.exs +++ b/test/pleroma/web/feed/user_controller_test.exs @@ -206,7 +206,7 @@ defmodule Pleroma.Web.Feed.UserControllerTest do |> response(200) assert response == - Fallback.RedirectController.redirector_with_meta( + Pleroma.Web.Fallback.RedirectController.redirector_with_meta( conn, %{user: user} ).resp_body -- cgit v1.2.3