author    | Roman Chvanikov <chvanikoff@pm.me> | 2020-06-20 18:47:10 +0300
committer | Roman Chvanikov <chvanikoff@pm.me> | 2020-06-20 18:47:10 +0300
commit    | fb8775538bf4b6e0f705d29dba016d592d395ace (patch)
tree      | 9a27aba4b9d79df6e4143acba8ac2994e721519b /lib
parent    | ae48af590507aff8b58429ee7bae70da26d2c4d8 (diff)
parent    | 31761340fe7246ea9b57c0d0f7e61df85aa2af85 (diff)
download  | pleroma-fb8775538bf4b6e0f705d29dba016d592d395ace.tar.gz
Merge branch 'develop' into refactor/fe-bundles
Diffstat (limited to 'lib')
17 files changed, 360 insertions, 103 deletions
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index 0016a5874..521052817 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -150,7 +150,8 @@ defmodule Pleroma.Application do
       build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
       build_cachex("web_resp", limit: 2500),
       build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
-      build_cachex("failed_proxy_url", limit: 2500)
+      build_cachex("failed_proxy_url", limit: 2500),
+      build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
     ]
   end
 
diff --git a/lib/pleroma/plugs/uploaded_media.ex b/lib/pleroma/plugs/uploaded_media.ex
index 94147e0c4..40984cfc0 100644
--- a/lib/pleroma/plugs/uploaded_media.ex
+++ b/lib/pleroma/plugs/uploaded_media.ex
@@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
   import Pleroma.Web.Gettext
   require Logger
 
+  alias Pleroma.Web.MediaProxy
+
   @behaviour Plug
   # no slashes
   @path "media"
@@ -35,8 +37,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
       %{query_params: %{"name" => name}} = conn ->
         name = String.replace(name, "\"", "\\\"")
 
-        conn
-        |> put_resp_header("content-disposition", "filename=\"#{name}\"")
+        put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
 
       conn ->
         conn
@@ -47,7 +48,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
 
     with uploader <- Keyword.fetch!(config, :uploader),
          proxy_remote = Keyword.get(config, :proxy_remote, false),
-         {:ok, get_method} <- uploader.get_file(file) do
+         {:ok, get_method} <- uploader.get_file(file),
+         false <- media_is_banned(conn, get_method) do
       get_media(conn, get_method, proxy_remote, opts)
     else
       _ ->
@@ -59,6 +61,14 @@ defmodule Pleroma.Plugs.UploadedMedia do
 
   def call(conn, _opts), do: conn
 
+  defp media_is_banned(%{request_path: path} = _conn, {:static_dir, _}) do
+    MediaProxy.in_banned_urls(Pleroma.Web.base_url() <> path)
+  end
+
+  defp media_is_banned(_, {:url, url}), do: MediaProxy.in_banned_urls(url)
+
+  defp media_is_banned(_, _), do: false
+
   defp get_media(conn, {:static_dir, directory}, _, opts) do
     static_opts =
       Map.get(opts, :static_plug_opts)
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index f0ccc7c79..ae4f96aac 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -465,6 +465,7 @@ defmodule Pleroma.User do
     |> validate_format(:nickname, local_nickname_regex())
     |> validate_length(:bio, max: bio_limit)
     |> validate_length(:name, min: 1, max: name_limit)
+    |> validate_inclusion(:actor_type, ["Person", "Service"])
     |> put_fields()
     |> put_emoji()
     |> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
diff --git a/lib/pleroma/web/activity_pub/object_validators/note_validator.ex b/lib/pleroma/web/activity_pub/object_validators/note_validator.ex
index a10728ac6..56b93dde8 100644
--- a/lib/pleroma/web/activity_pub/object_validators/note_validator.ex
+++ b/lib/pleroma/web/activity_pub/object_validators/note_validator.ex
@@ -41,7 +41,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
     field(:announcements, {:array, :string}, default: [])
 
     # see if needed
-    field(:conversation, :string)
     field(:context_id, :string)
   end
 
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index 851f474b8..1c60ef8f5 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -172,8 +172,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
         object
         |> Map.put("inReplyTo", replied_object.data["id"])
Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id) - |> Map.put("conversation", replied_object.data["context"] || object["conversation"]) |> Map.put("context", replied_object.data["context"] || object["conversation"]) + |> Map.drop(["conversation"]) else e -> Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}") @@ -207,7 +207,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do object |> Map.put("context", context) - |> Map.put("conversation", context) + |> Map.drop(["conversation"]) end def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do @@ -458,7 +458,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do to: data["to"], object: object, actor: user, - context: object["conversation"], + context: object["context"], local: false, published: data["published"], additional: diff --git a/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex b/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex new file mode 100644 index 000000000..e2759d59f --- /dev/null +++ b/lib/pleroma/web/admin_api/controllers/media_proxy_cache_controller.ex @@ -0,0 +1,63 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.AdminAPI.MediaProxyCacheController do + use Pleroma.Web, :controller + + alias Pleroma.Plugs.OAuthScopesPlug + alias Pleroma.Web.ApiSpec.Admin, as: Spec + alias Pleroma.Web.MediaProxy + + plug(Pleroma.Web.ApiSpec.CastAndValidate) + + plug( + OAuthScopesPlug, + %{scopes: ["read:media_proxy_caches"], admin: true} when action in [:index] + ) + + plug( + OAuthScopesPlug, + %{scopes: ["write:media_proxy_caches"], admin: true} when action in [:purge, :delete] + ) + + action_fallback(Pleroma.Web.AdminAPI.FallbackController) + + defdelegate open_api_operation(action), to: Spec.MediaProxyCacheOperation + + def index(%{assigns: %{user: _}} = conn, params) do + cursor = + :banned_urls_cache + |> :ets.table([{:traverse, {:select, Cachex.Query.create(true, :key)}}]) + |> :qlc.cursor() + + urls = + case params.page do + 1 -> + :qlc.next_answers(cursor, params.page_size) + + _ -> + :qlc.next_answers(cursor, (params.page - 1) * params.page_size) + :qlc.next_answers(cursor, params.page_size) + end + + :qlc.delete_cursor(cursor) + + render(conn, "index.json", urls: urls) + end + + def delete(%{assigns: %{user: _}, body_params: %{urls: urls}} = conn, _) do + MediaProxy.remove_from_banned_urls(urls) + render(conn, "index.json", urls: urls) + end + + def purge(%{assigns: %{user: _}, body_params: %{urls: urls, ban: ban}} = conn, _) do + MediaProxy.Invalidation.purge(urls) + + if ban do + MediaProxy.put_in_banned_urls(urls) + end + + render(conn, "index.json", urls: urls) + end +end diff --git a/lib/pleroma/web/admin_api/views/media_proxy_cache_view.ex b/lib/pleroma/web/admin_api/views/media_proxy_cache_view.ex new file mode 100644 index 000000000..c97400beb --- /dev/null +++ b/lib/pleroma/web/admin_api/views/media_proxy_cache_view.ex @@ -0,0 +1,11 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.AdminAPI.MediaProxyCacheView do + use Pleroma.Web, :view + + def render("index.json", %{urls: urls}) do + %{urls: urls} + end +end diff --git a/lib/pleroma/web/api_spec/operations/admin/media_proxy_cache_operation.ex 
diff --git a/lib/pleroma/web/api_spec/operations/admin/media_proxy_cache_operation.ex b/lib/pleroma/web/api_spec/operations/admin/media_proxy_cache_operation.ex
new file mode 100644
index 000000000..0358cfbad
--- /dev/null
+++ b/lib/pleroma/web/api_spec/operations/admin/media_proxy_cache_operation.ex
@@ -0,0 +1,109 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Admin.MediaProxyCacheOperation do
+  alias OpenApiSpex.Operation
+  alias OpenApiSpex.Schema
+  alias Pleroma.Web.ApiSpec.Schemas.ApiError
+
+  import Pleroma.Web.ApiSpec.Helpers
+
+  def open_api_operation(action) do
+    operation = String.to_existing_atom("#{action}_operation")
+    apply(__MODULE__, operation, [])
+  end
+
+  def index_operation do
+    %Operation{
+      tags: ["Admin", "MediaProxyCache"],
+      summary: "Fetch a paginated list of all banned MediaProxy URLs in Cachex",
+      operationId: "AdminAPI.MediaProxyCacheController.index",
+      security: [%{"oAuth" => ["read:media_proxy_caches"]}],
+      parameters: [
+        Operation.parameter(
+          :page,
+          :query,
+          %Schema{type: :integer, default: 1},
+          "Page"
+        ),
+        Operation.parameter(
+          :page_size,
+          :query,
+          %Schema{type: :integer, default: 50},
+          "Number of statuses to return"
+        )
+      ],
+      responses: %{
+        200 => success_response()
+      }
+    }
+  end
+
+  def delete_operation do
+    %Operation{
+      tags: ["Admin", "MediaProxyCache"],
+      summary: "Remove a banned MediaProxy URL from Cachex",
+      operationId: "AdminAPI.MediaProxyCacheController.delete",
+      security: [%{"oAuth" => ["write:media_proxy_caches"]}],
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            type: :object,
+            required: [:urls],
+            properties: %{
+              urls: %Schema{type: :array, items: %Schema{type: :string, format: :uri}}
+            }
+          },
+          required: true
+        ),
+      responses: %{
+        200 => success_response(),
+        400 => Operation.response("Error", "application/json", ApiError)
+      }
+    }
+  end
+
+  def purge_operation do
+    %Operation{
+      tags: ["Admin", "MediaProxyCache"],
+      summary: "Purge and optionally ban a MediaProxy URL",
+      operationId: "AdminAPI.MediaProxyCacheController.purge",
+      security: [%{"oAuth" => ["write:media_proxy_caches"]}],
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            type: :object,
+            required: [:urls],
+            properties: %{
+              urls: %Schema{type: :array, items: %Schema{type: :string, format: :uri}},
+              ban: %Schema{type: :boolean, default: true}
+            }
+          },
+          required: true
+        ),
+      responses: %{
+        200 => success_response(),
+        400 => Operation.response("Error", "application/json", ApiError)
+      }
+    }
+  end
+
+  defp success_response do
+    Operation.response("Array of banned MediaProxy URLs in Cachex", "application/json", %Schema{
+      type: :object,
+      properties: %{
+        urls: %Schema{
+          type: :array,
+          items: %Schema{
+            type: :string,
+            format: :uri,
+            description: "MediaProxy URLs"
+          }
+        }
+      }
+    })
+  end
+end
diff --git a/lib/pleroma/web/mastodon_api/controllers/account_controller.ex b/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
index c38c2b895..adbbac624 100644
--- a/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/account_controller.ex
@@ -177,6 +177,9 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
       |> Maps.put_if_present(:pleroma_settings_store, params[:pleroma_settings_store])
       |> Maps.put_if_present(:default_scope, params[:default_scope])
       |> Maps.put_if_present(:default_scope, params["source"]["privacy"])
+      |> Maps.put_if_present(:actor_type, params[:bot], fn bot ->
else: {:ok, "Person"} + end) |> Maps.put_if_present(:actor_type, params[:actor_type]) changeset = User.update_changeset(user, user_params) diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex index 68beb69b8..6c40b8ccd 100644 --- a/lib/pleroma/web/mastodon_api/views/account_view.ex +++ b/lib/pleroma/web/mastodon_api/views/account_view.ex @@ -179,7 +179,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do 0 end - bot = user.actor_type in ["Application", "Service"] + bot = user.actor_type == "Service" emojis = Enum.map(user.emoji, fn {shortcode, raw_url} -> diff --git a/lib/pleroma/web/media_proxy/invalidation.ex b/lib/pleroma/web/media_proxy/invalidation.ex index c037ff13e..5808861e6 100644 --- a/lib/pleroma/web/media_proxy/invalidation.ex +++ b/lib/pleroma/web/media_proxy/invalidation.ex @@ -5,22 +5,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation do @moduledoc false - @callback purge(list(String.t()), map()) :: {:ok, String.t()} | {:error, String.t()} + @callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()} alias Pleroma.Config + alias Pleroma.Web.MediaProxy - @spec purge(list(String.t())) :: {:ok, String.t()} | {:error, String.t()} + @spec enabled?() :: boolean() + def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled]) + + @spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()} def purge(urls) do - [:media_proxy, :invalidation, :enabled] - |> Config.get() - |> do_purge(urls) + prepared_urls = prepare_urls(urls) + + if enabled?() do + do_purge(prepared_urls) + else + {:ok, prepared_urls} + end end - defp do_purge(true, urls) do + defp do_purge(urls) do provider = Config.get([:media_proxy, :invalidation, :provider]) options = Config.get(provider) provider.purge(urls, options) end - defp do_purge(_, _), do: :ok + def prepare_urls(urls) do + urls + |> List.wrap() + |> Enum.map(&MediaProxy.url/1) + end end diff --git a/lib/pleroma/web/media_proxy/invalidations/http.ex b/lib/pleroma/web/media_proxy/invalidations/http.ex index 07248df6e..bb81d8888 100644 --- a/lib/pleroma/web/media_proxy/invalidations/http.ex +++ b/lib/pleroma/web/media_proxy/invalidations/http.ex @@ -9,10 +9,10 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do require Logger @impl Pleroma.Web.MediaProxy.Invalidation - def purge(urls, opts) do - method = Map.get(opts, :method, :purge) - headers = Map.get(opts, :headers, []) - options = Map.get(opts, :options, []) + def purge(urls, opts \\ []) do + method = Keyword.get(opts, :method, :purge) + headers = Keyword.get(opts, :headers, []) + options = Keyword.get(opts, :options, []) Logger.debug("Running cache purge: #{inspect(urls)}") @@ -22,7 +22,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do end end) - {:ok, "success"} + {:ok, urls} end defp do_purge(method, url, headers, options) do diff --git a/lib/pleroma/web/media_proxy/invalidations/script.ex b/lib/pleroma/web/media_proxy/invalidations/script.ex index 6be782132..d32ffc50b 100644 --- a/lib/pleroma/web/media_proxy/invalidations/script.ex +++ b/lib/pleroma/web/media_proxy/invalidations/script.ex @@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do require Logger @impl Pleroma.Web.MediaProxy.Invalidation - def purge(urls, %{script_path: script_path} = _options) do + def purge(urls, opts \\ []) do args = urls |> List.wrap() |> Enum.uniq() |> Enum.join(" ") - path = Path.expand(script_path) - - Logger.debug("Running cache purge: 
#{inspect(urls)}, #{path}") - - case do_purge(path, [args]) do - {result, exit_status} when exit_status > 0 -> - Logger.error("Error while cache purge: #{inspect(result)}") - {:error, inspect(result)} - - _ -> - {:ok, "success"} - end + opts + |> Keyword.get(:script_path) + |> do_purge([args]) + |> handle_result(urls) end - def purge(_, _), do: {:error, "not found script path"} - - defp do_purge(path, args) do + defp do_purge(script_path, args) when is_binary(script_path) do + path = Path.expand(script_path) + Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}") System.cmd(path, args) rescue - error -> {inspect(error), 1} + error -> error + end + + defp do_purge(_, _), do: {:error, "not found script path"} + + defp handle_result({_result, 0}, urls), do: {:ok, urls} + defp handle_result({:error, error}, urls), do: handle_result(error, urls) + + defp handle_result(error, _) do + Logger.error("Error while cache purge: #{inspect(error)}") + {:error, inspect(error)} end end diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex index b2b524524..077fabe47 100644 --- a/lib/pleroma/web/media_proxy/media_proxy.ex +++ b/lib/pleroma/web/media_proxy/media_proxy.ex @@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do alias Pleroma.Config alias Pleroma.Upload alias Pleroma.Web + alias Pleroma.Web.MediaProxy.Invalidation @base64_opts [padding: false] + @spec in_banned_urls(String.t()) :: boolean() + def in_banned_urls(url), do: elem(Cachex.exists?(:banned_urls_cache, url(url)), 1) + + def remove_from_banned_urls(urls) when is_list(urls) do + Cachex.execute!(:banned_urls_cache, fn cache -> + Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1)) + end) + end + + def remove_from_banned_urls(url) when is_binary(url) do + Cachex.del(:banned_urls_cache, url(url)) + end + + def put_in_banned_urls(urls) when is_list(urls) do + Cachex.execute!(:banned_urls_cache, fn cache -> + Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true)) + end) + end + + def put_in_banned_urls(url) when is_binary(url) do + Cachex.put(:banned_urls_cache, url(url), true) + end + def url(url) when is_nil(url) or url == "", do: nil def url("/" <> _ = url), do: url def url(url) do - if disabled?() or local?(url) or whitelisted?(url) do + if disabled?() or not url_proxiable?(url) do url else encode_url(url) end end + @spec url_proxiable?(String.t()) :: boolean() + def url_proxiable?(url) do + if local?(url) or whitelisted?(url) do + false + else + true + end + end + defp disabled?, do: !Config.get([:media_proxy, :enabled], false) defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url()) diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex index 4657a4383..9a64b0ef3 100644 --- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex +++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex @@ -14,10 +14,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do with config <- Pleroma.Config.get([:media_proxy], []), true <- Keyword.get(config, :enabled, false), {:ok, url} <- MediaProxy.decode_url(sig64, url64), + {_, false} <- {:in_banned_urls, MediaProxy.in_banned_urls(url)}, :ok <- filename_matches(params, conn.request_path, url) do ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts)) else - false -> + error when error in [false, {:in_banned_urls, true}] -> send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404)) {:error, 
       {:error, :invalid_signature} ->
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index 04dbaa290..25a67cdcb 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -209,6 +209,10 @@ defmodule Pleroma.Web.Router do
     post("/oauth_app", OAuthAppController, :create)
     patch("/oauth_app/:id", OAuthAppController, :update)
     delete("/oauth_app/:id", OAuthAppController, :delete)
+
+    get("/media_proxy_caches", MediaProxyCacheController, :index)
+    post("/media_proxy_caches/delete", MediaProxyCacheController, :delete)
+    post("/media_proxy_caches/purge", MediaProxyCacheController, :purge)
   end
 
   scope "/api/pleroma/emoji", Pleroma.Web.PleromaAPI do
diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex
index 49352db2a..8deeabda0 100644
--- a/lib/pleroma/workers/attachments_cleanup_worker.ex
+++ b/lib/pleroma/workers/attachments_cleanup_worker.ex
@@ -18,13 +18,19 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
         },
         _job
       ) do
-    hrefs =
-      Enum.flat_map(attachments, fn attachment ->
-        Enum.map(attachment["url"], & &1["href"])
-      end)
+    attachments
+    |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end)
+    |> fetch_objects
+    |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+    |> filter_objects
+    |> do_clean
 
-    names = Enum.map(attachments, & &1["name"])
+    {:ok, :success}
+  end
+
+  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
 
+  defp do_clean({object_ids, attachment_urls}) do
     uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
 
     prefix =
@@ -39,68 +45,70 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
         "/"
       )
 
-    # find all objects for copies of the attachments, name and actor doesn't matter here
-    object_ids_and_hrefs =
-      from(o in Object,
-        where:
-          fragment(
-            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
-            o.data,
-            o.data,
-            ^hrefs
-          )
-      )
-      # The query above can be time consumptive on large instances until we
-      # refactor how uploads are stored
-      |> Repo.all(timeout: :infinity)
-      # we should delete 1 object for any given attachment, but don't delete
-      # files if there are more than 1 object for it
-      |> Enum.reduce(%{}, fn %{
-                               id: id,
-                               data: %{
-                                 "url" => [%{"href" => href}],
-                                 "actor" => obj_actor,
-                                 "name" => name
-                               }
-                             },
-                             acc ->
-        Map.update(acc, href, %{id: id, count: 1}, fn val ->
-          case obj_actor == actor and name in names do
-            true ->
-              # set id of the actor's object that will be deleted
-              %{val | id: id, count: val.count + 1}
-
-            false ->
-              # another actor's object, just increase count to not delete file
-              %{val | count: val.count + 1}
-          end
-        end)
-      end)
-      |> Enum.map(fn {href, %{id: id, count: count}} ->
-        # only delete files that have single instance
-        with 1 <- count do
-          href
-          |> String.trim_leading("#{base_url}/#{prefix}")
-          |> uploader.delete_file()
-
-          {id, href}
-        else
-          _ -> {id, nil}
-        end
-      end)
+    Enum.each(attachment_urls, fn href ->
+      href
+      |> String.trim_leading("#{base_url}/#{prefix}")
+      |> uploader.delete_file()
+    end)
 
-    object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
+    delete_objects(object_ids)
+  end
 
-    from(o in Object, where: o.id in ^object_ids)
-    |> Repo.delete_all()
+  defp delete_objects([_ | _] = object_ids) do
+    Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
+  end
 
-    object_ids_and_hrefs
-    |> Enum.filter(fn {_, href} -> not is_nil(href) end)
-    |> Enum.map(&elem(&1, 1))
-    |> Pleroma.Web.MediaProxy.Invalidation.purge()
+  defp delete_objects(_), do: :ok
 
-    {:ok, :success}
+  # we should delete 1 object for any given attachment, but don't delete
+  # files if there are more than 1 object for it
+  defp filter_objects(objects) do
+    Enum.reduce(objects, {[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
+      with 1 <- count do
+        {ids ++ [id], hrefs ++ [href]}
+      else
+        _ -> {ids ++ [id], hrefs}
+      end
+    end)
   end
 
-  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+  defp prepare_objects(objects, actor, names) do
+    objects
+    |> Enum.reduce(%{}, fn %{
+                             id: id,
+                             data: %{
+                               "url" => [%{"href" => href}],
+                               "actor" => obj_actor,
+                               "name" => name
+                             }
+                           },
+                           acc ->
+      Map.update(acc, href, %{id: id, count: 1}, fn val ->
+        case obj_actor == actor and name in names do
+          true ->
+            # set id of the actor's object that will be deleted
+            %{val | id: id, count: val.count + 1}
+
+          false ->
+            # another actor's object, just increase count to not delete file
+            %{val | count: val.count + 1}
+        end
+      end)
+    end)
+  end
+
+  defp fetch_objects(hrefs) do
+    from(o in Object,
+      where:
+        fragment(
+          "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+          o.data,
+          o.data,
+          ^hrefs
+        )
+    )
+    # The query above can be time consumptive on large instances until we
+    # refactor how uploads are stored
+    |> Repo.all(timeout: :infinity)
+  end
 end
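
A rough sketch (not part of the diff) of how the banned-URLs machinery above fits together, run from `iex -S mix` on a node with this merge applied. The example URL is invented for illustration, and whether `purge/1` reaches a real cache depends on the instance's `:media_proxy` invalidation settings.

```elixir
# Hypothetical remote attachment URL, for illustration only.
url = "https://remote.example/media/unwanted.png"

# Ask the configured invalidation provider (HTTP or script) to drop the object;
# with this merge, purge/1 returns {:ok, prepared_urls} rather than {:ok, "success"}.
Pleroma.Web.MediaProxy.Invalidation.purge([url])

# Ban the URL so MediaProxyController and Plugs.UploadedMedia answer 404 for it.
Pleroma.Web.MediaProxy.put_in_banned_urls([url])

# Both the stored keys and the lookup go through MediaProxy.url/1,
# so the check matches what was put into the cache above.
Pleroma.Web.MediaProxy.in_banned_urls(url)
#=> true

# Entries live in the :banned_urls_cache Cachex started in application.ex
# (default TTL of 30 days); lifting the ban removes the key again.
Pleroma.Web.MediaProxy.remove_from_banned_urls([url])
```

The new `AdminAPI.MediaProxyCacheController` routes added in router.ex (`GET /media_proxy_caches`, `POST /media_proxy_caches/delete`, `POST /media_proxy_caches/purge` inside the admin scope) are thin wrappers around these same functions.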
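For the Script invalidation backend, the refactor also changes how options are read: `Invalidation.purge/1` now looks up the provider module, passes `Config.get(provider)` to `provider.purge/2` as a keyword list, and the Script provider reads `:script_path` from it. The config sketch below is an assumption inferred from those lookups, not taken from the diff; the script path and enabled flags are placeholders to adjust per deployment.

```elixir
# Illustrative config sketch (e.g. config/prod.secret.exs); values are placeholders.
import Config

config :pleroma, :media_proxy,
  enabled: true,
  invalidation: [
    # Invalidation.enabled?/0 reads [:media_proxy, :invalidation, :enabled] ...
    enabled: true,
    # ... and do_purge/1 reads the provider module from [:media_proxy, :invalidation, :provider].
    provider: Pleroma.Web.MediaProxy.Invalidation.Script
  ]

# Config.get(provider) is handed to provider.purge/2 as a keyword list,
# from which the Script backend now takes :script_path (hypothetical path below).
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
  script_path: "/usr/local/bin/purge-media-cache.sh"
```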