Diffstat (limited to 'lib')
-rw-r--r--  lib/pleroma/application.ex                              |   3
-rw-r--r--  lib/pleroma/web/media_proxy/invalidation.ex             |  28
-rw-r--r--  lib/pleroma/web/media_proxy/invalidations/http.ex       |   8
-rw-r--r--  lib/pleroma/web/media_proxy/invalidations/script.ex     |  36
-rw-r--r--  lib/pleroma/web/media_proxy/media_proxy.ex              |  19
-rw-r--r--  lib/pleroma/web/media_proxy/media_proxy_controller.ex   |   3
-rw-r--r--  lib/pleroma/workers/attachments_cleanup_worker.ex       | 115
7 files changed, 127 insertions(+), 85 deletions(-)
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index 9d3d92b38..7ddb03529 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -148,7 +148,8 @@ defmodule Pleroma.Application do
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
- build_cachex("failed_proxy_url", limit: 2500)
+ build_cachex("failed_proxy_url", limit: 2500),
+ build_cachex("deleted_urls", default_ttl: :timer.minutes(60), limit: 2500)
]
end
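The new "deleted_urls" cache gets a 60-minute default TTL, so purged URLs are only blocked temporarily. A minimal sketch of how entries in that cache behave (the cache name :deleted_urls_cache matches the one used later in this diff; the URL is illustrative):

    # an entry expires after the cache's default_ttl (60 minutes here)
    Cachex.put(:deleted_urls_cache, "https://example.com/media/foo.png", true)
    # membership checks return {:ok, true | false}
    {:ok, true} = Cachex.exists?(:deleted_urls_cache, "https://example.com/media/foo.png")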
diff --git a/lib/pleroma/web/media_proxy/invalidation.ex b/lib/pleroma/web/media_proxy/invalidation.ex
index c037ff13e..324f8a7ee 100644
--- a/lib/pleroma/web/media_proxy/invalidation.ex
+++ b/lib/pleroma/web/media_proxy/invalidation.ex
@@ -5,22 +5,32 @@
defmodule Pleroma.Web.MediaProxy.Invalidation do
@moduledoc false
- @callback purge(list(String.t()), map()) :: {:ok, String.t()} | {:error, String.t()}
+ @callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}
alias Pleroma.Config
+ alias Pleroma.Web.MediaProxy
- @spec purge(list(String.t())) :: {:ok, String.t()} | {:error, String.t()}
+ @spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
def purge(urls) do
- [:media_proxy, :invalidation, :enabled]
- |> Config.get()
- |> do_purge(urls)
+ prepared_urls = prepare_urls(urls)
+
+ if Config.get([:media_proxy, :invalidation, :enabled]) do
+ result = do_purge(prepared_urls)
+ MediaProxy.remove_from_deleted_urls(prepared_urls)
+ result
+ else
+ {:ok, prepared_urls}
+ end
end
- defp do_purge(true, urls) do
+ defp do_purge(urls) do
provider = Config.get([:media_proxy, :invalidation, :provider])
- options = Config.get(provider)
- provider.purge(urls, options)
+ provider.purge(urls, Config.get(provider))
end
- defp do_purge(_, _), do: :ok
+ defp prepare_urls(urls) do
+ urls
+ |> List.wrap()
+ |> Enum.map(&MediaProxy.url(&1))
+ end
end
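purge/1 now short-circuits on the :enabled flag itself instead of threading it through do_purge/2, and it always normalizes its input with List.wrap/1 and MediaProxy.url/1. A hedged usage sketch (the config values are illustrative, not project defaults):

    config :pleroma, :media_proxy,
      invalidation: [enabled: true, provider: Pleroma.Web.MediaProxy.Invalidation.Http]

    # accepts a single URL or a list; returns {:ok, urls} or {:error, reason}
    Pleroma.Web.MediaProxy.Invalidation.purge("https://example.com/media/foo.png")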
diff --git a/lib/pleroma/web/media_proxy/invalidations/http.ex b/lib/pleroma/web/media_proxy/invalidations/http.ex
index 07248df6e..3694b56e8 100644
--- a/lib/pleroma/web/media_proxy/invalidations/http.ex
+++ b/lib/pleroma/web/media_proxy/invalidations/http.ex
@@ -10,9 +10,9 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
@impl Pleroma.Web.MediaProxy.Invalidation
def purge(urls, opts) do
- method = Map.get(opts, :method, :purge)
- headers = Map.get(opts, :headers, [])
- options = Map.get(opts, :options, [])
+ method = Keyword.get(opts, :method, :purge)
+ headers = Keyword.get(opts, :headers, [])
+ options = Keyword.get(opts, :options, [])
Logger.debug("Running cache purge: #{inspect(urls)}")
@@ -22,7 +22,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
end
end)
- {:ok, "success"}
+ {:ok, urls}
end
defp do_purge(method, url, headers, options) do
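Since the options are now read with Keyword.get/3, the HTTP provider expects its configuration as a keyword list. An illustrative sketch of that config (the values shown are assumptions, not verified defaults):

    config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
      method: :purge,
      headers: [],
      options: []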
diff --git a/lib/pleroma/web/media_proxy/invalidations/script.ex b/lib/pleroma/web/media_proxy/invalidations/script.ex
index 6be782132..d41d647bb 100644
--- a/lib/pleroma/web/media_proxy/invalidations/script.ex
+++ b/lib/pleroma/web/media_proxy/invalidations/script.ex
@@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
require Logger
@impl Pleroma.Web.MediaProxy.Invalidation
- def purge(urls, %{script_path: script_path} = _options) do
+ def purge(urls, opts) do
args =
urls
|> List.wrap()
|> Enum.uniq()
|> Enum.join(" ")
- path = Path.expand(script_path)
-
- Logger.debug("Running cache purge: #{inspect(urls)}, #{path}")
-
- case do_purge(path, [args]) do
- {result, exit_status} when exit_status > 0 ->
- Logger.error("Error while cache purge: #{inspect(result)}")
- {:error, inspect(result)}
-
- _ ->
- {:ok, "success"}
- end
+ opts
+ |> Keyword.get(:script_path, nil)
+ |> do_purge([args])
+ |> handle_result(urls)
end
- def purge(_, _), do: {:error, "not found script path"}
-
- defp do_purge(path, args) do
+ defp do_purge(script_path, args) when is_binary(script_path) do
+ path = Path.expand(script_path)
+ Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
System.cmd(path, args)
rescue
- error -> {inspect(error), 1}
+ error -> error
+ end
+
+ defp do_purge(_, _), do: {:error, "not found script path"}
+
+ defp handle_result({_result, 0}, urls), do: {:ok, urls}
+ defp handle_result({:error, error}, urls), do: handle_result(error, urls)
+
+ defp handle_result(error, _) do
+ Logger.error("Error while cache purge: #{inspect(error)}")
+ {:error, inspect(error)}
end
end
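The script provider likewise reads :script_path from a keyword list and funnels every outcome through handle_result/2, so a missing path now surfaces as a logged {:error, "not found script path"} rather than a separate public clause. An illustrative config (the path is an example, not a shipped default):

    config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
      script_path: "/usr/local/bin/purge-cache.sh"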
diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex
index b2b524524..4a0fec288 100644
--- a/lib/pleroma/web/media_proxy/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy.ex
@@ -9,6 +9,25 @@ defmodule Pleroma.Web.MediaProxy do
@base64_opts [padding: false]
+ @spec in_deleted_urls(String.t()) :: boolean()
+ def in_deleted_urls(url), do: elem(Cachex.exists?(:deleted_urls_cache, url), 1)
+
+ def remove_from_deleted_urls(urls) when is_list(urls) do
+ Enum.each(urls, &remove_from_deleted_urls/1)
+ end
+
+ def remove_from_deleted_urls(url) when is_binary(url) do
+ Cachex.del(:deleted_urls_cache, url)
+ end
+
+ def put_in_deleted_urls(urls) when is_list(urls) do
+ Enum.each(urls, &put_in_deleted_urls/1)
+ end
+
+ def put_in_deleted_urls(url) when is_binary(url) do
+ Cachex.put(:deleted_urls_cache, url, true)
+ end
+
def url(url) when is_nil(url) or url == "", do: nil
def url("/" <> _ = url), do: url
diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
index 4657a4383..ff0158d83 100644
--- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex
@@ -14,10 +14,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
with config <- Pleroma.Config.get([:media_proxy], []),
true <- Keyword.get(config, :enabled, false),
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
+ {_, false} <- {:in_deleted_urls, MediaProxy.in_deleted_urls(url)},
:ok <- filename_matches(params, conn.request_path, url) do
ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
else
- false ->
+ error when error in [false, {:in_deleted_urls, true}] ->
send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
{:error, :invalid_signature} ->
diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex
index 49352db2a..5fb0b9584 100644
--- a/lib/pleroma/workers/attachments_cleanup_worker.ex
+++ b/lib/pleroma/workers/attachments_cleanup_worker.ex
@@ -23,8 +23,6 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
Enum.map(attachment["url"], & &1["href"])
end)
- names = Enum.map(attachments, & &1["name"])
-
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
prefix =
@@ -40,67 +38,78 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
)
# find all objects for copies of the attachments, name and actor doesn't matter here
- object_ids_and_hrefs =
- from(o in Object,
- where:
- fragment(
- "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
- o.data,
- o.data,
- ^hrefs
- )
- )
- # The query above can be time consumptive on large instances until we
- # refactor how uploads are stored
- |> Repo.all(timeout: :infinity)
- # we should delete 1 object for any given attachment, but don't delete
- # files if there are more than 1 object for it
- |> Enum.reduce(%{}, fn %{
- id: id,
- data: %{
- "url" => [%{"href" => href}],
- "actor" => obj_actor,
- "name" => name
- }
- },
- acc ->
- Map.update(acc, href, %{id: id, count: 1}, fn val ->
- case obj_actor == actor and name in names do
- true ->
- # set id of the actor's object that will be deleted
- %{val | id: id, count: val.count + 1}
-
- false ->
- # another actor's object, just increase count to not delete file
- %{val | count: val.count + 1}
- end
- end)
- end)
- |> Enum.map(fn {href, %{id: id, count: count}} ->
- # only delete files that have single instance
+ {object_ids, attachment_urls} =
+ hrefs
+ |> fetch_objects
+ |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+ |> Enum.reduce({[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
with 1 <- count do
- href
- |> String.trim_leading("#{base_url}/#{prefix}")
- |> uploader.delete_file()
-
- {id, href}
+ {ids ++ [id], hrefs ++ [href]}
else
- _ -> {id, nil}
+ _ -> {ids ++ [id], hrefs}
end
end)
- object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
+ Pleroma.Web.MediaProxy.put_in_deleted_urls(attachment_urls)
- from(o in Object, where: o.id in ^object_ids)
- |> Repo.delete_all()
+ Enum.each(attachment_urls, fn href ->
+ href
+ |> String.trim_leading("#{base_url}/#{prefix}")
+ |> uploader.delete_file()
+ end)
- object_ids_and_hrefs
- |> Enum.filter(fn {_, href} -> not is_nil(href) end)
- |> Enum.map(&elem(&1, 1))
- |> Pleroma.Web.MediaProxy.Invalidation.purge()
+ Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
+
+ cache_purge(attachment_urls)
{:ok, :success}
end
def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+
+ defp cache_purge(attachment_urls) do
+ Pleroma.Web.MediaProxy.Invalidation.purge(attachment_urls)
+ end
+
+ # we should delete 1 object for any given attachment, but don't delete
+ # files if there are more than 1 object for it
+ def prepare_objects(objects, actor, names) do
+ objects
+ |> Enum.reduce(%{}, fn %{
+ id: id,
+ data: %{
+ "url" => [%{"href" => href}],
+ "actor" => obj_actor,
+ "name" => name
+ }
+ },
+ acc ->
+ Map.update(acc, href, %{id: id, count: 1}, fn val ->
+ case obj_actor == actor and name in names do
+ true ->
+ # set id of the actor's object that will be deleted
+ %{val | id: id, count: val.count + 1}
+
+ false ->
+ # another actor's object, just increase count to not delete file
+ %{val | count: val.count + 1}
+ end
+ end)
+ end)
+ end
+
+ defp fetch_objects(hrefs) do
+ from(o in Object,
+ where:
+ fragment(
+ "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+ o.data,
+ o.data,
+ ^hrefs
+ )
+ )
+ # The query above can be time consumptive on large instances until we
+ # refactor how uploads are stored
+ |> Repo.all(timeout: :infinity)
+ end
end
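prepare_objects/3 groups the fetched objects by attachment href and counts how many objects reference each file; the follow-up reduce then deletes the collected object ids either way but only removes files with a single reference. A minimal sketch of the shape it produces (the data is illustrative):

    objects = [
      %{id: 1, data: %{"url" => [%{"href" => "https://example.com/a.png"}], "actor" => "alice", "name" => "a.png"}},
      %{id: 2, data: %{"url" => [%{"href" => "https://example.com/a.png"}], "actor" => "bob", "name" => "a.png"}},
      %{id: 3, data: %{"url" => [%{"href" => "https://example.com/b.png"}], "actor" => "alice", "name" => "b.png"}}
    ]

    prepare_objects(objects, "alice", ["a.png", "b.png"])
    #=> %{
    #     "https://example.com/a.png" => %{id: 1, count: 2},  # referenced twice: file is kept
    #     "https://example.com/b.png" => %{id: 3, count: 1}   # single reference: file is deleted
    #   }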