author     Maksim Pechnikov <parallel588@gmail.com>   2020-05-26 09:49:20 +0300
committer  Maksim Pechnikov <parallel588@gmail.com>   2020-05-26 09:49:20 +0300
commit     04a26ab0a80a5c2ae87d329a4a83c89532849862 (patch)
tree       2bdfae3208352bdc69d038d54ae26f655c532513
parent     65d96929755ca7240d04b68220fce443285b29e8 (diff)
download   pleroma-04a26ab0a80a5c2ae87d329a4a83c89532849862.tar.gz
added deleted_urls in AttachmentsCleanupWorker
-rw-r--r--  lib/pleroma/workers/attachments_cleanup_worker.ex      115
-rw-r--r--  test/web/media_proxy/invalidation_test.exs                2
-rw-r--r--  test/web/media_proxy/invalidations/http_test.exs          2
-rw-r--r--  test/web/media_proxy/invalidations/script_test.exs        2
-rw-r--r--  test/web/media_proxy/media_proxy_controller_test.exs      2
5 files changed, 66 insertions(+), 57 deletions(-)
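
For orientation, a condensed sketch of the cleanup flow this commit introduces, assembled from the hunks below. It is a simplified restatement, not the committed code verbatim: the Ecto query lives in fetch_objects/1, and hrefs, attachments, actor, base_url, prefix and uploader are bound earlier in the worker.

    # sketch of the new flow in AttachmentsCleanupWorker.perform/2
    {object_ids, attachment_urls} =
      hrefs
      |> fetch_objects()
      |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
      |> Enum.reduce({[], []}, fn
        # exactly one object references this file: delete the file as well
        {href, %{id: id, count: 1}}, {ids, urls} -> {[id | ids], [href | urls]}
        # referenced by other objects too: delete only the object, keep the file
        {_href, %{id: id}}, {ids, urls} -> {[id | ids], urls}
      end)

    # register the removed URLs with the media proxy
    # (this backs the :deleted_urls_cache reset in the tests)
    Pleroma.Web.MediaProxy.put_in_deleted_urls(attachment_urls)

    # remove the files from storage, drop the objects, then purge proxy caches
    Enum.each(attachment_urls, fn href ->
      href
      |> String.trim_leading("#{base_url}/#{prefix}")
      |> uploader.delete_file()
    end)

    Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
    cache_purge(attachment_urls)
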
diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex
index 49352db2a..5fb0b9584 100644
--- a/lib/pleroma/workers/attachments_cleanup_worker.ex
+++ b/lib/pleroma/workers/attachments_cleanup_worker.ex
@@ -23,8 +23,6 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
Enum.map(attachment["url"], & &1["href"])
end)
- names = Enum.map(attachments, & &1["name"])
-
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
prefix =
@@ -40,67 +38,78 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
)
# find all objects for copies of the attachments, name and actor doesn't matter here
- object_ids_and_hrefs =
- from(o in Object,
- where:
- fragment(
- "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
- o.data,
- o.data,
- ^hrefs
- )
- )
- # The query above can be time consumptive on large instances until we
- # refactor how uploads are stored
- |> Repo.all(timeout: :infinity)
- # we should delete 1 object for any given attachment, but don't delete
- # files if there are more than 1 object for it
- |> Enum.reduce(%{}, fn %{
- id: id,
- data: %{
- "url" => [%{"href" => href}],
- "actor" => obj_actor,
- "name" => name
- }
- },
- acc ->
- Map.update(acc, href, %{id: id, count: 1}, fn val ->
- case obj_actor == actor and name in names do
- true ->
- # set id of the actor's object that will be deleted
- %{val | id: id, count: val.count + 1}
-
- false ->
- # another actor's object, just increase count to not delete file
- %{val | count: val.count + 1}
- end
- end)
- end)
- |> Enum.map(fn {href, %{id: id, count: count}} ->
- # only delete files that have single instance
+ {object_ids, attachment_urls} =
+ hrefs
+ |> fetch_objects
+ |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+ |> Enum.reduce({[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
with 1 <- count do
- href
- |> String.trim_leading("#{base_url}/#{prefix}")
- |> uploader.delete_file()
-
- {id, href}
+ {ids ++ [id], hrefs ++ [href]}
else
- _ -> {id, nil}
+ _ -> {ids ++ [id], hrefs}
end
end)
- object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
+ Pleroma.Web.MediaProxy.put_in_deleted_urls(attachment_urls)
- from(o in Object, where: o.id in ^object_ids)
- |> Repo.delete_all()
+ Enum.each(attachment_urls, fn href ->
+ href
+ |> String.trim_leading("#{base_url}/#{prefix}")
+ |> uploader.delete_file()
+ end)
- object_ids_and_hrefs
- |> Enum.filter(fn {_, href} -> not is_nil(href) end)
- |> Enum.map(&elem(&1, 1))
- |> Pleroma.Web.MediaProxy.Invalidation.purge()
+ Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
+
+ cache_purge(attachment_urls)
{:ok, :success}
end
def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+
+ defp cache_purge(attachment_urls) do
+ Pleroma.Web.MediaProxy.Invalidation.purge(attachment_urls)
+ end
+
+ # we should delete one object for any given attachment, but don't delete
+ # files if there is more than one object for it
+ def prepare_objects(objects, actor, names) do
+ objects
+ |> Enum.reduce(%{}, fn %{
+ id: id,
+ data: %{
+ "url" => [%{"href" => href}],
+ "actor" => obj_actor,
+ "name" => name
+ }
+ },
+ acc ->
+ Map.update(acc, href, %{id: id, count: 1}, fn val ->
+ case obj_actor == actor and name in names do
+ true ->
+ # set id of the actor's object that will be deleted
+ %{val | id: id, count: val.count + 1}
+
+ false ->
+ # another actor's object, just increase count to not delete file
+ %{val | count: val.count + 1}
+ end
+ end)
+ end)
+ end
+
+ defp fetch_objects(hrefs) do
+ from(o in Object,
+ where:
+ fragment(
+ "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+ o.data,
+ o.data,
+ ^hrefs
+ )
+ )
+ # The query above can be time-consuming on large instances until we
+ # refactor how uploads are stored
+ |> Repo.all(timeout: :infinity)
+ end
end
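
Pleroma.Web.MediaProxy.put_in_deleted_urls/1 itself is not part of this diff. Going by the :deleted_urls_cache Cachex cache that the test changes below reset, a minimal sketch of what it might look like; the cache name comes from the tests, while the stored value and the read-side helper name are assumptions:

    # sketch only, not the committed implementation
    def put_in_deleted_urls(urls) when is_list(urls) do
      Enum.each(urls, fn url -> Cachex.put(:deleted_urls_cache, url, true) end)
    end

    # assumed read-side check the media proxy controller could use
    def deleted?(url) do
      match?({:ok, true}, Cachex.get(:deleted_urls_cache, url))
    end
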
diff --git a/test/web/media_proxy/invalidation_test.exs b/test/web/media_proxy/invalidation_test.exs
index 31b5207bf..edddc385b 100644
--- a/test/web/media_proxy/invalidation_test.exs
+++ b/test/web/media_proxy/invalidation_test.exs
@@ -12,7 +12,7 @@ defmodule Pleroma.Web.MediaProxy.InvalidationTest do
setup do: clear_config([:media_proxy])
setup do
- on_exit(fn -> Cachex.purge(:deleted_urls_cache) end)
+ on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
diff --git a/test/web/media_proxy/invalidations/http_test.exs b/test/web/media_proxy/invalidations/http_test.exs
index a3a681fd9..09e7ca0fb 100644
--- a/test/web/media_proxy/invalidations/http_test.exs
+++ b/test/web/media_proxy/invalidations/http_test.exs
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
import Tesla.Mock
setup do
- on_exit(fn -> Cachex.purge(:deleted_urls_cache) end)
+ on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
diff --git a/test/web/media_proxy/invalidations/script_test.exs b/test/web/media_proxy/invalidations/script_test.exs
index d488dbff2..c69cec07a 100644
--- a/test/web/media_proxy/invalidations/script_test.exs
+++ b/test/web/media_proxy/invalidations/script_test.exs
@@ -5,7 +5,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.ScriptTest do
import ExUnit.CaptureLog
setup do
- on_exit(fn -> Cachex.purge(:deleted_urls_cache) end)
+ on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
diff --git a/test/web/media_proxy/media_proxy_controller_test.exs b/test/web/media_proxy/media_proxy_controller_test.exs
index a48310d70..2b6b25221 100644
--- a/test/web/media_proxy/media_proxy_controller_test.exs
+++ b/test/web/media_proxy/media_proxy_controller_test.exs
@@ -11,7 +11,7 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
setup do: clear_config([Pleroma.Web.Endpoint, :secret_key_base])
setup do
- on_exit(fn -> Cachex.purge(:deleted_urls_cache) end)
+ on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
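
All four test files swap Cachex.purge/1 for Cachex.clear/1 in their on_exit callbacks: purge only evicts entries whose TTL has expired, so URLs written without a TTL would survive between tests, whereas clear unconditionally empties the cache. A quick illustration with a placeholder URL, assuming no default TTL is configured on the cache:

    Cachex.put(:deleted_urls_cache, "https://example.com/media/image.png", true)
    Cachex.purge(:deleted_urls_cache)  # drops expired entries only; the URL is still cached
    Cachex.clear(:deleted_urls_cache)  # drops every entry; the cache is now empty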