about summary refs log tree commit diff
diff options
context:
space:
mode:
authorlain <lain@soykaf.club>2019-02-09 22:01:08 +0100
committerlain <lain@soykaf.club>2019-02-10 16:52:42 +0100
commit8a270b438c993288853bad94be1daf39f7675e5c (patch)
treeab26d304e36c01de054091320c03eb9214e1f7e1
parent1aebe8c6d94e8e49e96df16809a70c71d977310b (diff)
downloadpleroma-8a270b438c993288853bad94be1daf39f7675e5c.tar.gz
Do object insertion through Cachex
So we don't flood our postgres logs with errors. Should also make things slightly faster.
-rw-r--r--lib/pleroma/object.ex22
-rw-r--r--lib/pleroma/web/activity_pub/utils.ex10
-rw-r--r--lib/pleroma/web/twitter_api/twitter_api.ex12
-rw-r--r--test/object_test.exs28
4 files changed, 53 insertions, 19 deletions
diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex
index 7b46a3b05..96079cf22 100644
--- a/lib/pleroma/object.ex
+++ b/lib/pleroma/object.ex
@@ -13,9 +13,29 @@ defmodule Pleroma.Object do
timestamps()
end
+ def insert_or_get(cng) do
+ {_, data} = fetch_field(cng, :data)
+ id = data["id"] || data[:id]
+ key = "object:#{id}"
+
+ fetcher = fn _ ->
+ with nil <- get_by_ap_id(id),
+ {:ok, object} <- Repo.insert(cng) do
+ {:commit, object}
+ else
+ %Object{} = object -> {:commit, object}
+ e -> {:ignore, e}
+ end
+ end
+
+ with {state, object} when state in [:commit, :ok] <- Cachex.fetch(:object_cache, key, fetcher) do
+ {:ok, object}
+ end
+ end
+
def create(data) do
Object.change(%Object{}, %{data: data})
- |> Repo.insert()
+ |> insert_or_get()
end
def change(struct, params \\ %{}) do
diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex
index 4a2cc6738..134701e80 100644
--- a/lib/pleroma/web/activity_pub/utils.ex
+++ b/lib/pleroma/web/activity_pub/utils.ex
@@ -134,14 +134,8 @@ defmodule Pleroma.Web.ActivityPub.Utils do
context = context || generate_id("contexts")
changeset = Object.context_mapping(context)
- case Repo.insert(changeset) do
- {:ok, object} ->
- object
-
- # This should be solved by an upsert, but it seems ecto
- # has problems accessing the constraint inside the jsonb.
- {:error, _} ->
- Object.get_cached_by_ap_id(context)
+ with {:ok, object} <- Object.insert_or_get(changeset) do
+ object
end
end
diff --git a/lib/pleroma/web/twitter_api/twitter_api.ex b/lib/pleroma/web/twitter_api/twitter_api.ex
index 7d00c01a1..ddd5c5cfb 100644
--- a/lib/pleroma/web/twitter_api/twitter_api.ex
+++ b/lib/pleroma/web/twitter_api/twitter_api.ex
@@ -305,16 +305,8 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
else
_e ->
changeset = Object.context_mapping(context)
-
- case Repo.insert(changeset) do
- {:ok, %{id: id}} ->
- id
-
- # This should be solved by an upsert, but it seems ecto
- # has problems accessing the constraint inside the jsonb.
- {:error, _} ->
- Object.get_cached_by_ap_id(context).id
- end
+ {:ok, object} = Object.insert_or_get(changeset)
+ object.id
end
end
diff --git a/test/object_test.exs b/test/object_test.exs
index 72194975d..ab6431012 100644
--- a/test/object_test.exs
+++ b/test/object_test.exs
@@ -57,4 +57,32 @@ defmodule Pleroma.ObjectTest do
assert cached_object.data["type"] == "Tombstone"
end
end
+
+ describe "insert_or_get" do
+ test "inserting the same object twice (by id) just returns the original object" do
+ data = %{data: %{"id" => Ecto.UUID.generate()}}
+ cng = Object.change(%Object{}, data)
+ {:ok, object} = Object.insert_or_get(cng)
+ {:ok, second_object} = Object.insert_or_get(cng)
+
+ Cachex.clear(:object_cache)
+ {:ok, third_object} = Object.insert_or_get(cng)
+
+ assert object == second_object
+ assert object == third_object
+ end
+ end
+
+ describe "create" do
+ test "inserts an object for a given data set" do
+ data = %{"id" => Ecto.UUID.generate()}
+
+ {:ok, object} = Object.create(data)
+ assert object.data["id"] == data["id"]
+
+ # Works when doing it twice.
+ {:ok, object} = Object.create(data)
+ assert object.data["id"] == data["id"]
+ end
+ end
end