Update Publisher to sync only new non-local data.

dev
KKlochko 2 months ago
parent 5d8e70d79d
commit 5ba0e4da06

@@ -149,6 +149,7 @@ defmodule DecentralisedBookIndex.Metadata do
     define :get_publisher_by_id, args: [:id], action: :by_id
     define :search_publisher, action: :search, args: [:name]
     define :update_publisher, action: :update
+    define :assign_publisher_dbi_server, args: [:dbi_server_id], action: :assign_dbi_server_id
     define :destroy_publisher, action: :destroy
   end
 end
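For context, the new code-interface entry is called the same way as the existing ones; a minimal usage sketch, mirroring how the new tests in this commit call it (`publisher`, `server`, and `user` are placeholder values from the test setup):

```elixir
# Sketch: attach a publisher to a DBI server via the new code interface.
{:ok, publisher} = Metadata.create_publisher("Publisher", actor: user)

{:ok, publisher} =
  Metadata.assign_publisher_dbi_server(publisher, server.id, actor: user)
```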

@@ -56,6 +56,10 @@ defmodule DecentralisedBookIndex.Metadata.Publisher do
       accept [:name]
     end
 
+    update :assign_dbi_server_id do
+      accept [:dbi_server_id]
+    end
+
     update :sync do
       accept [:name, :inserted_at, :updated_at, :dbi_server_id]
     end

@@ -5,21 +5,23 @@ defmodule DecentralisedBookIndex.Sync.PublisherSync do
   def create_update(attrs, server_id) do
     case Metadata.get_publisher_by_id(attrs.id) do
       {:ok, publisher} ->
+        if not is_local(publisher) and is_new(attrs, publisher) do
         attrs =
           attrs
           |> Map.delete(:id)
-          |> Map.put(:dbi_server_id, server_id)
+          |> Map.put_new(:dbi_server_id, server_id)
 
         publisher
         |> Ash.Changeset.for_update(:sync, attrs)
         |> Ash.update!(authorize?: false)
+        end
 
         :ok
 
       {:error, %Ash.Error.Query.NotFound{}} ->
         attrs =
           attrs
-          |> Map.put(:dbi_server_id, server_id)
+          |> Map.put_new(:dbi_server_id, server_id)
 
         Publisher
         |> Ash.Changeset.for_create(:sync_create, attrs)
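The switch from `Map.put/3` to `Map.put_new/3` means an incoming `dbi_server_id` (as the updated tests below now send) is preserved, and the syncing server's id is only used as a fallback when the key is absent. A small illustration with placeholder ids:

```elixir
# Map.put_new/3 only fills the key when it is missing (ids below are placeholders).
attrs = %{name: "Publisher", dbi_server_id: "origin-server-id"}

Map.put_new(attrs, :dbi_server_id, "fallback-server-id")
#=> %{name: "Publisher", dbi_server_id: "origin-server-id"}

Map.put_new(%{name: "Publisher"}, :dbi_server_id, "fallback-server-id")
#=> %{name: "Publisher", dbi_server_id: "fallback-server-id"}
```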
@@ -28,4 +30,17 @@ defmodule DecentralisedBookIndex.Sync.PublisherSync do
       :ok
     end
   end
+
+  defp is_local(publisher) do
+    is_nil(publisher.dbi_server_id)
+  end
+
+  defp is_new(new_publisher, publisher) do
+    with {:ok, datetime, _offset} <- DateTime.from_iso8601(new_publisher[:updated_at]) do
+      DateTime.after?(datetime, publisher.updated_at)
+    else
+      {:error, _reason} ->
+        false
+    end
+  end
 end
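The new guards decide whether incoming attributes may overwrite the stored record: a record counts as local when it has no `dbi_server_id`, and the incoming data counts as newer only when its ISO 8601 `updated_at` parses to a point after the stored timestamp. A standalone sketch of that comparison, with example timestamps only:

```elixir
# Example timestamps only; illustrates the DateTime comparison used in is_new/2.
stored_updated_at = ~U[2025-03-21 09:20:48.791539Z]

{:ok, incoming, _offset} = DateTime.from_iso8601("2025-03-21T09:20:58.791539Z")

DateTime.after?(incoming, stored_updated_at)
#=> true, so the :sync update runs; an older or equal timestamp is skipped
```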

@@ -29,12 +29,9 @@ defmodule DecentralisedBookIndex.Sync.DataTransformers.PublisherSyncTest do
       assert :ok = PublisherSync.create_update(publisher, server.id)
 
       assert {:ok, saved_publisher} = Metadata.get_publisher_by_id(publisher.id)
 
-      publisher =
-        publisher
-        |> Map.replace(:inserted_at, inserted_at)
-        |> Map.replace(:updated_at, updated_at)
+      publisher = datetime_from_iso8601_for_map(publisher)
 
-      assert publisher = saved_publisher
+      assert get_submap(saved_publisher, publisher) == publisher
       assert server.id == saved_publisher.dbi_server_id
     end
@@ -42,19 +39,64 @@ defmodule DecentralisedBookIndex.Sync.DataTransformers.PublisherSyncTest do
       server = generate(dbi_server(url: @test_server_endpoint))
       {:ok, publisher} = Metadata.create_publisher("Publisher", actor: user)
+      {:ok, publisher} = Metadata.assign_publisher_dbi_server(publisher, server.id, actor: user)
 
       publisher_attrs = %{
         id: publisher.id,
         name: "Publisher",
         inserted_at: "2025-03-21T09:20:48.791539Z",
-        updated_at: "2025-03-21T09:20:48.791539Z"
+        updated_at: datetime_add_second_as_string(publisher.updated_at, 10),
+        dbi_server_id: server.id
       }
 
       assert :ok = PublisherSync.create_update(publisher_attrs, server.id)
       assert {:ok, saved_publisher} = Metadata.get_publisher_by_id(publisher.id)
 
-      assert publisher_attrs = saved_publisher
+      publisher_attrs = datetime_from_iso8601_for_map(publisher_attrs)
+      assert get_submap(saved_publisher, publisher_attrs) == publisher_attrs
       assert server.id == saved_publisher.dbi_server_id
     end
+
+    test "the local existing publisher wouldn't be updated", %{user: user} do
+      server = generate(dbi_server(url: @test_server_endpoint))
+      {:ok, publisher} = Metadata.create_publisher("Publisher", actor: user)
+
+      publisher_attrs = %{
+        id: publisher.id,
+        name: "Updated Publisher",
+        inserted_at: "2025-03-21T09:20:48.791539Z",
+        updated_at: "2025-03-21T09:20:48.791539Z"
+      }
+
+      assert :ok = PublisherSync.create_update(publisher_attrs, server.id)
+      assert {:ok, saved_publisher} = Metadata.get_publisher_by_id(publisher.id)
+
+      assert get_submap(saved_publisher, publisher_attrs) != publisher_attrs
+      assert saved_publisher.name == "Publisher"
+      assert saved_publisher.dbi_server_id == nil
+    end
+
+    test "an old publisher wouldn't be synced", %{user: user} do
+      server = generate(dbi_server(url: @test_server_endpoint))
+      {:ok, publisher} = Metadata.create_publisher("Publisher", actor: user)
+      {:ok, publisher} = Metadata.assign_publisher_dbi_server(publisher, server.id, actor: user)
+
+      publisher_attrs = %{
+        id: publisher.id,
+        name: "Old Publisher",
+        inserted_at: "2022-03-21T09:20:48.791539Z",
+        updated_at: "2022-03-21T09:20:48.791539Z",
+        dbi_server_id: server.id
+      }
+
+      assert :ok = PublisherSync.create_update(publisher_attrs, server.id)
+      assert {:ok, saved_publisher} = Metadata.get_publisher_by_id(publisher.id)
+
+      assert get_submap(saved_publisher, publisher_attrs) != publisher_attrs
+      assert saved_publisher.name == "Publisher"
+    end
   end
 end
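The assertions lean on a few test helpers whose definitions are not part of this diff: `datetime_add_second_as_string/2`, `datetime_from_iso8601_for_map/1`, and `get_submap/2`. Inferred from their call sites above, they might look roughly like the following sketch; the module name and bodies are assumptions, not the project's actual code.

```elixir
# Hypothetical helper module, inferred from how the tests above use these functions.
defmodule PublisherSyncTestHelpers do
  # Shift a DateTime by some seconds and return it as an ISO 8601 string.
  def datetime_add_second_as_string(datetime, seconds) do
    datetime
    |> DateTime.add(seconds, :second)
    |> DateTime.to_iso8601()
  end

  # Parse any ISO 8601 string values in a map into DateTime structs, leaving other values alone.
  def datetime_from_iso8601_for_map(map) do
    Map.new(map, fn
      {key, value} when is_binary(value) ->
        case DateTime.from_iso8601(value) do
          {:ok, datetime, _offset} -> {key, datetime}
          {:error, _reason} -> {key, value}
        end

      pair ->
        pair
    end)
  end

  # Take only the keys of `expected` from a struct or map, for comparison against `expected`.
  def get_submap(%_{} = struct, expected) do
    struct
    |> Map.from_struct()
    |> Map.take(Map.keys(expected))
  end

  def get_submap(map, expected) when is_map(map) do
    Map.take(map, Map.keys(expected))
  end
end
```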
