# frenzy/lib/frenzy_web/controllers/fervor/items_controller.ex
defmodule FrenzyWeb.Fervor.ItemsController do
use FrenzyWeb, :controller
# 2022-01-12 23:01:43 +00:00 (git-blame artifact)
alias Frenzy.{Repo, Item, Group, Feed}
# 2019-03-31 14:52:56 +00:00 (git-blame artifact)
import Ecto.Query
alias FrenzyWeb.Fervor.Paginator
plug :get_specific_item
# 2022-01-12 23:01:43 +00:00 (git-blame artifact)
# Plug: loads the item named by the :id path param into conn.assigns[:item].
# Responds 404 and halts when the item does not exist, is a tombstone, or
# belongs to a feed the current user is not subscribed to. The literal id
# "sync" is excluded so the sync/2 action keeps its route.
def get_specific_item(%Plug.Conn{path_params: %{"id" => id}} = conn, _opts) when id != "sync" do
  user = conn.assigns[:user] |> Repo.preload(:feeds)
  item = Repo.get(Item, id)

  # Repo.get/2 returns nil for an unknown id; the original code crashed on
  # item.feed_id in that case. Unknown, tombstoned, and foreign items are all
  # reported identically so ids cannot be probed.
  if item != nil and not item.tombstone and
       Enum.any?(user.feeds, fn f -> f.id == item.feed_id end) do
    assign(conn, :item, item)
  else
    conn
    |> put_status(404)
    |> json(%{error: "Unknown item"})
    |> halt()
  end
end

# Pass-through clause for requests without an :id path param (or id == "sync").
def get_specific_item(conn, _opts), do: conn
# GET /items — every item from the feeds in the current user's groups,
# optionally filtered by read state via the "only" param ("read"/"unread")
# and paged via the Paginator params.
def items_list(conn, params) do
  user = Repo.preload(conn.assigns[:user], groups: [:feeds])

  # Collect the ids of all feeds reachable through the user's groups.
  feed_ids = for group <- user.groups, feed <- group.feeds, do: feed.id

  base = from(i in Item, where: i.feed_id in ^feed_ids)

  filtered =
    case params["only"] do
      "read" -> from(i in base, where: i.read)
      "unread" -> from(i in base, where: not i.read)
      nil -> base
    end

  items =
    filtered
    |> Paginator.paginate(params)
    |> Paginator.limit(params)
    |> Repo.all()
    |> Enum.map(&Item.to_fervor/1)

  json(conn, items)
end
# GET /items/:id — the item was already loaded and authorized by the
# get_specific_item plug.
def specific_item(conn, _params) do
  json(conn, Item.to_fervor(conn.assigns[:item]))
end
# Applies `changes` to the item loaded by the plug and renders the result.
# An invalid changeset is silently ignored: the unmodified item is rendered
# instead of an error (best-effort semantics).
defp mark_item(conn, changes) do
  item = Repo.preload(conn.assigns[:item], :feed)
  changeset = Item.changeset(item, changes)

  item =
    if changeset.valid? do
      {:ok, updated} = Repo.update(changeset)
      updated
    else
      item
    end

  json(conn, Item.to_fervor(item))
end
# Marks the plug-loaded item as read.
def read_specific_item(conn, _params), do: mark_item(conn, %{read: true})
# Marks the plug-loaded item as unread.
def unread_specific_item(conn, _params), do: mark_item(conn, %{read: false})
# Applies `changes` to every item named in the comma-separated "ids" param
# that belongs to one of the current user's feeds. Unparsable ids, unknown
# ids, tombstones, and items from foreign feeds are silently skipped.
# Responds with the list of ids that were actually updated.
defp mark_multiple_items(conn, %{"ids" => ids}, changes) do
  user = conn.assigns[:user] |> Repo.preload(groups: [:feeds])

  # Membership set of the user's feed ids for O(1) ownership checks.
  feed_ids =
    user.groups
    |> Enum.flat_map(fn g -> g.feeds end)
    |> MapSet.new(fn f -> f.id end)

  updated_ids =
    ids
    |> String.split(",")
    |> Enum.flat_map(fn s ->
      # Integer.parse/1 returns :error on malformed input and Repo.get/2
      # returns nil for unknown ids; the original code crashed (MatchError /
      # nil.feed_id) in both cases. Skip such entries instead.
      with {id, _rest} <- s |> String.trim() |> Integer.parse(),
           %Item{} = item <- Repo.get(Item, id) do
        [item]
      else
        _ -> []
      end
    end)
    |> Enum.filter(fn item ->
      MapSet.member?(feed_ids, item.feed_id) && !item.tombstone
    end)
    |> Enum.map(fn item ->
      item = Repo.preload(item, :feed)
      changeset = Item.changeset(item, changes)

      case Repo.update(changeset) do
        {:ok, item} -> item.id
        _ -> nil
      end
    end)
    |> Enum.reject(&is_nil/1)

  json(conn, updated_ids)
end
# Fallback clause: the request carried no "ids" param.
defp mark_multiple_items(conn, _params, _changes) do
  conn |> put_status(400) |> json(%{error: "No items provided."})
end
# Marks every item listed in params["ids"] as read.
def read_multiple(conn, params), do: mark_multiple_items(conn, params, %{read: true})
# Marks every item listed in params["ids"] as unread.
def unread_multiple(conn, params), do: mark_multiple_items(conn, params, %{read: false})
# Incremental sync endpoint.
#
# Without a parsable "last_sync" param it returns the 1000 newest
# non-tombstone items from the user's feeds. With one, it returns every item
# touched since that timestamp, split into ids to delete (tombstones) and
# items to upsert. The response includes a fresh sync_timestamp for the
# client to send on its next call.
def sync(conn, params) do
  sync_timestamp = Timex.now()

  # Ids of all feeds in groups owned by the current user.
  feed_ids =
    Group
    |> where([g], g.user_id == ^conn.assigns.user.id)
    |> join(:inner, [g], f in Feed, on: f.group_id == g.id)
    |> select([g, f], f.id)
    |> Repo.all()

  # A missing or malformed timestamp falls back to nil → full (capped) sync.
  last_sync =
    with s when is_binary(s) <- Map.get(params, "last_sync"),
         {:ok, datetime} <- Timex.parse(s, "{ISO:Extended:Z}") do
      datetime
    else
      _ -> nil
    end

  {deleted_ids, upserted} =
    if last_sync == nil do
      items =
        Item
        |> where([i], not i.tombstone and i.feed_id in ^feed_ids)
        |> order_by([i], desc: i.inserted_at)
        |> limit(1000)
        |> Repo.all()

      {[], items}
    else
      all_items =
        Repo.all(from i in Item, where: i.feed_id in ^feed_ids and i.updated_at >= ^last_sync)

      {tombstones, rest} = Enum.split_with(all_items, & &1.tombstone)
      {Enum.map(tombstones, & &1.id), rest}
    end

  json(conn, %{
    sync_timestamp: Timex.format!(sync_timestamp, "{ISO:Extended:Z}"),
    delete: Enum.map(deleted_ids, &to_string/1),
    upsert: Enum.map(upserted, &Item.to_fervor/1)
  })
end
# 2019-03-31 14:52:56 +00:00 (git-blame artifact)
end