Compare commits

...

2 Commits

Author        SHA1         Message               Date
Shadowfacts   eefa65ed56   Add feed filters      2019-03-14 21:42:02 -04:00
Shadowfacts   27cf787d52   Add tombstone items   2019-03-14 19:48:46 -04:00
5 changed files with 110 additions and 20 deletions

View File

@@ -0,0 +1,33 @@
defmodule Frenzy.FilterEngine do
  def matches?(item, filter) do
    score =
      filter.rules
      |> Enum.map(fn rule -> score(item, rule) end)
      |> Enum.sum()

    score >= filter.score
  end

  def score(item, rule) do
    prop_value = get_property(item, rule.property)

    if matches(prop_value, rule.mode, rule.param) do
      rule.weight
    else
      0
    end
  end

  def matches(value, "contains_string", param) do
    String.contains?(value, param)
  end

  def matches(value, "matches_regex", param) do
    regex = Regex.compile!(param)
    String.match?(value, regex)
  end

  def get_property(item, "url"), do: item.url
  def get_property(item, "title"), do: item.title
  def get_property(item, "author"), do: item.author
end
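
Not part of the diff, but for reference: a minimal sketch of how FilterEngine.matches?/2 scores an item, using plain maps in place of the item and filter structs (the field values here are made up for illustration).

item = %{title: "Show HN: Frenzy", url: "https://example.com/post", author: "someone"}

filter = %{
  score: 10,
  rules: [
    %{property: "title", mode: "contains_string", param: "Show HN", weight: 10},
    %{property: "url", mode: "matches_regex", param: "example\\.com", weight: 5}
  ]
}

Frenzy.FilterEngine.matches?(item, filter)
# => true (rule weights 10 + 5 = 15, which meets the score threshold of 10)

Each matching rule contributes its weight; the item passes once the summed weights reach filter.score.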

View File

@@ -25,7 +25,8 @@ defmodule Frenzy.Item do
     field :read, :boolean, default: false
     field :read_date, :utc_datetime
     field :title, :string
+    field :tombstone, :boolean, default: false
     belongs_to :feed, Frenzy.Feed

     timestamps()
@@ -34,7 +35,7 @@ defmodule Frenzy.Item do
   @doc false
   def changeset(item, attrs) do
     item
-    |> cast(attrs, [:guid, :title, :url, :creator, :date, :content, :read, :read_date])
+    |> cast(attrs, [:guid, :title, :url, :creator, :date, :content, :read, :read_date, :tombstone])
     |> validate_required([:guid, :title, :url, :date, :content, :feed])
   end
 end

View File

@@ -1,6 +1,6 @@
 defmodule Frenzy.UpdateFeeds do
   use GenServer
-  alias Frenzy.{Repo, Feed, Item}
+  alias Frenzy.{Repo, Feed, Item, FilterEngine}
   import Ecto.Query
   require Logger
@@ -32,22 +32,33 @@ defmodule Frenzy.UpdateFeeds do
   defp schedule_update() do
     # 15 minutes
-    Process.send_after(self(), :update_feeds, 15 * 60 * 1000)
-    # Process.send_after(self(), :update_feeds, 60 * 1000) # 1 minutes
+    # Process.send_after(self(), :update_feeds, 15 * 60 * 1000)
+    # 1 minutes
+    Process.send_after(self(), :update_feeds, 60 * 1000)
   end

   defp update_feeds() do
     Logger.info("Updating all feeds")
-    Enum.map(Repo.all(Feed), &update_feed/1)
+    Repo.all(from Feed, preload: [:filter])
+    |> Enum.map(&update_feed/1)

     prune_old_items()
   end

   defp prune_old_items() do
     {count, _} =
-      Repo.delete_all(from i in Item, where: i.read, where: i.read_date <= from_now(-1, "week"))
+      from(i in Item,
+        where: i.read and not i.tombstone,
+        # where: i.read_date <= from_now(-1, "week"),
+        where: i.read_date <= from_now(-1, "minute"),
+        update: [
+          set: [tombstone: true, content: nil, creator: nil, date: nil, url: nil, title: nil]
+        ]
+      )
+      |> Repo.update_all([])

-    Logger.info("Removed #{count} read items")
+    Logger.info("Converted #{count} read items to tombstones")
   end

   defp update_feed(feed) do
@@ -78,7 +89,7 @@ defmodule Frenzy.UpdateFeeds do
     Repo.update(changeset)

-    feed = Repo.preload(feed, :items)
+    feed = Repo.preload(feed, items: [], filter: [:rules])

     Enum.map(rss.items, fn entry ->
       # todo: use Repo.exists for this
@@ -89,8 +100,7 @@ defmodule Frenzy.UpdateFeeds do
   end

   defp create_item(feed, entry) do
-    Logger.debug("Creating item for:")
-    IO.inspect(entry)
+    Logger.debug("Creating item for #{entry.url}")

     url = get_real_url(entry)
@@ -104,15 +114,43 @@ defmodule Frenzy.UpdateFeeds do
         entry.description
       end

+    item_params = %{
+      guid: entry.id,
+      title: entry.title,
+      url: url,
+      date: parse_date(entry.published_at),
+      creator: "",
+      content: content
+    }
+
+    result =
+      if feed.filter_enabled do
+        case {feed.filter.mode, FilterEngine.matches?(item_params, feed.filter)} do
+          {"accept", true} ->
+            :store
+
+          {"reject", false} ->
+            :store
+
+          _ ->
+            Logger.debug("Skipping item #{url} due to feed filter")
+            :tombstone
+        end
+      else
+        :store
+      end
+
     changeset =
-      Ecto.build_assoc(feed, :items, %{
-        guid: entry.id,
-        title: entry.title,
-        url: url,
-        date: parse_date(entry.published_at),
-        creator: "",
-        content: content
-      })
+      case result do
+        :store ->
+          Ecto.build_assoc(feed, :items, item_params)
+
+        :tombstone ->
+          Ecto.build_assoc(feed, :items, %{
+            guid: entry.id,
+            tombstone: true
+          })
+      end

     Repo.insert(changeset)
   end
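
For reference (not part of the diff), the result assignment in create_item above reduces to this decision table over the filter mode and whether FilterEngine.matches?/2 returns true:

# filter.mode        matches?   result
# "accept"           true       :store
# "accept"           false      :tombstone (logged, stored as a tombstone placeholder)
# "reject"           true       :tombstone (logged, stored as a tombstone placeholder)
# "reject"           false      :store
# filter_enabled == false       :store (no filtering applied)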

View File

@@ -11,7 +11,7 @@ defmodule FrenzyWeb.FeedController do
   def show(conn, %{"id" => id}) do
     feed = Repo.get(Feed, id) |> Repo.preload(:filter)
-    items = Repo.all(from Item, where: [feed_id: ^id], order_by: [desc: :date])
+    items = Repo.all(from Item, where: [feed_id: ^id, tombstone: false], order_by: [desc: :date])

     render(conn, "show.html", %{
       feed: feed,
@@ -39,6 +39,15 @@ defmodule FrenzyWeb.FeedController do
       )

     {:ok, feed} = Repo.insert(changeset)

+    # changeset =
+    #   Ecto.build_assoc(feed, :filter, %{
+    #     mode: "reject",
+    #     score: 0
+    #   })
+    # {:ok, _} = Repo.insert(changeset)
+
     redirect(conn, to: Routes.feed_path(Endpoint, :index))
   end

View File

@@ -0,0 +1,9 @@
defmodule Frenzy.Repo.Migrations.ItemsAddTombstone do
  use Ecto.Migration

  def change do
    alter table(:items) do
      add :tombstone, :boolean
    end
  end
end
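
One caveat, not part of the diff: the column is added without a default, so rows that exist before the migration runs get a NULL tombstone, which neither the where: [tombstone: false] query in FeedController nor the not i.tombstone condition in prune_old_items will match under SQL NULL semantics. If backfilling at the database level is acceptable, a hypothetical variant of the change would be:

def change do
  alter table(:items) do
    # default: false also backfills existing rows; null: false keeps the flag strictly boolean
    add :tombstone, :boolean, default: false, null: false
  end
end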