Compare commits
No commits in common. "eefa65ed5695c567c7cf8a69b011f72266e7d3de" and "3c4210d9a6ab80ddb17c071e49a12b851152dc8c" have entirely different histories.
eefa65ed56 ... 3c4210d9a6
@@ -1,33 +0,0 @@
-defmodule Frenzy.FilterEngine do
-  def matches?(item, filter) do
-    score =
-      filter.rules
-      |> Enum.map(fn rule -> score(item, rule) end)
-      |> Enum.sum()
-
-    score >= filter.score
-  end
-
-  def score(item, rule) do
-    prop_value = get_property(item, rule.property)
-
-    if matches(prop_value, rule.mode, rule.param) do
-      rule.weight
-    else
-      0
-    end
-  end
-
-  def matches(value, "contains_string", param) do
-    String.contains?(value, param)
-  end
-
-  def matches(value, "matches_regex", param) do
-    regex = Regex.compile(param)
-    String.match?(value, regex)
-  end
-
-  def get_property(item, "url"), do: item.url
-  def get_property(item, "title"), do: item.title
-  def get_property(item, "author"), do: item.author
-end
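For reference, the deleted engine sums the weights of matching rules and reports a match once the total reaches the filter's score threshold. Below is a minimal sketch of a call, with hypothetical filter and item shapes inferred from the fields the engine reads. Note that Regex.compile/1 returns an {:ok, regex} tuple rather than a regex, so the "matches_regex" clause as written would raise at runtime; a working version would use Regex.compile!/1.

# Hypothetical filter and item shapes, inferred from the properties
# FilterEngine reads; not the project's actual structs.
filter = %{
  score: 10,
  rules: [
    %{property: "title", mode: "contains_string", param: "Elixir", weight: 10},
    %{property: "url", mode: "contains_string", param: "/blog/", weight: 5}
  ]
}

item = %{title: "Elixir 1.9 released", url: "https://example.com/blog/42", author: ""}

Frenzy.FilterEngine.matches?(item, filter)
# => true (10 + 5 = 15, and 15 >= 10)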
@@ -25,8 +25,7 @@ defmodule Frenzy.Item do
     field :read, :boolean, default: false
     field :read_date, :utc_datetime
     field :title, :string
-    field :tombstone, :boolean, default: false
 
     belongs_to :feed, Frenzy.Feed
 
     timestamps()
@@ -35,7 +34,7 @@ defmodule Frenzy.Item do
   @doc false
   def changeset(item, attrs) do
     item
-    |> cast(attrs, [:guid, :title, :url, :creator, :date, :content, :read, :read_date, :tombstone])
+    |> cast(attrs, [:guid, :title, :url, :creator, :date, :content, :read, :read_date])
    |> validate_required([:guid, :title, :url, :date, :content, :feed])
   end
 end
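A side effect of dropping :tombstone from the cast/3 list: cast/3 silently discards any param that is not in the permitted list, so a tombstone attribute never reaches the changeset on the new side. A hypothetical illustration:

# Hypothetical params; :tombstone is no longer permitted by cast/3.
changeset = Frenzy.Item.changeset(%Frenzy.Item{}, %{title: "Hello", tombstone: true})

changeset.changes[:title]      # => "Hello"
changeset.changes[:tombstone]  # => nil, the key was filtered out by cast/3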
@@ -1,6 +1,6 @@
 defmodule Frenzy.UpdateFeeds do
   use GenServer
-  alias Frenzy.{Repo, Feed, Item, FilterEngine}
+  alias Frenzy.{Repo, Feed, Item}
   import Ecto.Query
   require Logger
@@ -32,33 +32,22 @@ defmodule Frenzy.UpdateFeeds do
 
   defp schedule_update() do
     # 15 minutes
-    # Process.send_after(self(), :update_feeds, 15 * 60 * 1000)
-    # 1 minutes
-    Process.send_after(self(), :update_feeds, 60 * 1000)
+    Process.send_after(self(), :update_feeds, 15 * 60 * 1000)
+    # Process.send_after(self(), :update_feeds, 60 * 1000) # 1 minutes
   end
 
   defp update_feeds() do
     Logger.info("Updating all feeds")
 
-    Repo.all(from Feed, preload: [:filter])
-    |> Enum.map(&update_feed/1)
-
+    Enum.map(Repo.all(Feed), &update_feed/1)
     prune_old_items()
   end
 
   defp prune_old_items() do
     {count, _} =
-      from(i in Item,
-        where: i.read and not i.tombstone,
-        # where: i.read_date <= from_now(-1, "week"),
-        where: i.read_date <= from_now(-1, "minute"),
-        update: [
-          set: [tombstone: true, content: nil, creator: nil, date: nil, url: nil, title: nil]
-        ]
-      )
-      |> Repo.update_all([])
+      Repo.delete_all(from i in Item, where: i.read, where: i.read_date <= from_now(-1, "week"))
 
-    Logger.info("Converted #{count} read items to tombstones")
+    Logger.info("Removed #{count} read items")
   end
 
   defp update_feed(feed) do
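The old side converts read items to tombstones with Repo.update_all/2; the new side deletes them outright with Repo.delete_all/1. Both return a {count, _} tuple, which is why the pattern match is unchanged, and the two where: clauses combine with AND, so only read items older than a week are deleted. The new query in expression form, as a sketch rather than the project's code:

# Sketch: equivalent of the new prune query, spelled out.
query =
  from i in Item,
    where: i.read,
    where: i.read_date <= from_now(-1, "week")

{count, _} = Repo.delete_all(query)
Logger.info("Removed #{count} read items")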
@@ -89,7 +78,7 @@ defmodule Frenzy.UpdateFeeds do
 
     Repo.update(changeset)
 
-    feed = Repo.preload(feed, items: [], filter: [:rules])
+    feed = Repo.preload(feed, :items)
 
     Enum.map(rss.items, fn entry ->
       # todo: use Repo.exists for this
@@ -100,7 +89,8 @@ defmodule Frenzy.UpdateFeeds do
   end
 
   defp create_item(feed, entry) do
-    Logger.debug("Creating item for #{entry.url}")
+    Logger.debug("Creating item for:")
+    IO.inspect(entry)
 
     url = get_real_url(entry)
 
@@ -114,43 +104,15 @@ defmodule Frenzy.UpdateFeeds do
         entry.description
       end
 
-    item_params = %{
-      guid: entry.id,
-      title: entry.title,
-      url: url,
-      date: parse_date(entry.published_at),
-      creator: "",
-      content: content
-    }
-
-    result =
-      if feed.filter_enabled do
-        case {feed.filter.mode, FilterEngine.matches?(item_params, feed.filter)} do
-          {"accept", true} ->
-            :store
-
-          {"reject", false} ->
-            :store
-
-          _ ->
-            Logger.debug("Skipping item #{url} due to feed filter")
-            :tombstone
-        end
-      else
-        :store
-      end
-
     changeset =
-      case result do
-        :store ->
-          Ecto.build_assoc(feed, :items, item_params)
-
-        :tombstone ->
-          Ecto.build_assoc(feed, :items, %{
-            guid: entry.id,
-            tombstone: true
-          })
-      end
+      Ecto.build_assoc(feed, :items, %{
+        guid: entry.id,
+        title: entry.title,
+        url: url,
+        date: parse_date(entry.published_at),
+        creator: "",
+        content: content
+      })
 
     Repo.insert(changeset)
   end
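The removed decision logic amounts to a small truth table: with filtering disabled the item is always stored; an "accept" filter stores only matching items; a "reject" filter stores only non-matching ones; every other combination becomes a tombstone stub. A hypothetical helper condensing the same rule, not code from the repository:

# Hypothetical condensation of the removed case expression: store when
# filtering is off, when an "accept" filter matches, or when a "reject"
# filter does not match.
defp store_item?(%{filter_enabled: false}, _matches?), do: true
defp store_item?(%{filter: %{mode: "accept"}}, matches?), do: matches?
defp store_item?(%{filter: %{mode: "reject"}}, matches?), do: not matches?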
@@ -11,7 +11,7 @@ defmodule FrenzyWeb.FeedController do
 
   def show(conn, %{"id" => id}) do
     feed = Repo.get(Feed, id) |> Repo.preload(:filter)
-    items = Repo.all(from Item, where: [feed_id: ^id, tombstone: false], order_by: [desc: :date])
+    items = Repo.all(from Item, where: [feed_id: ^id], order_by: [desc: :date])
 
     render(conn, "show.html", %{
       feed: feed,
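In the old query, the keyword form where: [feed_id: ^id, tombstone: false] is shorthand for two equality tests joined with AND. Worth noting: since the migration at the end of this diff adds :tombstone without a database default, rows created before the feature hold NULL there, and under SQL three-valued logic an equality test against false excludes those rows as well. The same query in expression form, as a sketch:

# Sketch: the keyword where: above, spelled out as explicit comparisons.
items =
  Repo.all(
    from i in Item,
      where: i.feed_id == ^id and i.tombstone == false,
      order_by: [desc: i.date]
  )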
@@ -39,15 +39,6 @@ defmodule FrenzyWeb.FeedController do
     )
 
     {:ok, feed} = Repo.insert(changeset)
-
-    # changeset =
-    #   Ecto.build_assoc(feed, :filter, %{
-    #     mode: "reject",
-    #     score: 0
-    #   })
-
-    # {:ok, _} = Repo.insert(changeset)
-
     redirect(conn, to: Routes.feed_path(Endpoint, :index))
   end
@@ -1,9 +0,0 @@
-defmodule Frenzy.Repo.Migrations.ItemsAddTombstone do
-  use Ecto.Migration
-
-  def change do
-    alter table(:items) do
-      add :tombstone, :boolean
-    end
-  end
-end
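The deleted migration adds the column as a nullable boolean with no database default; the schema's default: false only applies to structs built in Elixir, never to rows already in the table. A variant that would keep the database in sync with the schema default, assuming that was the intent:

# Sketch: same migration with a database-level default. default: false
# backfills existing rows, and null: false then forbids NULL tombstones.
def change do
  alter table(:items) do
    add :tombstone, :boolean, default: false, null: false
  end
end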