Compare commits

...

3 Commits

7 changed files with 179 additions and 4 deletions

View File

@@ -31,6 +31,7 @@ config :phoenix, :json_library, Jason
config :logger, truncate: :infinity
config :frenzy, sentry_enabled: false
config :frenzy, external_readability: false

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.

View File

@@ -16,7 +16,8 @@ defmodule Frenzy.Application do
      FrenzyWeb.Endpoint,
      # Starts a worker by calling: Frenzy.Worker.start_link(arg)
      # {Frenzy.Worker, arg},
      {Frenzy.UpdateFeeds, name: Frenzy.UpdateFeeds}
      {Frenzy.UpdateFeeds, name: Frenzy.UpdateFeeds},
      {Frenzy.BuiltinExtractor, name: Frenzy.BuiltinExtractor}
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html

View File

@@ -0,0 +1,76 @@
defmodule Frenzy.BuiltinExtractor do
  use GenServer
  alias Frenzy.Network
  require Logger

  @external_url Application.get_env(:frenzy, :external_readability_url)

  def start_link(state) do
    GenServer.start_link(__MODULE__, :ok, state)
  end

  @spec article(String.t(), String.t()) :: Floki.html_tree()
  def article(url, html) do
    GenServer.call(__MODULE__, {:article, url, html})
  end

  def init(_state) do
    use_external = Application.get_env(:frenzy, :external_readability)

    use_external =
      if use_external do
        uri = URI.parse(@external_url)
        uri = %URI{uri | path: "/status"}
        uri = URI.to_string(uri)

        case Network.http_get(uri) do
          {:ok, %Tesla.Env{status: 200}} ->
            true

          _ ->
            Logger.warn("Could not reach external readability for healthcheck, disabling")
            false
        end
      else
        false
      end

    {:ok, use_external}
  end

  def handle_call({:article, url, html}, _from, state) do
    # the genserver state is a boolean telling us whether to use the external readability
    if state do
      uri = URI.parse(@external_url)
      uri = %URI{uri | path: "/readability", query: URI.encode_query(url: url)}
      uri = URI.to_string(uri)

      Logger.debug("Sending external readability request: #{uri}")

      case Network.http_post(uri, html, headers: [{"content-type", "text/html"}]) do
        {:ok, %Tesla.Env{status: 200, body: body}} ->
          {:reply, Floki.parse(body), state}

        {:ok, %Tesla.Env{status: status}} ->
          Logger.error("External readability failed, got HTTP #{status}")

          if Frenzy.sentry_enabled?() do
            Sentry.capture_message("External readability failed, got HTTP #{status}")
          end

          {:reply, Readability.article(html), state}

        {:error, reason} ->
          Logger.error("External readability failed: #{inspect(reason)}")

          if Frenzy.sentry_enabled?() do
            Sentry.capture_message("External readability failed: #{inspect(reason)}")
          end

          {:reply, Readability.article(html), state}
      end
    else
      {:reply, Readability.article(html), state}
    end
  end
end
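With the GenServer in place, callers go through article/2 and transparently fall back to the local Readability library whenever the external service is disabled or failed its startup healthcheck. A minimal usage sketch; the URL value is a placeholder assumption, since only the two config keys appear in this changeset:

# config/config.exs (hypothetical URL value; the keys are the ones read above)
config :frenzy, external_readability: true
config :frenzy, external_readability_url: "http://localhost:3000"

# Returns a Floki HTML tree, from the external service or Readability.article/1
html = "<html><body><article>...</article></body></html>"
content = Frenzy.BuiltinExtractor.article("https://example.com/post", html)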

View File

@@ -30,6 +30,15 @@ defmodule Frenzy.Network do
    HTTP.get(url)
  end

  @spec http_post(String.t(), Tesla.Env.body(), [Tesla.option()]) :: Tesla.Env.result()
  def http_post(url, body, options \\ []) do
    if Frenzy.sentry_enabled?() do
      Sentry.Context.add_breadcrumb(%{category: "http_post", message: url})
    end

    HTTP.post(url, body, options)
  end

  # @http_redirect_codes [301, 302]
  # @spec http_get(String.t()) :: {:ok, HTTPoison.Response.t()} | {:error, term()}
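For reference, http_post/3 mirrors the existing http_get/1: it records a Sentry breadcrumb when Sentry is enabled, then delegates to the Tesla-backed HTTP module. A hedged example call, matching how Frenzy.BuiltinExtractor uses it above (the URL is a placeholder):

Frenzy.Network.http_post(
  "http://localhost:3000/readability",
  html,
  headers: [{"content-type", "text/html"}]
)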

View File

@@ -0,0 +1,74 @@
defmodule Frenzy.Pipeline.Extractor.ArsTechnica do
  @moduledoc """
  Extractor for https://arstechnica.com

  Handles multi-page articles
  """
  require Logger
  alias Frenzy.Network
  alias Frenzy.Pipeline.Extractor
  @behaviour Extractor

  @impl Extractor
  def extract(html_tree) do
    case get_pages_from_tree(html_tree) do
      {:error, _} = err -> err
      content -> {:ok, content}
    end
  end

  defp get_pages_from_tree(tree) do
    with [article | _] <- Floki.find(tree, ~s([itemtype="http://schema.org/NewsArticle"])),
         [content | _] <- Floki.find(article, ~s([itemprop=articleBody])) do
      content = clean_content(content)

      next_page_url =
        with [next | _] <- Floki.find(article, ".page-numbers a:last-of-type"),
             "Next" <> _ <- Floki.text(next),
             [href] <- Floki.attribute(next, "href") do
          href
        else
          _ ->
            nil
        end

      if next_page_url != nil do
        with body when not is_nil(body) <- fetch_page(next_page_url),
             next_pages when is_list(next_pages) <- get_pages_from_tree(Floki.parse(body)) do
          [content] ++ next_pages
        else
          _ ->
            [
              content,
              {"p", [], [{"em", [], ["Article truncated, unable to scrape subsequent pages"]}]}
            ]
        end
      else
        [content]
      end
    else
      _ -> {:error, "no matching elements"}
    end
  end

  defp clean_content(tree) do
    Floki.filter_out(tree, ".social-left, .story-sidebar, .ad_wrapper, figcaption .enlarge-link")
  end

  defp fetch_page(url) do
    Logger.debug("Getting Ars Technica page from #{url}")

    case Network.http_get(url) do
      {:ok, %Tesla.Env{status: code, body: body}} when code in 200..299 ->
        body

      {:ok, %Tesla.Env{status: code}} ->
        Logger.warn("Unexpected HTTP code #{code} getting Ars Technica page #{url}")
        nil

      {:error, reason} ->
        Logger.error("Couldn't get Ars Technica page #{url}: #{inspect(reason)}")
        nil
    end
  end
end
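For context, the Extractor behaviour this module implements is not shown in this diff; a sketch of what its contract likely looks like, inferred from extract/1 above (the exact callback spec is an assumption):

defmodule Frenzy.Pipeline.Extractor do
  # One callback: take a parsed Floki tree, return extracted content or an error reason
  @callback extract(Floki.html_tree()) :: {:ok, term()} | {:error, String.t()}
end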

View File

@@ -1,6 +1,7 @@
defmodule Frenzy.Pipeline.ScrapeStage do
  require Logger

  alias Frenzy.Network
  alias Frenzy.BuiltinExtractor
  alias Frenzy.Pipeline.Stage
  @behaviour Stage
@@ -88,13 +89,25 @@ defmodule Frenzy.Pipeline.ScrapeStage do
  defp handle_response(url, %Tesla.Env{body: body}, opts) do
    case opts["extractor"] do
      "builtin" ->
        {:ok, Readability.article(body)}
        {:ok, BuiltinExtractor.article(url, body), true}

      module_name ->
        html_tree = Floki.parse(body)

        try do
          apply(String.to_existing_atom("Elixir." <> module_name), :extract, [html_tree])
          |> case do
            {:ok, content} ->
              # non-builtin extractors go through readable_html to clean up any bad/untrusted HTML
              # this is what Readability.readable_html does, without turning the tree back into a string
              content =
                Readability.Helper.remove_attrs(content, Readability.regexes(:protect_attrs))

              {:ok, content}

            err ->
              err
          end
        rescue
          e ->
            Logger.error(
@@ -110,7 +123,7 @@ defmodule Frenzy.Pipeline.ScrapeStage do
              )
            end

            {:ok, Readability.article(body)}
            {:ok, BuiltinExtractor.article(url, body), true}
        end
    end
    |> case do
@@ -123,7 +136,7 @@ defmodule Frenzy.Pipeline.ScrapeStage do
        html = Floki.map(html, rewrite_image_urls(convert_to_data_uris, URI.parse(url)))

        {:ok, Readability.readable_html(html)}
        {:ok, Floki.raw_html(html)}

      res ->
        res
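Taken together, a scrape stage's "extractor" option is either the literal string "builtin", now routed through Frenzy.BuiltinExtractor, or a fully qualified module name resolved via String.to_existing_atom/1. A hypothetical options map, with the key name taken from opts["extractor"] above:

# Any value other than "builtin" must name a module implementing the Extractor behaviour
opts = %{"extractor" => "Frenzy.Pipeline.Extractor.ArsTechnica"}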

View File

@@ -4,6 +4,7 @@ defmodule FrenzyWeb.ConfigureStage.ScrapeStageLive do
  @extractors [
    {"Builtin", "builtin"},
    {"512 Pixels", Frenzy.Pipeline.Extractor.FiveTwelvePixels},
    {"Ars Technica", Frenzy.Pipeline.Extractor.ArsTechnica},
    {"beckyhansmeyer.com", Frenzy.Pipeline.Extractor.BeckyHansmeyer},
    {"daringfireball.net", Frenzy.Pipeline.Extractor.DaringFireball},
    {"ericasadun.com", Frenzy.Pipeline.Extractor.EricaSadun},