Change extractors to accept/return html trees

Shadowfacts 2019-10-31 17:12:02 -04:00
parent d476839fce
commit eec0b918e7
Signed by: shadowfacts
GPG Key ID: 94A5AB95422746E5
5 changed files with 20 additions and 19 deletions

@@ -1,3 +1,3 @@
 defmodule Frenzy.Pipeline.Extractor do
-  @callback extract(String.t()) :: {:ok, String.t()} | {:error, String.t()}
+  @callback extract(Floki.html_tree()) :: {:ok, Floki.html_tree()} | {:error, String.t()}
 end
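
For illustration, a minimal extractor conforming to the updated callback might look like the following. This is a sketch, not part of the commit: the module name and the "article" selector are hypothetical.

defmodule Frenzy.Pipeline.Extractor.Example do
  @behaviour Frenzy.Pipeline.Extractor

  @impl Frenzy.Pipeline.Extractor
  # Receives an already-parsed Floki tree and returns a subtree,
  # rather than parsing or serializing HTML itself.
  def extract(html_tree) do
    case Floki.find(html_tree, "article") do
      [article | _] -> {:ok, article}
      [] -> {:error, "no matching elements"}
    end
  end
end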

@@ -7,15 +7,13 @@ defmodule Frenzy.Pipeline.Extractor.DaringFireball do
   @behaviour Extractor
 
   @impl Extractor
-  def extract(body) do
-    html_tree = Floki.parse(body)
-
+  def extract(html_tree) do
     case get_article_element(html_tree) || get_link_element(html_tree) do
       nil ->
         {:error, "no matching elements"}
 
       elem ->
-        {:ok, Floki.raw_html(elem)}
+        {:ok, elem}
     end
   end

@@ -7,18 +7,14 @@ defmodule Frenzy.Pipeline.Extractor.EricaSadun do
   @behaviour Extractor
 
   @impl Extractor
-  def extract(body) do
-    html_tree = Floki.parse(body)
-
+  def extract(html_tree) do
     case Floki.find(html_tree, ".post-content") do
       [content_elem | _] ->
         # content element includes social media buttons and related posts
-        content =
-          content_elem
-          |> Floki.filter_out("div.sharedaddy, div#jp-relatedposts")
-          |> Floki.raw_html()
-
-        {:ok, content}
+        {
+          :ok,
+          Floki.filter_out(content_elem, "div.sharedaddy, div#jp-relatedposts")
+        }
 
       _ ->
         {:error, "no matching elements"}

@@ -7,15 +7,13 @@ defmodule Frenzy.Pipeline.Extractor.WhateverScale do
   @behaviour Extractor
 
   @impl Extractor
-  def extract(body) do
-    html_tree = Floki.parse(body)
-
+  def extract(html_tree) do
     case get_article_content(html_tree) do
       nil ->
         {:error, "no matching elements"}
 
       elem ->
-        {:ok, Floki.raw_html(elem)}
+        {:ok, elem}
     end
   end

@@ -68,7 +68,16 @@ defmodule Frenzy.Pipeline.ScrapeStage do
             {:ok, Readability.readable_html(article)}
 
           module_name ->
-            apply(String.to_existing_atom("Elixir." <> module_name), :extract, [body])
+            html_tree = Floki.parse(body)
+
+            case apply(String.to_existing_atom("Elixir." <> module_name), :extract, [html_tree]) do
+              {:error, _} = err ->
+                err
+
+              {:ok, html_tree} ->
+                html_tree
+                |> Floki.raw_html()
+            end
         end
       end
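
The net effect is that ScrapeStage now owns both ends of the conversion: it parses the fetched body once and serializes whatever tree the chosen extractor returns, so individual extractors no longer call Floki.parse or Floki.raw_html themselves. A rough sketch of the resulting flow, where extractor_module stands for the module resolved from the config (names are illustrative):

# Parse once, hand the tree to the extractor, serialize only at the end.
html_tree = Floki.parse(body)

case extractor_module.extract(html_tree) do
  {:ok, content_tree} -> {:ok, Floki.raw_html(content_tree)}
  {:error, _} = err -> err
end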