Add option to convert images in article content to data URIs
parent 98a182986c
commit 3bc37952d1
@@ -5,7 +5,7 @@ defmodule Frenzy.Pipeline.ScrapeStage do
   @impl Stage
   def apply(opts, %{url: url} = item_params) do
-    case get_article_content(url, opts["extractor"]) do
+    case get_article_content(url, opts) do
       {:ok, content} ->
         {:ok, %{item_params | content: content}}

@@ -18,24 +18,36 @@ defmodule Frenzy.Pipeline.ScrapeStage do
   @impl Stage
   def validate_opts(opts) when is_map(opts) do
     # todo: figure out why this errors when an empty map is provided
-    case opts["extractor"] do
-      nil ->
-        {:ok, %{opts | extractor: "builtin"}}
-
-      extractor when not is_binary(extractor) ->
-        {:error, "extractor must be a string"}
-
-      "builtin" ->
-        {:ok, opts}
-
-      extractor ->
-        try do
-          String.to_existing_atom("Elixir." <> extractor)
-          {:ok, opts}
-        rescue
-          ArgumentError ->
-            {:error, "extractor must be \"builtin\" or a module that exists"}
-        end
+    opts =
+      case opts["extractor"] do
+        nil ->
+          {:ok, %{opts | extractor: "builtin"}}
+
+        extractor when not is_binary(extractor) ->
+          {:error, "extractor must be a string"}
+
+        "builtin" ->
+          {:ok, opts}
+
+        extractor ->
+          try do
+            String.to_existing_atom("Elixir." <> extractor)
+            {:ok, opts}
+          rescue
+            ArgumentError ->
+              {:error, "extractor must be \"builtin\" or a module that exists"}
+          end
+      end
+
+    case opts["convert_to_data_uris"] do
+      nil ->
+        {:ok, %{opts | convert_to_data_uris: true}}
+
+      value when is_boolean(value) ->
+        {:ok, opts}
+
+      _ ->
+        {:error, "convert_to_data_uris must be a boolean"}
     end
   end

@@ -43,14 +55,14 @@ defmodule Frenzy.Pipeline.ScrapeStage do
   def validate_opts(_), do: {:error, "options must be a map"}

   @spec get_article_content(String.t(), String.t()) :: {:ok, String.t()} | {:error, String.t()}
-  defp get_article_content(url, extractor) when is_binary(url) and url != "" do
+  defp get_article_content(url, opts) when is_binary(url) and url != "" do
     Logger.debug("Getting article from #{url}")

     url
     |> HTTPoison.get()
     |> case do
       {:ok, response} ->
-        handle_response(url, response, extractor)
+        handle_response(url, response, opts)

       {:error, %HTTPoison.Error{reason: reason}} ->
         {:error, "HTTPoison error: #{reason}"}
@@ -61,8 +73,8 @@ defmodule Frenzy.Pipeline.ScrapeStage do

   @spec handle_response(String.t(), HTTPoison.Response.t(), String.t()) ::
           {:ok, String.t()} | {:error, String.t()}
-  defp handle_response(url, %HTTPoison.Response{status_code: 200, body: body}, extractor) do
-    case extractor do
+  defp handle_response(url, %HTTPoison.Response{status_code: 200, body: body}, opts) do
+    case opts["extractor"] do
       "builtin" ->
         {:ok, Readability.article(body)}

@@ -72,9 +84,15 @@ defmodule Frenzy.Pipeline.ScrapeStage do
     end
     |> case do
       {:ok, html} ->
-        html = Floki.map(html, rewrite_image_urls(URI.parse(url)))
+        convert_to_data_uris =
+          case opts["convert_to_data_uris"] do
+            nil -> true
+            value -> value
+          end

-        case extractor do
+        html = Floki.map(html, rewrite_image_urls(convert_to_data_uris, URI.parse(url)))
+
+        case opts["extractor"] do
           "builtin" ->
             {:ok, Readability.readable_html(html)}

@@ -120,21 +138,54 @@ defmodule Frenzy.Pipeline.ScrapeStage do
   # Generates a helper function for the article with the given URI that takes an HTML element and,
   # if it's an <img> element whose src attribute does not have a hostname, adds the hostname and
   # scheme to the element.
-  defp rewrite_image_urls(%URI{host: host, scheme: scheme}) do
-    fn
-      {"img", [{"src", src} | attrs]} = elem ->
-        case URI.parse(src) do
-          %URI{host: nil, path: path} ->
-            new_src = URI.to_string(%URI{path: path, host: host, scheme: scheme})
-            {"img", [{"src", new_src} | attrs]}
-
-          _ ->
-            elem
-        end
-
-      elem ->
-        elem
-    end
-  end
+  defp rewrite_image_urls(convert_to_data_uris, %URI{host: host, scheme: scheme}) do
+    fn
+      {"img", attrs} ->
+        new_attrs =
+          Enum.map(attrs, fn
+            {"src", src} ->
+              case URI.parse(src) do
+                %URI{host: nil, path: path} ->
+                  new_src =
+                    URI.to_string(%URI{path: path, host: host, scheme: scheme})
+                    |> image_to_data_uri(convert_to_data_uris)
+
+                  {"src", new_src}
+
+                _ ->
+                  {"src", image_to_data_uri(convert_to_data_uris, src)}
+              end
+
+            attr ->
+              attr
+          end)
+
+        {"img", new_attrs}
+
+      elem ->
+        elem
+    end
+  end

+  @content_type_allowlist ["image/jpeg", "image/png", "image/heic", "image/heif", "image/tiff"]
+
+  # convert images to data URIs so that they're stored by clients as part of the body
+  defp image_to_data_uri(true, src) do
+    case HTTPoison.get(src) do
+      {:ok, %HTTPoison.Response{status_code: 200, body: body, headers: headers}} ->
+        {"Content-Type", content_type} =
+          Enum.find(headers, fn {header, _value} -> header == "Content-Type" end)
+
+        if content_type in @content_type_allowlist do
+          "data:#{content_type};base64,#{Base.encode64(body)}"
+        else
+          src
+        end
+
+      _ ->
+        src
+    end
+  end
+
+  defp image_to_data_uri(false, src), do: src
 end
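
For reference, a rough sketch of how the new option is consumed (illustrative, not part of the commit): stage options are plain string-keyed maps, the flag defaults to true when omitted, and when enabled the stage replaces each allowlisted <img> src with a base64 data URI in the same format produced by image_to_data_uri/2 above. The options map and image URL below are assumptions for illustration only.

# Illustrative sketch -- not part of this commit. Assumes HTTPoison is
# available, as it is in ScrapeStage; the URL and options map are placeholders.
opts = %{"extractor" => "builtin", "convert_to_data_uris" => true}

# Omitting the key behaves like true, matching the nil -> true default above.
if Map.get(opts, "convert_to_data_uris", true) do
  {:ok, %HTTPoison.Response{status_code: 200, body: body, headers: headers}} =
    HTTPoison.get("https://example.com/photo.jpg")

  {"Content-Type", content_type} =
    Enum.find(headers, fn {header, _value} -> header == "Content-Type" end)

  # An allowlisted image's src attribute is rewritten to this value.
  "data:#{content_type};base64,#{Base.encode64(body)}"
end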