Mirror of https://gitlab.com/news-flash/article_scraper.git (synced 2025-07-08 00:19:59 +02:00)

Commit f570873aba (parent 2cac8a2678): load config files in background thread
8 changed files with 480 additions and 326 deletions
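What the commit actually does, distilled: ArticleScraper::new no longer parses the grabber config directory before returning; it hands out an initially empty cache behind an Arc<RwLock<Option<ConfigCollection>>> and fills it from a spawned thread. A minimal sketch of that pattern follows; the types and the directory path are simplified stand-ins, not the full scraper code.

use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
use std::thread;

// Stand-in for the real ConfigCollection (a map of site name to GrabberConfig).
type ConfigCollection = HashMap<String, String>;

// Stand-in for GrabberConfig::parse_directory.
fn parse_directory(_dir: &Path) -> Result<ConfigCollection, std::io::Error> {
    Ok(HashMap::new())
}

// Spawn the parse on a background thread and publish the result (or an empty
// map on failure, mirroring the commit) into a shared, initially empty cache.
fn spawn_config_loader(config_path: PathBuf) -> Arc<RwLock<Option<ConfigCollection>>> {
    let config_files: Arc<RwLock<Option<ConfigCollection>>> = Arc::new(RwLock::new(None));
    let locked_config_files = config_files.clone();

    thread::spawn(move || {
        let parsed = parse_directory(&config_path).unwrap_or_default();
        locked_config_files
            .write()
            .expect("Failed to lock config file cache")
            .replace(parsed);
    });

    config_files
}

fn main() {
    let cache = spawn_config_loader(PathBuf::from("./some-config-dir"));
    // Readers see None until the background thread finishes and replaces it.
    println!("parsed yet? {}", cache.read().unwrap().is_some());
}

The real constructor returns the ArticleScraper immediately with this cache inside it; the trade-off shows up later in get_grabber_config (see the src/lib.rs section below).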
Article / save_html (file path not shown in this view): formatting only, no functional change. rustfmt sorts the imports, collapses the multi-line crate::error import, and re-wraps the long write_all call in save_html.

@@ -1,13 +1,10 @@
-use std;
-use url::Url;
-use std::path::PathBuf;
-use chrono::NaiveDateTime;
-use crate::error::{
-    ScraperError,
-    ScraperErrorKind,
-};
-use std::io::Write;
-use failure::ResultExt;
+use crate::error::{ScraperError, ScraperErrorKind};
+use chrono::NaiveDateTime;
+use failure::ResultExt;
+use std;
+use std::io::Write;
+use std::path::PathBuf;
+use url::Url;

 pub struct Article {
     pub title: Option<String>,

@@ -29,12 +25,13 @@ impl Article {
-            html_file.write_all(html.as_bytes()).context(ScraperErrorKind::IO)?;
-            return Ok(())
+            html_file
+                .write_all(html.as_bytes())
+                .context(ScraperErrorKind::IO)?;
+            return Ok(());
ConfigError (file path not shown in this view): rustfmt only.

@@ -1,4 +1,4 @@
-use failure::{Context, Fail, Backtrace, Error};
+use failure::{Backtrace, Context, Error, Fail};
 use std::fmt;

@@ -40,7 +40,9 @@ impl fmt::Display for ConfigError {
 impl From<ConfigErrorKind> for ConfigError {
     fn from(kind: ConfigErrorKind) -> ConfigError {
-        ConfigError { inner: Context::new(kind) }
+        ConfigError {
+            inner: Context::new(kind),
+        }
     }
 }

@@ -52,6 +54,8 @@ impl From<Context<ConfigErrorKind>> for ConfigError {
 impl From<Error> for ConfigError {
     fn from(_: Error) -> ConfigError {
-        ConfigError { inner: Context::new(ConfigErrorKind::Unknown) }
+        ConfigError {
+            inner: Context::new(ConfigErrorKind::Unknown),
+        }
     }
 }
Config parsing macros (file path not shown in this view): re-indentation only, no functional change. The three macros after formatting:

@@ -1,43 +1,43 @@
macro_rules! extract_vec_multi {
    (
        $line: ident,
        $identifier: ident,
        $vector: ident
    ) => {
        if $line.starts_with($identifier) {
            let value = GrabberConfig::extract_value($identifier, $line);
            let value = GrabberConfig::split_values(value);
            let value: Vec<String> = value.iter().map(|s| s.trim().to_string()).collect();
            $vector.extend(value);
            continue;
        }
    };
}

macro_rules! extract_vec_single {
    (
        $line: ident,
        $identifier: ident,
        $vector: ident
    ) => {
        if $line.starts_with($identifier) {
            let value = GrabberConfig::extract_value($identifier, $line);
            $vector.push(value.to_string());
            continue;
        }
    };
}

macro_rules! extract_option_single {
    (
        $line: ident,
        $identifier: ident,
        $option: ident
    ) => {
        if $line.starts_with($identifier) {
            let value = GrabberConfig::extract_value($identifier, $line);
            $option = Some(value.to_string());
            continue;
        }
    };
}
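For context on how these macros are meant to be used (the invocations appear in the config parser further down), here is a self-contained sketch of extract_vec_multi! inside a line-parsing loop. The helper functions, the sample line format and the '|' separator are made-up stand-ins for GrabberConfig::extract_value and GrabberConfig::split_values, whose bodies are not part of this diff.

// Illustrative stand-in for GrabberConfig::extract_value: drop the identifier
// prefix and any ":" that follows it.
fn extract_value<'a>(identifier: &str, line: &'a str) -> &'a str {
    line[identifier.len()..].trim_start_matches(':').trim()
}

// Illustrative stand-in for GrabberConfig::split_values.
fn split_values(value: &str) -> Vec<&str> {
    value.split('|').collect()
}

macro_rules! extract_vec_multi {
    ($line:ident, $identifier:ident, $vector:ident) => {
        if $line.starts_with($identifier) {
            let value = extract_value($identifier, $line);
            let value = split_values(value);
            let value: Vec<String> = value.iter().map(|s| s.trim().to_string()).collect();
            $vector.extend(value);
            continue;
        }
    };
}

fn main() {
    let title = "title";
    let mut xpath_title: Vec<String> = Vec::new();

    for line in ["title: //h1 | //header//h1", "# a comment"] {
        extract_vec_multi!(line, title, xpath_title);
        // Lines that do not match the identifier fall through to here.
    }

    assert_eq!(xpath_title, vec!["//h1".to_string(), "//header//h1".to_string()]);
}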
GrabberConfig / config module (file path not shown in this view). Mostly rustfmt, plus three real changes: the log import switches from error to warn, both config structs gain #[derive(Clone)] (so a config can later be handed out of the shared cache by value), and the "No body xpath found" message is downgraded from error! to warn!.

@@ -1,11 +1,11 @@
-use std::collections;
-use std::path::{PathBuf};
-use std::fs;
-use std::io;
-use std::io::BufRead;
-use failure::ResultExt;
-use log::error;
-use self::error::{ConfigError, ConfigErrorKind};
+use self::error::{ConfigError, ConfigErrorKind};
+use failure::ResultExt;
+use log::warn;
+use std::collections;
+use std::fs;
+use std::io;
+use std::io::BufRead;
+use std::path::PathBuf;

@@ -13,11 +13,13 @@ mod error;
 pub type ConfigCollection = collections::HashMap<String, GrabberConfig>;

+#[derive(Clone)]
 pub struct Replace {
     pub to_replace: String,
     pub replace_with: String,
 }

+#[derive(Clone)]
 pub struct GrabberConfig {
     pub xpath_title: Vec<String>,
     pub xpath_author: Vec<String>,

@@ -32,33 +34,36 @@ pub struct GrabberConfig {
parse_directory: the long HashMap declaration and the collection.insert(...) call are wrapped across several lines; behavior is unchanged.

@@ -99,47 +104,45 @@ impl GrabberConfig {
GrabberConfig::new: the multi-line skip condition, the extract_vec_multi!/extract_vec_single!/extract_option_single! invocations and the replace_vec.push(Replace { ... }) call are re-wrapped; behavior is unchanged.

@@ -155,12 +158,12 @@ impl GrabberConfig {
 if xpath_body.len() == 0 {
-    error!("No body xpath found for {}", config_path.display());
+    warn!("No body xpath found for {}", config_path.display());
     Err(ConfigErrorKind::BadConfig)?
 }
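A note on why the Clone derives matter: once the parsed configs sit behind a shared RwLock, lookups can no longer hand out plain borrows tied to &self, so the scraper clones the matching GrabberConfig out of the cache. A sketch of that lookup pattern with trimmed-down stand-in types; the function name lookup is illustrative, the real code is get_grabber_config in src/lib.rs below.

use std::collections::HashMap;
use std::sync::{Arc, RwLock};

// Trimmed-down stand-in for the real GrabberConfig.
#[allow(dead_code)]
#[derive(Clone)]
struct GrabberConfig {
    xpath_title: Vec<String>,
}

type ConfigCollection = HashMap<String, GrabberConfig>;

// Because GrabberConfig is Clone, the lookup returns an owned copy and the
// read lock is released as soon as the expression finishes.
fn lookup(configs: &Arc<RwLock<Option<ConfigCollection>>>, name: &str) -> Option<GrabberConfig> {
    configs.read().ok()?.as_ref()?.get(name).cloned()
}

fn main() {
    let cache: Arc<RwLock<Option<ConfigCollection>>> = Arc::new(RwLock::new(None));
    // Nothing parsed yet, so the lookup comes back empty.
    assert!(lookup(&cache, "example.com.txt").is_none());
}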
src/error.rs (12 changed lines): rustfmt only.

@@ -1,4 +1,4 @@
-use failure::{Context, Fail, Backtrace, Error};
+use failure::{Backtrace, Context, Error, Fail};
 use std::fmt;

@@ -50,7 +50,9 @@ impl ScraperError {
-        ScraperError { inner: Context::new(kind) }
+        ScraperError {
+            inner: Context::new(kind),
+        }

@@ -62,6 +64,8 @@ impl From<Context<ScraperErrorKind>> for ScraperError {
-        ScraperError { inner: Context::new(ScraperErrorKind::Unknown) }
+        ScraperError {
+            inner: Context::new(ScraperErrorKind::Unknown),
+        }
ImageDownloadError (file path not shown in this view): rustfmt only; the failure and std::fmt imports move below the ScraperErrorKind import and are sorted, and the struct-literal constructions in the From impls are wrapped the same way as in the other error modules.

@@ -1,6 +1,6 @@
-use failure::{Context, Fail, Backtrace, Error};
-use std::fmt;
 use super::super::ScraperErrorKind;
+use failure::{Backtrace, Context, Error, Fail};
+use std::fmt;

@@ -55,7 +55,9 @@ impl ImageDownloadError {
-        ImageDownloadError { inner: Context::new(kind) }
+        ImageDownloadError {
+            inner: Context::new(kind),
+        }

@@ -72,12 +74,16 @@ impl From<ScraperErrorKind> for ImageDownloadError {
The same wrapping is applied to the Context::new(kind) and Context::new(ImageDownloadErrorKind::Unknown) constructions in the remaining two From impls.
ImageDownloader / images module (file path not shown in this view): rustfmt only, no functional change. The imports are sorted and the multi-line log and libxml::tree groups are collapsed:

@@ -1,22 +1,16 @@
-use reqwest;
-use log::{
-    error,
-    debug,
-};
-use libxml::parser::Parser;
-use libxml::xpath::Context;
-use libxml::tree::{
-    Node,
-    SaveOptions,
-};
-use url;
-use failure::ResultExt;
-use std::error::Error;
 use self::error::{ImageDownloadError, ImageDownloadErrorKind};
-use base64;
-use std;
-use image;
 use crate::ArticleScraper;
+use base64;
+use failure::ResultExt;
+use image;
+use libxml::parser::Parser;
+use libxml::tree::{Node, SaveOptions};
+use libxml::xpath::Context;
+use log::{debug, error};
+use reqwest;
+use std;
+use std::error::Error;
+use url;

 mod error;

The remaining hunks wrap long signatures and method chains onto multiple lines: download_images_from_string, download_images_from_context, save_image, check_image_content_type, scale_image and check_image_parent all get one parameter per line; the reqwest and image call chains in save_image and scale_image become one method per line; a dangling "} else {" is joined onto one line; early returns gain trailing semicolons; and the image-download test at the bottom is reflowed the same way. One representative change, the save_image signature:

@@ -92,17 +93,27 @@ impl ImageDownloader {
-    async fn save_image(&self, image_url: &url::Url, parent_url: &Option<url::Url>) -> Result<(String, Option<String>), ImageDownloadError> {
+    async fn save_image(
+        &self,
+        image_url: &url::Url,
+        parent_url: &Option<url::Url>,
+    ) -> Result<(String, Option<String>), ImageDownloadError> {
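One detail of the image inlining that stays the same through all this reformatting: downloaded images are embedded as data URIs built from the response content type and the base64 payload. A minimal sketch of just that step, using the same base64::encode free function this module calls:

// Build a data URI for an image, the same shape the downloader writes back
// into the img node's src attribute.
fn to_data_uri(content_type: &str, bytes: &[u8]) -> String {
    let payload = base64::encode(bytes); // base64 0.x free function, as used in this module
    format!("data:{};base64,{}", content_type, payload)
}

fn main() {
    // A few placeholder bytes (the PNG magic prefix) just to show the shape of the result.
    let uri = to_data_uri("image/png", &[0x89, 0x50, 0x4E, 0x47]);
    assert!(uri.starts_with("data:image/png;base64,"));
    println!("{}", uri);
}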
389
src/lib.rs
389
src/lib.rs
|
@ -1,69 +1,78 @@
|
||||||
|
mod article;
|
||||||
mod config;
|
mod config;
|
||||||
mod error;
|
mod error;
|
||||||
mod article;
|
|
||||||
pub mod images;
|
pub mod images;
|
||||||
|
|
||||||
use reqwest;
|
use self::error::{ScraperError, ScraperErrorKind};
|
||||||
use url;
|
|
||||||
use regex;
|
|
||||||
use log::{
|
|
||||||
error,
|
|
||||||
debug,
|
|
||||||
info,
|
|
||||||
warn,
|
|
||||||
};
|
|
||||||
use crate::article::Article;
|
use crate::article::Article;
|
||||||
use libxml::parser::Parser;
|
use crate::config::{ConfigCollection, GrabberConfig};
|
||||||
use libxml::xpath::Context;
|
|
||||||
use libxml::tree::{
|
|
||||||
Document,
|
|
||||||
Node,
|
|
||||||
SaveOptions,
|
|
||||||
};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::ops::Index;
|
|
||||||
use failure::ResultExt;
|
|
||||||
use std::error::Error;
|
|
||||||
use crate::config::{
|
|
||||||
GrabberConfig,
|
|
||||||
ConfigCollection
|
|
||||||
};
|
|
||||||
use encoding_rs::{
|
|
||||||
Encoding,
|
|
||||||
};
|
|
||||||
use chrono::NaiveDateTime;
|
|
||||||
use std::str::FromStr;
|
|
||||||
use crate::images::ImageDownloader;
|
use crate::images::ImageDownloader;
|
||||||
use self::error::{
|
use chrono::NaiveDateTime;
|
||||||
ScraperError,
|
use encoding_rs::Encoding;
|
||||||
ScraperErrorKind
|
use failure::ResultExt;
|
||||||
};
|
use libxml::parser::Parser;
|
||||||
|
use libxml::tree::{Document, Node, SaveOptions};
|
||||||
|
use libxml::xpath::Context;
|
||||||
|
use log::{debug, error, info, warn};
|
||||||
|
use regex;
|
||||||
|
use reqwest;
|
||||||
|
use std::collections;
|
||||||
|
use std::error::Error;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::str::FromStr;
|
||||||
|
use std::sync::{Arc, RwLock};
|
||||||
|
use std::thread;
|
||||||
|
use url;
|
||||||
|
|
||||||
pub struct ArticleScraper {
|
pub struct ArticleScraper {
|
||||||
pub image_downloader: ImageDownloader,
|
pub image_downloader: ImageDownloader,
|
||||||
config_files: ConfigCollection,
|
config_files: Arc<RwLock<Option<ConfigCollection>>>,
|
||||||
client: reqwest::Client,
|
client: reqwest::Client,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ArticleScraper {
|
impl ArticleScraper {
|
||||||
pub fn new(config_path: PathBuf) -> Result<ArticleScraper, ScraperError> {
|
pub fn new(config_path: PathBuf) -> Result<ArticleScraper, ScraperError> {
|
||||||
|
let config_files = Arc::new(RwLock::new(None));
|
||||||
|
|
||||||
let config_files = GrabberConfig::parse_directory(&config_path).context(ScraperErrorKind::Config)?;
|
let locked_config_files = config_files.clone();
|
||||||
|
thread::spawn(move || {
|
||||||
|
if let Ok(config_files) = GrabberConfig::parse_directory(&config_path) {
|
||||||
|
locked_config_files
|
||||||
|
.write()
|
||||||
|
.expect("Failed to lock config file cache")
|
||||||
|
.replace(config_files);
|
||||||
|
} else {
|
||||||
|
locked_config_files
|
||||||
|
.write()
|
||||||
|
.expect("Failed to lock config file cache")
|
||||||
|
.replace(collections::HashMap::new());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
Ok(ArticleScraper {
|
Ok(ArticleScraper {
|
||||||
image_downloader: ImageDownloader::new((2048, 2048)),
|
image_downloader: ImageDownloader::new((2048, 2048)),
|
||||||
config_files: config_files,
|
config_files,
|
||||||
client: reqwest::Client::new(),
|
client: reqwest::Client::new(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn parse(&self, url: url::Url, download_images: bool) -> Result<Article, ScraperError> {
|
pub async fn parse(
|
||||||
|
&self,
|
||||||
|
url: url::Url,
|
||||||
|
download_images: bool,
|
||||||
|
) -> Result<Article, ScraperError> {
|
||||||
info!("Scraping article: {}", url.as_str());
|
info!("Scraping article: {}", url.as_str());
|
||||||
let response = self.client.head(url.clone()).send().await
|
let response = self
|
||||||
|
.client
|
||||||
|
.head(url.clone())
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
.map_err(|err| {
|
.map_err(|err| {
|
||||||
error!("Failed head request to: {} - {}", url.as_str(), err.description());
|
error!(
|
||||||
|
"Failed head request to: {} - {}",
|
||||||
|
url.as_str(),
|
||||||
|
err.description()
|
||||||
|
);
|
||||||
err
|
err
|
||||||
})
|
})
|
||||||
.context(ScraperErrorKind::Http)?;
|
.context(ScraperErrorKind::Http)?;
|
||||||
|
@ -77,7 +86,7 @@ impl ArticleScraper {
|
||||||
|
|
||||||
// check if we are dealing with text/html
|
// check if we are dealing with text/html
|
||||||
if !ArticleScraper::check_content_type(&response)? {
|
if !ArticleScraper::check_content_type(&response)? {
|
||||||
return Err(ScraperErrorKind::ContentType)?
|
return Err(ScraperErrorKind::ContentType)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
// check if we have a config for the url
|
// check if we have a config for the url
|
||||||
|
@ -91,19 +100,16 @@ impl ArticleScraper {
|
||||||
html: None,
|
html: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut document = Document::new().map_err(|()| {
|
let mut document = Document::new().map_err(|()| ScraperErrorKind::Xml)?;
|
||||||
ScraperErrorKind::Xml
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let mut root = Node::new("article", None, &document).map_err(|()| {
|
let mut root = Node::new("article", None, &document).map_err(|()| ScraperErrorKind::Xml)?;
|
||||||
ScraperErrorKind::Xml
|
|
||||||
})?;
|
|
||||||
|
|
||||||
document.set_root_element(&root);
|
document.set_root_element(&root);
|
||||||
|
|
||||||
ArticleScraper::generate_head(&mut root, &document)?;
|
ArticleScraper::generate_head(&mut root, &document)?;
|
||||||
|
|
||||||
self.parse_pages(&mut article, &url, &mut root, config).await?;
|
self.parse_pages(&mut article, &url, &mut root, &config)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let context = Context::new(&document).map_err(|()| {
|
let context = Context::new(&document).map_err(|()| {
|
||||||
error!("Failed to create xpath context for extracted article");
|
error!("Failed to create xpath context for extracted article");
|
||||||
|
@ -112,16 +118,20 @@ impl ArticleScraper {
|
||||||
|
|
||||||
if let Err(error) = ArticleScraper::prevent_self_closing_tags(&context) {
|
if let Err(error) = ArticleScraper::prevent_self_closing_tags(&context) {
|
||||||
error!("Preventing self closing tags failed - {}", error);
|
error!("Preventing self closing tags failed - {}", error);
|
||||||
return Err(error)
|
return Err(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Err(error) = ArticleScraper::eliminate_noscrip_tag(&context) {
|
if let Err(error) = ArticleScraper::eliminate_noscrip_tag(&context) {
|
||||||
error!("Eliminating <noscript> tag failed - {}", error);
|
error!("Eliminating <noscript> tag failed - {}", error);
|
||||||
return Err(error)
|
return Err(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
if download_images {
|
if download_images {
|
||||||
if let Err(error) = self.image_downloader.download_images_from_context(&context).await {
|
if let Err(error) = self
|
||||||
|
.image_downloader
|
||||||
|
.download_images_from_context(&context)
|
||||||
|
.await
|
||||||
|
{
|
||||||
error!("Downloading images failed: {}", error);
|
error!("Downloading images failed: {}", error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -139,24 +149,34 @@ impl ArticleScraper {
|
||||||
};
|
};
|
||||||
let html = document.to_string_with_options(options);
|
let html = document.to_string_with_options(options);
|
||||||
article.html = Some(html);
|
article.html = Some(html);
|
||||||
|
|
||||||
Ok(article)
|
Ok(article)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn parse_pages(&self, article: &mut Article, url: &url::Url, root: &mut Node, config: &GrabberConfig) -> Result<(), ScraperError> {
|
async fn parse_pages(
|
||||||
|
&self,
|
||||||
|
article: &mut Article,
|
||||||
|
url: &url::Url,
|
||||||
|
root: &mut Node,
|
||||||
|
config: &GrabberConfig,
|
||||||
|
) -> Result<(), ScraperError> {
|
||||||
let html = ArticleScraper::download(&url, &self.client).await?;
|
let html = ArticleScraper::download(&url, &self.client).await?;
|
||||||
let mut document = Self::parse_html(html, config)?;
|
let mut document = Self::parse_html(html, config)?;
|
||||||
let mut xpath_ctx = Self::get_xpath_ctx(&document)?;
|
let mut xpath_ctx = Self::get_xpath_ctx(&document)?;
|
||||||
|
|
||||||
// check for single page link
|
// check for single page link
|
||||||
if let Some(xpath_single_page_link) = config.single_page_link.clone() {
|
if let Some(xpath_single_page_link) = config.single_page_link.clone() {
|
||||||
debug!("Single page link xpath specified in config {}", xpath_single_page_link);
|
debug!(
|
||||||
|
"Single page link xpath specified in config {}",
|
||||||
|
xpath_single_page_link
|
||||||
|
);
|
||||||
if let Ok(result) = xpath_ctx.findvalue(&xpath_single_page_link, None) {
|
if let Ok(result) = xpath_ctx.findvalue(&xpath_single_page_link, None) {
|
||||||
// parse again with single page url
|
// parse again with single page url
|
||||||
debug!("Single page link found {}", result);
|
debug!("Single page link found {}", result);
|
||||||
let single_page_url = url::Url::parse(&result).context(ScraperErrorKind::Url)?;
|
let single_page_url = url::Url::parse(&result).context(ScraperErrorKind::Url)?;
|
||||||
return self.parse_single_page(article, &single_page_url, root, config).await;
|
return self
|
||||||
|
.parse_single_page(article, &single_page_url, root, config)
|
||||||
|
.await;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -181,7 +201,7 @@ impl ArticleScraper {
|
||||||
|
|
||||||
fn parse_html(html: String, config: &GrabberConfig) -> Result<Document, ScraperError> {
|
fn parse_html(html: String, config: &GrabberConfig) -> Result<Document, ScraperError> {
|
||||||
// replace matches in raw html
|
// replace matches in raw html
|
||||||
|
|
||||||
let mut html = html;
|
let mut html = html;
|
||||||
for replace in &config.replace {
|
for replace in &config.replace {
|
||||||
html = html.replace(&replace.to_replace, &replace.replace_with);
|
html = html.replace(&replace.to_replace, &replace.replace_with);
|
||||||
|
@ -202,7 +222,11 @@ impl ArticleScraper {
|
||||||
})?)
|
})?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn evaluate_xpath(xpath_ctx: &Context, xpath: &str, thorw_if_empty: bool) -> Result<Vec<Node>, ScraperError> {
|
pub fn evaluate_xpath(
|
||||||
|
xpath_ctx: &Context,
|
||||||
|
xpath: &str,
|
||||||
|
thorw_if_empty: bool,
|
||||||
|
) -> Result<Vec<Node>, ScraperError> {
|
||||||
let res = xpath_ctx.evaluate(xpath).map_err(|()| {
|
let res = xpath_ctx.evaluate(xpath).map_err(|()| {
|
||||||
error!("Evaluation of xpath {} yielded no results", xpath);
|
error!("Evaluation of xpath {} yielded no results", xpath);
|
||||||
ScraperErrorKind::Xml
|
ScraperErrorKind::Xml
|
||||||
|
@ -213,15 +237,20 @@ impl ArticleScraper {
|
||||||
if node_vec.len() == 0 {
|
if node_vec.len() == 0 {
|
||||||
error!("Evaluation of xpath {} yielded no results", xpath);
|
error!("Evaluation of xpath {} yielded no results", xpath);
|
||||||
if thorw_if_empty {
|
if thorw_if_empty {
|
||||||
return Err(ScraperErrorKind::Xml)?
|
return Err(ScraperErrorKind::Xml)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(node_vec)
|
Ok(node_vec)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn parse_single_page(&self, article: &mut Article, url: &url::Url, root: &mut Node, config: &GrabberConfig) -> Result<(), ScraperError> {
|
async fn parse_single_page(
|
||||||
|
&self,
|
||||||
|
article: &mut Article,
|
||||||
|
url: &url::Url,
|
||||||
|
root: &mut Node,
|
||||||
|
config: &GrabberConfig,
|
||||||
|
) -> Result<(), ScraperError> {
|
||||||
let html = ArticleScraper::download(&url, &self.client).await?;
|
let html = ArticleScraper::download(&url, &self.client).await?;
|
||||||
let document = Self::parse_html(html, config)?;
|
let document = Self::parse_html(html, config)?;
|
||||||
let xpath_ctx = Self::get_xpath_ctx(&document)?;
|
let xpath_ctx = Self::get_xpath_ctx(&document)?;
|
||||||
|
@ -233,10 +262,16 @@ impl ArticleScraper {
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn download(url: &url::Url, client: &reqwest::Client) -> Result<String, ScraperError> {
|
async fn download(url: &url::Url, client: &reqwest::Client) -> Result<String, ScraperError> {
|
||||||
|
let response = client
|
||||||
let response = client.get(url.as_str()).send().await
|
.get(url.as_str())
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
.map_err(|err| {
|
.map_err(|err| {
|
||||||
error!("Downloading HTML failed: GET {} - {}", url.as_str(), err.description());
|
error!(
|
||||||
|
"Downloading HTML failed: GET {} - {}",
|
||||||
|
url.as_str(),
|
||||||
|
err.description()
|
||||||
|
);
|
||||||
err
|
err
|
||||||
})
|
})
|
||||||
.context(ScraperErrorKind::Http)?;
|
.context(ScraperErrorKind::Http)?;
|
||||||
|
@ -245,30 +280,35 @@ impl ArticleScraper {
|
||||||
let headers = response.headers().clone();
|
let headers = response.headers().clone();
|
||||||
let text = response.text().await.context(ScraperErrorKind::Http)?;
|
let text = response.text().await.context(ScraperErrorKind::Http)?;
|
||||||
{
|
{
|
||||||
if let Some(decoded_html) = ArticleScraper::decode_html(&text, ArticleScraper::get_encoding_from_html(&text)) {
|
if let Some(decoded_html) = ArticleScraper::decode_html(
|
||||||
return Ok(decoded_html)
|
&text,
|
||||||
|
ArticleScraper::get_encoding_from_html(&text),
|
||||||
|
) {
|
||||||
|
return Ok(decoded_html);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(decoded_html) = ArticleScraper::decode_html(&text, ArticleScraper::get_encoding_from_http_header(&headers)) {
|
if let Some(decoded_html) = ArticleScraper::decode_html(
|
||||||
return Ok(decoded_html)
|
&text,
|
||||||
|
ArticleScraper::get_encoding_from_http_header(&headers),
|
||||||
|
) {
|
||||||
|
return Ok(decoded_html);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
warn!("No encoding of HTML detected - assuming utf-8");
|
warn!("No encoding of HTML detected - assuming utf-8");
|
||||||
return Ok(text)
|
return Ok(text);
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(ScraperErrorKind::Http)?
|
Err(ScraperErrorKind::Http)?
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_encoding_from_http_header(headers: &reqwest::header::HeaderMap) -> Option<&str> {
|
fn get_encoding_from_http_header(headers: &reqwest::header::HeaderMap) -> Option<&str> {
|
||||||
|
|
||||||
if let Some(content_type) = headers.get(reqwest::header::CONTENT_TYPE) {
|
if let Some(content_type) = headers.get(reqwest::header::CONTENT_TYPE) {
|
||||||
if let Ok(content_type) = content_type.to_str() {
|
if let Ok(content_type) = content_type.to_str() {
|
||||||
let regex = regex::Regex::new(r#"charset=([^"']+)"#).unwrap();
|
let regex = regex::Regex::new(r#"charset=([^"']+)"#).unwrap();
|
||||||
if let Some(captures) = regex.captures(content_type) {
|
if let Some(captures) = regex.captures(content_type) {
|
||||||
if let Some(regex_match) = captures.get(1) {
|
if let Some(regex_match) = captures.get(1) {
|
||||||
return Some(regex_match.as_str())
|
return Some(regex_match.as_str());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -280,20 +320,19 @@ impl ArticleScraper {
|
||||||
let regex = regex::Regex::new(r#"<meta.*?charset=([^"']+)"#).unwrap();
|
let regex = regex::Regex::new(r#"<meta.*?charset=([^"']+)"#).unwrap();
|
||||||
if let Some(captures) = regex.captures(html) {
|
if let Some(captures) = regex.captures(html) {
|
||||||
if let Some(regex_match) = captures.get(1) {
|
if let Some(regex_match) = captures.get(1) {
|
||||||
return Some(regex_match.as_str())
|
return Some(regex_match.as_str());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
fn decode_html(html: &str, encoding: Option<&str>) -> Option<String> {
|
fn decode_html(html: &str, encoding: Option<&str>) -> Option<String> {
|
||||||
|
|
||||||
if let Some(encoding) = encoding {
|
if let Some(encoding) = encoding {
|
||||||
if let Some(encoding) = Encoding::for_label(encoding.as_bytes()) {
|
if let Some(encoding) = Encoding::for_label(encoding.as_bytes()) {
|
||||||
let (decoded_html, _, invalid_chars) = encoding.decode(html.as_bytes());
|
let (decoded_html, _, invalid_chars) = encoding.decode(html.as_bytes());
|
||||||
|
|
||||||
if !invalid_chars {
|
if !invalid_chars {
|
||||||
return Some(decoded_html.into_owned())
|
return Some(decoded_html.into_owned());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
warn!("Could not decode HTML. Encoding: {}", encoding);
|
warn!("Could not decode HTML. Encoding: {}", encoding);
|
||||||
|
@ -301,46 +340,49 @@ impl ArticleScraper {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_grabber_config(&self, url: &url::Url) -> Result<&GrabberConfig, ScraperError> {
|
fn get_grabber_config(&self, url: &url::Url) -> Result<GrabberConfig, ScraperError> {
|
||||||
|
let config_name = match url.host_str() {
|
||||||
let config_name = match url.host_str()
|
|
||||||
{
|
|
||||||
Some(name) => {
|
Some(name) => {
|
||||||
let mut name = name;
|
let mut name = name;
|
||||||
if name.starts_with("www.") {
|
if name.starts_with("www.") {
|
||||||
name = &name[4..]
|
name = &name[4..]
|
||||||
}
|
}
|
||||||
name
|
name
|
||||||
},
|
}
|
||||||
None => {
|
None => {
|
||||||
error!("Getting config failed due to bad Url");
|
error!("Getting config failed due to bad Url");
|
||||||
return Err(ScraperErrorKind::Config)?
|
return Err(ScraperErrorKind::Config)?;
|
||||||
},
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let config_name = config_name.to_owned() + ".txt";
|
let config_name = config_name.to_owned() + ".txt";
|
||||||
|
|
||||||
if !self.config_files.contains_key(&config_name) {
|
if let Some(config_files) = &*self.config_files.read().unwrap() {
|
||||||
error!("No config file of the name {} fount", config_name);
|
match config_files.get(&config_name) {
|
||||||
Err(ScraperErrorKind::Config)?
|
Some(config) => return Ok(config.clone()),
|
||||||
|
None => {
|
||||||
|
error!("No config file of the name {} fount", config_name);
|
||||||
|
Err(ScraperErrorKind::Config)?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
error!("Config files have not been parsed yet.");
|
||||||
|
return Err(ScraperErrorKind::Config)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(self.config_files.index(&config_name))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_content_type(response: &reqwest::Response) -> Result<bool, ScraperError> {
|
fn check_content_type(response: &reqwest::Response) -> Result<bool, ScraperError> {
|
||||||
|
|
||||||
if response.status().is_success() {
|
if response.status().is_success() {
|
||||||
if let Some(content_type) = response.headers().get(reqwest::header::CONTENT_TYPE) {
|
if let Some(content_type) = response.headers().get(reqwest::header::CONTENT_TYPE) {
|
||||||
if let Ok(content_type) = content_type.to_str() {
|
if let Ok(content_type) = content_type.to_str() {
|
||||||
if content_type.contains("text/html") {
|
if content_type.contains("text/html") {
|
||||||
return Ok(true)
|
return Ok(true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
error!("Content type is not text/HTML");
|
error!("Content type is not text/HTML");
|
||||||
return Ok(false)
|
return Ok(false);
|
||||||
}
|
}
|
||||||
|
|
||||||
error!("Failed to determine content type");
|
error!("Failed to determine content type");
|
||||||
|
@ -348,10 +390,9 @@ impl ArticleScraper {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_redirect(response: &reqwest::Response) -> Option<url::Url> {
|
fn check_redirect(response: &reqwest::Response) -> Option<url::Url> {
|
||||||
|
|
||||||
if response.status() == reqwest::StatusCode::PERMANENT_REDIRECT {
|
if response.status() == reqwest::StatusCode::PERMANENT_REDIRECT {
|
||||||
debug!("Article url redirects to {}", response.url().as_str());
|
debug!("Article url redirects to {}", response.url().as_str());
|
||||||
return Some(response.url().clone())
|
return Some(response.url().clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
None
|
None
|
||||||
|
@ -360,9 +401,9 @@ impl ArticleScraper {
|
||||||
fn extract_value(context: &Context, xpath: &str) -> Result<String, ScraperError> {
|
fn extract_value(context: &Context, xpath: &str) -> Result<String, ScraperError> {
|
||||||
let node_vec = Self::evaluate_xpath(context, xpath, false)?;
|
let node_vec = Self::evaluate_xpath(context, xpath, false)?;
|
||||||
if let Some(val) = node_vec.get(0) {
|
if let Some(val) = node_vec.get(0) {
|
||||||
return Ok(val.get_content())
|
return Ok(val.get_content());
|
||||||
}
|
}
|
||||||
|
|
||||||
Err(ScraperErrorKind::Xml)?
|
Err(ScraperErrorKind::Xml)?
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -372,8 +413,8 @@ impl ArticleScraper {
|
||||||
for node in node_vec {
|
for node in node_vec {
|
||||||
val.push_str(&node.get_content());
|
val.push_str(&node.get_content());
|
||||||
}
|
}
|
||||||
|
|
||||||
return Ok(val.trim().to_string())
|
return Ok(val.trim().to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
fn strip_node(context: &Context, xpath: &String) -> Result<(), ScraperError> {
|
fn strip_node(context: &Context, xpath: &String) -> Result<(), ScraperError> {
|
||||||
|
@ -391,7 +432,10 @@ impl ArticleScraper {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn strip_id_or_class(context: &Context, id_or_class: &String) -> Result<(), ScraperError> {
|
fn strip_id_or_class(context: &Context, id_or_class: &String) -> Result<(), ScraperError> {
|
||||||
let xpath = &format!("//*[contains(@class, '{}') or contains(@id, '{}')]", id_or_class, id_or_class);
|
let xpath = &format!(
|
||||||
|
"//*[contains(@class, '{}') or contains(@id, '{}')]",
|
||||||
|
id_or_class, id_or_class
|
||||||
|
);
|
||||||
|
|
||||||
let mut ancestor = xpath.clone();
|
let mut ancestor = xpath.clone();
|
||||||
if ancestor.starts_with("//") {
|
if ancestor.starts_with("//") {
|
||||||
|
@ -406,8 +450,11 @@ impl ArticleScraper {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fix_lazy_images(context: &Context, class: &str, property_url: &str) -> Result<(), ScraperError> {
|
fn fix_lazy_images(
|
||||||
|
context: &Context,
|
||||||
|
class: &str,
|
||||||
|
property_url: &str,
|
||||||
|
) -> Result<(), ScraperError> {
|
||||||
let xpath = &format!("//img[contains(@class, '{}')]", class);
|
let xpath = &format!("//img[contains(@class, '{}')]", class);
|
||||||
let node_vec = Self::evaluate_xpath(context, xpath, false)?;
|
let node_vec = Self::evaluate_xpath(context, xpath, false)?;
|
||||||
for mut node in node_vec {
|
for mut node in node_vec {
|
||||||
|
@ -421,7 +468,6 @@ impl ArticleScraper {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fix_iframe_size(context: &Context, site_name: &str) -> Result<(), ScraperError> {
|
fn fix_iframe_size(context: &Context, site_name: &str) -> Result<(), ScraperError> {
|
||||||
|
|
||||||
let xpath = &format!("//iframe[contains(@src, '{}')]", site_name);
|
let xpath = &format!("//iframe[contains(@src, '{}')]", site_name);
|
||||||
let node_vec = Self::evaluate_xpath(context, xpath, false)?;
|
let node_vec = Self::evaluate_xpath(context, xpath, false)?;
|
||||||
for mut node in node_vec {
|
for mut node in node_vec {
|
||||||
|
@@ -439,70 +485,86 @@ impl ArticleScraper {
                         }
                     }
                 }
 
                 error!("Failed to add video wrapper <div> as parent of iframe");
-                return Err(ScraperErrorKind::Xml)?
+                return Err(ScraperErrorKind::Xml)?;
             }
 
             error!("Failed to get parent of iframe");
-            return Err(ScraperErrorKind::Xml)?
+            return Err(ScraperErrorKind::Xml)?;
         }
         Ok(())
     }
 
-    fn remove_attribute(context: &Context, tag: Option<&str>, attribute: &str) -> Result<(), ScraperError> {
+    fn remove_attribute(
+        context: &Context,
+        tag: Option<&str>,
+        attribute: &str,
+    ) -> Result<(), ScraperError> {
         let xpath_tag = match tag {
             Some(tag) => tag,
-            None => "*"
+            None => "*",
         };
 
         let xpath = &format!("//{}[@{}]", xpath_tag, attribute);
         let node_vec = Self::evaluate_xpath(context, xpath, false)?;
         for mut node in node_vec {
             if let Err(_) = node.remove_property(attribute) {
-                return Err(ScraperErrorKind::Xml)?
+                return Err(ScraperErrorKind::Xml)?;
             }
         }
         Ok(())
     }
 
-    fn add_attribute(context: &Context, tag: Option<&str>, attribute: &str, value: &str) -> Result<(), ScraperError> {
+    fn add_attribute(
+        context: &Context,
+        tag: Option<&str>,
+        attribute: &str,
+        value: &str,
+    ) -> Result<(), ScraperError> {
         let xpath_tag = match tag {
             Some(tag) => tag,
-            None => "*"
+            None => "*",
         };
 
         let xpath = &format!("//{}", xpath_tag);
         let node_vec = Self::evaluate_xpath(context, xpath, false)?;
         for mut node in node_vec {
             if let Err(_) = node.set_attribute(attribute, value) {
-                return Err(ScraperErrorKind::Xml)?
+                return Err(ScraperErrorKind::Xml)?;
             }
         }
         Ok(())
     }
 
-    fn get_attribute(context: &Context, xpath: &str, attribute: &str) -> Result<String, ScraperError> {
+    fn get_attribute(
+        context: &Context,
+        xpath: &str,
+        attribute: &str,
+    ) -> Result<String, ScraperError> {
         let node_vec = Self::evaluate_xpath(context, xpath, false)?;
         for node in node_vec {
             if let Some(value) = node.get_attribute(attribute) {
-                return Ok(value)
+                return Ok(value);
             }
         }
 
         Err(ScraperErrorKind::Xml)?
     }
 
-    fn repair_urls(context: &Context, xpath: &str, attribute: &str, article_url: &url::Url) -> Result<(), ScraperError> {
+    fn repair_urls(
+        context: &Context,
+        xpath: &str,
+        attribute: &str,
+        article_url: &url::Url,
+    ) -> Result<(), ScraperError> {
         let node_vec = Self::evaluate_xpath(context, xpath, false)?;
         for mut node in node_vec {
             if let Some(val) = node.get_attribute(attribute) {
                 if let Err(url::ParseError::RelativeUrlWithoutBase) = url::Url::parse(&val) {
                     if let Ok(fixed_url) = ArticleScraper::complete_url(article_url, &val) {
                         if let Err(_) = node.set_attribute(attribute, fixed_url.as_str()) {
-                            return Err(ScraperErrorKind::Xml)?
+                            return Err(ScraperErrorKind::Xml)?;
                         }
                     }
                 }
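
For orientation, a small standalone sketch of the XPath strings that remove_attribute and add_attribute build above from their tag/attribute arguments; the helper name xpath_for and the sample arguments are illustrative assumptions, not part of this commit:

fn xpath_for(tag: Option<&str>, attribute: &str) -> String {
    // A missing tag falls back to the wildcard "*", matching any element
    // carrying the attribute.
    let xpath_tag = tag.unwrap_or("*");
    format!("//{}[@{}]", xpath_tag, attribute)
}

fn main() {
    assert_eq!(xpath_for(Some("img"), "width"), "//img[@width]");
    assert_eq!(xpath_for(None, "style"), "//*[@style]");
}
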
@@ -511,8 +573,10 @@ impl ArticleScraper {
         Ok(())
     }
 
-    fn complete_url(article_url: &url::Url, incomplete_url: &str) -> Result<url::Url, ScraperError> {
+    fn complete_url(
+        article_url: &url::Url,
+        incomplete_url: &str,
+    ) -> Result<url::Url, ScraperError> {
         let mut completed_url = article_url.scheme().to_owned();
         completed_url.push(':');
 
@@ -522,17 +586,16 @@ impl ArticleScraper {
                     completed_url.push_str("//");
                     completed_url.push_str(host);
                 }
-                _ => return Err(ScraperErrorKind::Url)?
+                _ => return Err(ScraperErrorKind::Url)?,
             };
         }
 
         completed_url.push_str(incomplete_url);
         let url = url::Url::parse(&completed_url).context(ScraperErrorKind::Url)?;
-        return Ok(url)
+        return Ok(url);
     }
 
     fn strip_junk(context: &Context, config: &GrabberConfig, url: &url::Url) {
         // strip specified xpath
         for xpath_strip in &config.xpath_strip {
             let _ = ArticleScraper::strip_node(&context, xpath_strip);
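
As a sanity check on the URL-completion logic in the two hunks above, here is a minimal standalone sketch of the same idea; the function name complete and the example URLs are assumptions for illustration, not the crate's own API:

use url::Url;

// Protocol-relative ("//host/...") and host-relative ("/path") references are
// re-anchored on the article's scheme and, where needed, its host.
fn complete(article_url: &Url, incomplete: &str) -> Result<Url, url::ParseError> {
    let mut completed = article_url.scheme().to_owned();
    completed.push(':');
    if !incomplete.starts_with("//") {
        if let Some(host) = article_url.host_str() {
            completed.push_str("//");
            completed.push_str(host);
        }
    }
    completed.push_str(incomplete);
    Url::parse(&completed)
}

fn main() -> Result<(), url::ParseError> {
    let article = Url::parse("https://example.com/news/article.html")?;
    assert_eq!(
        complete(&article, "/img/logo.png")?.as_str(),
        "https://example.com/img/logo.png"
    );
    assert_eq!(
        complete(&article, "//cdn.example.com/img/logo.png")?.as_str(),
        "https://cdn.example.com/img/logo.png"
    );
    Ok(())
}
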
@@ -545,7 +608,10 @@ impl ArticleScraper {
 
         // strip any <img> element where @src attribute contains this substring
         for xpath_strip_img_src in &config.strip_image_src {
-            let _ = ArticleScraper::strip_node(&context, &format!("//img[contains(@src,'{}')]", xpath_strip_img_src));
+            let _ = ArticleScraper::strip_node(
+                &context,
+                &format!("//img[contains(@src,'{}')]", xpath_strip_img_src),
+            );
         }
 
         let _ = ArticleScraper::fix_lazy_images(&context, "lazyload", "data-src");
@@ -563,13 +629,16 @@ impl ArticleScraper {
         let _ = ArticleScraper::repair_urls(&context, "//iframe", "src", &url);
 
         // strip elements using Readability.com and Instapaper.com ignore class names
         // .entry-unrelated and .instapaper_ignore
         // See http://blog.instapaper.com/post/730281947
         let _ = ArticleScraper::strip_node(&context, &String::from(
             "//*[contains(@class,' entry-unrelated ') or contains(@class,' instapaper_ignore ')]"));
 
         // strip elements that contain style="display: none;"
-        let _ = ArticleScraper::strip_node(&context, &String::from("//*[contains(@style,'display:none')]"));
+        let _ = ArticleScraper::strip_node(
+            &context,
+            &String::from("//*[contains(@style,'display:none')]"),
+        );
 
         // strip all scripts
         let _ = ArticleScraper::strip_node(&context, &String::from("//script"));
@@ -580,12 +649,11 @@ impl ArticleScraper {
         // strip all empty url-tags <a/>
         let _ = ArticleScraper::strip_node(&context, &String::from("//a[not(node())]"));
 
         // strip all external css and fonts
         let _ = ArticleScraper::strip_node(&context, &String::from("//*[@type='text/css']"));
     }
 
     fn extract_metadata(context: &Context, config: &GrabberConfig, article: &mut Article) {
         // try to get title
         for xpath_title in &config.xpath_title {
             if let Ok(title) = ArticleScraper::extract_value_merge(&context, xpath_title) {
@@ -611,47 +679,51 @@ impl ArticleScraper {
                 if let Ok(date) = NaiveDateTime::from_str(&date_string) {
                     article.date = Some(date);
                     break;
-                }
-                else {
+                } else {
                     warn!("Parsing the date string '{}' failed", date_string);
                 }
             }
         }
     }
 
-    fn extract_body(context: &Context, root: &mut Node, config: &GrabberConfig) -> Result<(), ScraperError> {
+    fn extract_body(
+        context: &Context,
+        root: &mut Node,
+        config: &GrabberConfig,
+    ) -> Result<(), ScraperError> {
         let mut found_something = false;
         for xpath_body in &config.xpath_body {
             found_something = ArticleScraper::extract_body_single(&context, root, xpath_body)?;
         }
 
         if !found_something {
-            return Err(ScraperErrorKind::Scrape)?
+            return Err(ScraperErrorKind::Scrape)?;
         }
 
         Ok(())
     }
 
-    fn extract_body_single(context: &Context, root: &mut Node, xpath: &str) -> Result<bool, ScraperError> {
+    fn extract_body_single(
+        context: &Context,
+        root: &mut Node,
+        xpath: &str,
+    ) -> Result<bool, ScraperError> {
         let mut found_something = false;
         {
             let node_vec = Self::evaluate_xpath(context, xpath, false)?;
             for mut node in node_vec {
                 if node.get_property("style").is_some() {
                     if let Err(_) = node.remove_property("style") {
-                        return Err(ScraperErrorKind::Xml)?
+                        return Err(ScraperErrorKind::Xml)?;
                     }
                 }
 
                 node.unlink();
                 if let Ok(_) = root.add_child(&mut node) {
                     found_something = true;
-                }
-                else {
+                } else {
                     error!("Failed to add body to prepared document");
-                    return Err(ScraperErrorKind::Xml)?
+                    return Err(ScraperErrorKind::Xml)?;
                 }
             }
         }
@@ -660,11 +732,12 @@ impl ArticleScraper {
     }
 
     fn check_for_next_page(&self, context: &Context, config: &GrabberConfig) -> Option<url::Url> {
         if let Some(next_page_xpath) = config.next_page_link.clone() {
-            if let Ok(next_page_string) = ArticleScraper::get_attribute(&context, &next_page_xpath, "href") {
+            if let Ok(next_page_string) =
+                ArticleScraper::get_attribute(&context, &next_page_xpath, "href")
+            {
                 if let Ok(next_page_url) = url::Url::parse(&next_page_string) {
-                    return Some(next_page_url)
+                    return Some(next_page_url);
                 }
             }
         }
@@ -674,12 +747,11 @@ impl ArticleScraper {
     }
 
     fn generate_head(root: &mut Node, document: &Document) -> Result<(), ScraperError> {
         if let Ok(mut head_node) = Node::new("head", None, document) {
             if let Ok(()) = root.add_prev_sibling(&mut head_node) {
                 if let Ok(mut meta) = head_node.new_child(None, "meta") {
                     if let Ok(_) = meta.set_property("charset", "utf-8") {
-                        return Ok(())
+                        return Ok(());
                     }
                 }
             }
@@ -689,7 +761,6 @@ impl ArticleScraper {
     }
 
     fn prevent_self_closing_tags(context: &Context) -> Result<(), ScraperError> {
         // search document for empty tags and add a empty text node as child
         // this prevents libxml from self closing non void elements such as iframe
 
@@ -697,7 +768,7 @@ impl ArticleScraper {
         let node_vec = Self::evaluate_xpath(context, xpath, false)?;
         for mut node in node_vec {
             if node.get_name() == "meta" {
-                continue
+                continue;
             }
 
             let _ = node.add_text_child(None, "empty", "");
@@ -707,7 +778,6 @@ impl ArticleScraper {
     }
 
     fn eliminate_noscrip_tag(context: &Context) -> Result<(), ScraperError> {
         let xpath = "//noscript";
         let node_vec = Self::evaluate_xpath(context, xpath, false)?;
 
@@ -720,18 +790,16 @@ impl ArticleScraper {
                     let _ = parent.add_child(&mut child);
                 }
             }
 
         }
 
         Ok(())
     }
 }
 
 #[cfg(test)]
 mod tests {
     use crate::*;
 
     #[tokio::test(basic_scheduler)]
     async fn golem() {
         let config_path = PathBuf::from(r"./resources/tests/golem");
@@ -742,7 +810,12 @@ mod tests {
         let article = grabber.parse(url, true).await.unwrap();
         article.save_html(&out_path).unwrap();
 
-        assert_eq!(article.title, Some(String::from("HTTP Error 418: Fehlercode \"Ich bin eine Teekanne\" darf bleiben")));
+        assert_eq!(
+            article.title,
+            Some(String::from(
+                "HTTP Error 418: Fehlercode \"Ich bin eine Teekanne\" darf bleiben"
+            ))
+        );
         assert_eq!(article.author, Some(String::from("Hauke Gierow")));
     }
 
@@ -750,12 +823,20 @@ mod tests {
     async fn phoronix() {
         let config_path = PathBuf::from(r"./resources/tests/phoronix");
         let out_path = PathBuf::from(r"./test_output");
-        let url = url::Url::parse("http://www.phoronix.com/scan.php?page=article&item=amazon_ec2_bare&num=1").unwrap();
+        let url = url::Url::parse(
+            "http://www.phoronix.com/scan.php?page=article&item=amazon_ec2_bare&num=1",
+        )
+        .unwrap();
 
         let grabber = ArticleScraper::new(config_path).unwrap();
         let article = grabber.parse(url, true).await.unwrap();
         article.save_html(&out_path).unwrap();
 
-        assert_eq!(article.title, Some(String::from("Amazon EC2 Cloud Benchmarks Against Bare Metal Systems")));
+        assert_eq!(
+            article.title,
+            Some(String::from(
+                "Amazon EC2 Cloud Benchmarks Against Bare Metal Systems"
+            ))
+        );
     }
 }
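
For completeness, a minimal usage sketch mirroring the flow exercised by the two tests above; the crate name article_scraper, the config directory, the output path, and the article URL are placeholders/assumptions rather than part of this diff:

use article_scraper::ArticleScraper;
use std::path::PathBuf;

#[tokio::main]
async fn main() {
    // Placeholder paths and URL; point these at a real config dir and article.
    let config_path = PathBuf::from(r"./resources/tests/golem");
    let out_path = PathBuf::from(r"./test_output");
    let url = url::Url::parse("https://www.golem.de/").unwrap();

    // Load the grabber configs, download and extract the article,
    // then write the cleaned HTML to disk.
    let grabber = ArticleScraper::new(config_path).unwrap();
    let article = grabber.parse(url, true).await.unwrap();
    article.save_html(&out_path).unwrap();
}
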