Skip to content

Commit

Permalink
misc: Run clippy and fixes all warnings
Browse files Browse the repository at this point in the history
  • Loading branch information
Skallwar committed Jan 22, 2022
1 parent 931f1e5 commit c9eb641
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 23 deletions.
2 changes: 1 addition & 1 deletion src/disk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ pub fn symlink(source: &str, destination: &str, path: &Option<PathBuf>) {
None => PathBuf::from(destination),
};

if let Err(_) = symlink_auto(source, &destination) {
if symlink_auto(source, &destination).is_err() {
warn!(
"{} is already present, coulnd't create a symlink to {}",
destination.display(),
Expand Down
12 changes: 5 additions & 7 deletions src/downloader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,16 +120,14 @@ impl Downloader {
Some(content_type_header) => {
let content_type = content_type_header.to_str().unwrap();
let data_type_captures =
DATA_TYPE_REGEX.captures_iter(&content_type).nth(0);
DATA_TYPE_REGEX.captures_iter(content_type).next();
let data_type = data_type_captures
.map_or(String::from("text/html"), |first| {
String::from(first.get(1).unwrap().as_str().to_lowercase())
first.get(1).unwrap().as_str().to_lowercase()
});
let charset_captures =
CHARSET_REGEX.captures_iter(&content_type).nth(0);
let charset = charset_captures.map(|first| {
String::from(first.get(1).unwrap().as_str().to_lowercase())
});
let charset_captures = CHARSET_REGEX.captures_iter(content_type).next();
let charset = charset_captures
.map(|first| first.get(1).unwrap().as_str().to_lowercase());
(data_type, charset)
}
None => (String::from("text/html"), None),
Expand Down
19 changes: 7 additions & 12 deletions src/scraper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ impl Scraper {
let relative_path = diff_path.as_path().to_str().unwrap();

dom_url.clear();
dom_url.push_str(&relative_path);
dom_url.push_str(relative_path);
}

/// Find the charset of the webpage. ``data`` is not a String as this might not be utf8.
Expand All @@ -110,7 +110,7 @@ impl Scraper {
// We use the first one, hoping we are in the <head> of the page... or if nothing is found
// we use the http charset (if any).
captures
.map(|first| String::from(first.get(1).unwrap().as_str().to_lowercase()))
.map(|first| first.get(1).unwrap().as_str().to_lowercase())
.or(http_charset)
}

Expand All @@ -124,17 +124,13 @@ impl Scraper {
let decode_bytes = decode_result.0.borrow();

let encode_result = charset_dest.encode(decode_bytes);
let encode_bytes = encode_result.0.into_owned();

encode_bytes
encode_result.0.into_owned()
}

/// Check if the charset requires conversion
fn needs_charset_conversion(charset: &str) -> bool {
match charset {
"utf-8" => false,
_ => true,
}
!matches!(charset, "utf-8")
}

/// Process an html file: add new urls to the channel and prepare for offline navigation
Expand All @@ -157,8 +153,7 @@ impl Scraper {

let need_charset_conversion = Self::needs_charset_conversion(&charset_source_str);

let charset_source = match encoding_rs::Encoding::for_label(&charset_source_str.as_bytes())
{
let charset_source = match encoding_rs::Encoding::for_label(charset_source_str.as_bytes()) {
Some(encoder) => encoder,
None => {
warn!(
Expand Down Expand Up @@ -196,7 +191,7 @@ impl Scraper {
let path = url_helper::to_path(&next_full_url);

if scraper.map_url_path(&next_full_url, path.clone()) {
if !Scraper::is_on_another_domain(&next_url, &url) {
if !Scraper::is_on_another_domain(next_url, url) {
// If we are determining for a local domain
if scraper.args.depth == INFINITE_DEPTH || depth < scraper.args.depth {
Scraper::push(transmitter, next_full_url, depth + 1, ext_depth);
Expand Down Expand Up @@ -311,7 +306,7 @@ impl Scraper {
},
Ok((url, depth, ext_depth)) => {
counter = 0;
Scraper::handle_url(&self_clone, &tx, url, depth, ext_depth);
Scraper::handle_url(self_clone, &tx, url, depth, ext_depth);
self_clone.sleep(&mut rng);
}
}
Expand Down
6 changes: 3 additions & 3 deletions src/url_helper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,11 @@ pub fn to_path(url: &Url) -> String {
.map_or("", |filename| filename.to_str().unwrap())
.to_string();

if url_path_and_query.ends_with("/") {
if url_path_and_query.ends_with('/') {
filename = "index.html".to_string();
parent = url_path_and_query.trim_end_matches("/").to_string();
parent = url_path_and_query.trim_end_matches('/').to_string();
} else if Path::new(&filename).extension().is_none() {
parent = url_path_and_query.trim_end_matches("/").to_string();
parent = url_path_and_query.trim_end_matches('/').to_string();
filename = "index_no_slash.html".to_string();
}

Expand Down

0 comments on commit c9eb641

Please sign in to comment.