rustfmt
Gambhiro committed Oct 12, 2019
1 parent 6d24742 commit 6aec427
Showing 8 changed files with 267 additions and 159 deletions.
110 changes: 51 additions & 59 deletions src/app.rs
@@ -1,17 +1,19 @@
use std::default::Default;
use std::error::Error;
use std::fs;
use std::process::Command;
use std::path::{Path, PathBuf};
use std::process::Command;

use chrono::prelude::*;
use regex::Regex;
use walkdir::WalkDir;
use chrono::prelude::*;

use crate::ebook::{Ebook, EbookMetadata, EbookFormat, DICTIONARY_METADATA_SEP, DICTIONARY_WORD_ENTRIES_SEP};
use crate::dict_word::{DictWord, DictWordHeader};
use crate::helpers::{is_hidden, ensure_parent, ensure_parent_all};
use crate::ebook::{
Ebook, EbookFormat, EbookMetadata, DICTIONARY_METADATA_SEP, DICTIONARY_WORD_ENTRIES_SEP,
};
use crate::error::ToolError;
use crate::helpers::{ensure_parent, ensure_parent_all, is_hidden};

pub struct AppStartParams {
pub ebook_format: EbookFormat,
@@ -47,7 +49,6 @@ pub enum ZipWith {

impl Default for AppStartParams {
fn default() -> Self {

// Zip cli tool is not usually available on Windows, so we zip with lib there.
//
// lise-henry/epub-builder notes that zipping epub with the lib sometimes gave her errors
@@ -149,7 +150,11 @@ fn look_for_kindlegen() -> Option<PathBuf> {
// Try if it is available from the system PATH.

let output = if cfg!(target_os = "windows") {
match Command::new("cmd").arg("/C").arg("where kindlegen.exe").output() {
match Command::new("cmd")
.arg("/C")
.arg("where kindlegen.exe")
.output()
{
Ok(o) => o,
Err(e) => {
warn!("🔥 Failed to find KindleGen: {:?}", e);
@@ -183,12 +188,7 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
let mut params = AppStartParams::default();

if let Some(sub_matches) = matches.subcommand_matches("suttacentral_json_to_markdown") {

if let Ok(x) = sub_matches
.value_of("json_path")
.unwrap()
.parse::<String>()
{
if let Ok(x) = sub_matches.value_of("json_path").unwrap().parse::<String>() {
let path = PathBuf::from(&x);
if path.exists() {
params.json_path = Some(path);
@@ -215,9 +215,7 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
}

params.run_command = RunCommand::SuttaCentralJsonToMarkdown;

} else if let Some(sub_matches) = matches.subcommand_matches("nyanatiloka_to_markdown") {

if let Ok(x) = sub_matches
.value_of("nyanatiloka_root")
.unwrap()
@@ -249,14 +247,12 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
}

params.run_command = RunCommand::NyanatilokaToMarkdown;

} else if let Some(sub_matches) = matches.subcommand_matches("markdown_to_ebook") {

if sub_matches.is_present("ebook_format") {
if let Ok(x) = sub_matches
.value_of("ebook_format")
.unwrap()
.parse::<String>()
.unwrap()
.parse::<String>()
{
let s = x.trim().to_lowercase();
if s == "epub" {
@@ -267,19 +263,20 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
params.ebook_format = EbookFormat::Epub;
}
}

}

if !sub_matches.is_present("markdown_path") && !sub_matches.is_present("markdown_paths_list") {
if !sub_matches.is_present("markdown_path")
&& !sub_matches.is_present("markdown_paths_list")
{
let msg = "🔥 Either 'markdown_path' or 'markdown_paths_list' must be used with command 'markdown_to_mobi'.".to_string();
return Err(Box::new(ToolError::Exit(msg)));
}

if sub_matches.is_present("markdown_path") {
if let Ok(x) = sub_matches
.value_of("markdown_path")
.unwrap()
.parse::<String>()
.unwrap()
.parse::<String>()
{
let path = PathBuf::from(&x);
if path.exists() {
@@ -292,20 +289,16 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
}

if sub_matches.is_present("title") {
if let Ok(x) = sub_matches
.value_of("title")
.unwrap()
.parse::<String>()
{
if let Ok(x) = sub_matches.value_of("title").unwrap().parse::<String>() {
params.title = Some(x);
}
}

if sub_matches.is_present("dict_label") {
if let Ok(x) = sub_matches
.value_of("dict_label")
.unwrap()
.parse::<String>()
.unwrap()
.parse::<String>()
{
params.dict_label = Some(x);
}
@@ -314,16 +307,16 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
if sub_matches.is_present("markdown_paths_list") {
if let Ok(x) = sub_matches
.value_of("markdown_paths_list")
.unwrap()
.parse::<String>()
.unwrap()
.parse::<String>()
{
let list_path = PathBuf::from(&x);
let s = match fs::read_to_string(&list_path) {
Ok(s) => s,
Err(e) => {
let msg = format!("🔥 Can't read path. {:?}", e);
return Err(Box::new(ToolError::Exit(msg)));
},
}
};
let s = s.trim();

@@ -351,20 +344,20 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
EbookFormat::Epub => {
let p = dir.join(PathBuf::from(filename).with_extension("epub"));
params.output_path = Some(ensure_parent(&p));
},
}
EbookFormat::Mobi => {
let p = dir.join(PathBuf::from(filename).with_extension("mobi"));
params.output_path = Some(ensure_parent(&p));
},
}
}
}
}

if sub_matches.is_present("mobi_compression") {
if let Ok(x) = sub_matches
.value_of("mobi_compression")
.unwrap()
.parse::<usize>()
.unwrap()
.parse::<usize>()
{
params.mobi_compression = x;
}
@@ -378,8 +371,8 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box
if sub_matches.is_present("kindlegen_path") {
if let Ok(x) = sub_matches
.value_of("kindlegen_path")
.unwrap()
.parse::<String>()
.unwrap()
.parse::<String>()
{
let path = PathBuf::from(&x);
if path.exists() {
@@ -462,10 +455,12 @@ pub fn process_nyanatiloka_entries(
dict_label: &Option<String>,
ebook: &mut Ebook,
) {
let nyanatiloka_root = &nyanatiloka_root.as_ref().expect("nyanatiloka_root is missing.");
let nyanatiloka_root = &nyanatiloka_root
.as_ref()
.expect("nyanatiloka_root is missing.");
let dict_label = &dict_label.as_ref().expect("dict_label is missing.");

info!{"=== Begin processing {:?} ===", nyanatiloka_root};
info! {"=== Begin processing {:?} ===", nyanatiloka_root};

#[derive(Deserialize)]
struct Entry {
@@ -503,10 +498,7 @@ pub fn process_nyanatiloka_entries(
word = cap[1].to_string();
}

entries.push(Entry {
word,
text,
});
entries.push(Entry { word, text });
}

for e in entries.iter() {
@@ -525,24 +517,18 @@ pub fn process_nyanatiloka_entries(
}
}


pub fn process_markdown_list(
markdown_paths: Vec<PathBuf>,
ebook: &mut Ebook
) -> Result<(), Box<dyn Error>>
{
ebook: &mut Ebook,
) -> Result<(), Box<dyn Error>> {
for p in markdown_paths.iter() {
process_markdown(p, ebook)?;
}

Ok(())
}

pub fn process_markdown(
markdown_path: &PathBuf,
ebook: &mut Ebook
) -> Result<(), Box<dyn Error>>
{
pub fn process_markdown(markdown_path: &PathBuf, ebook: &mut Ebook) -> Result<(), Box<dyn Error>> {
info! {"=== Begin processing {:?} ===", markdown_path};

let s = fs::read_to_string(markdown_path).unwrap();
@@ -551,19 +537,26 @@ pub fn process_markdown(
let parts: Vec<&str> = s.split(DICTIONARY_WORD_ENTRIES_SEP).collect();

if parts.len() != 2 {
let msg = "Bad Markdown input. Can't separate the Dictionary header and DictWord entries.".to_string();
let msg = "Bad Markdown input. Can't separate the Dictionary header and DictWord entries."
.to_string();
return Err(Box::new(ToolError::Exit(msg)));
}

let a = parts.get(0).unwrap().to_string()
let a = parts
.get(0)
.unwrap()
.to_string()
.replace(DICTIONARY_METADATA_SEP, "")
.replace("``` toml", "")
.replace("```", "");

let mut meta: EbookMetadata = match toml::from_str(&a) {
Ok(x) => x,
Err(e) => {
let msg = format!("🔥 Can't serialize from TOML String: {:?}\nError: {:?}", &a, e);
let msg = format!(
"🔥 Can't serialize from TOML String: {:?}\nError: {:?}",
&a, e
);
return Err(Box::new(ToolError::Exit(msg)));
}
};
@@ -574,7 +567,7 @@ pub fn process_markdown(
EbookFormat::Epub => {
meta.is_epub = true;
meta.is_mobi = false;
},
}
EbookFormat::Mobi => {
meta.is_epub = false;
meta.is_mobi = true;
@@ -602,7 +595,7 @@ pub fn process_markdown(
Err(e) => {
let msg = format!("{:?}", e);
return Err(Box::new(ToolError::Exit(msg)));
},
}
}
}

@@ -628,4 +621,3 @@ fn html_to_plain(html: &str) -> String {
plain
}
*/

34 changes: 26 additions & 8 deletions src/dict_word.rs
@@ -1,6 +1,6 @@
use regex::Regex;
use std::default::Default;
use std::error::Error;
use regex::Regex;

use crate::error::ToolError;

@@ -26,7 +26,11 @@ impl DictWord {

pub fn as_markdown_and_toml_string(&self) -> String {
let header = toml::to_string(&self.word_header).unwrap();
format!("``` toml\n{}\n```\n\n{}", &header.trim(), &self.definition_md.trim())
format!(
"``` toml\n{}\n```\n\n{}",
&header.trim(),
&self.definition_md.trim()
)
}

pub fn from_markdown(s: &str) -> Result<DictWord, Box<dyn Error>> {
@@ -37,7 +41,10 @@ impl DictWord {
let word_header: DictWordHeader = match toml::from_str(toml) {
Ok(x) => x,
Err(e) => {
let msg = format!("🔥 Can't serialize from TOML String: {:?}\nError: {:?}", &toml, e);
let msg = format!(
"🔥 Can't serialize from TOML String: {:?}\nError: {:?}",
&toml, e
);
return Err(Box::new(ToolError::Exit(msg)));
}
};
@@ -85,12 +92,18 @@ impl DictWord {
// See... with markdown link
// (see *[abbha](/define/abbha)*) -> (see abbha)
let re_see_markdown_links = Regex::new(r"\(see \*\[([^\]]+)\]\([^\)]+\)\**\)").unwrap();
summary = re_see_markdown_links.replace_all(&summary, "(see $1)").trim().to_string();
summary = re_see_markdown_links
.replace_all(&summary, "(see $1)")
.trim()
.to_string();

// markdown links
// [abbha](/define/abbha) -> abbha
let re_markdown_links = Regex::new(r"\[([^\]]+)\]\([^\)]+\)").unwrap();
summary = re_markdown_links.replace_all(&summary, "$1").trim().to_string();
summary = re_markdown_links
.replace_all(&summary, "$1")
.trim()
.to_string();

// remaining markdown markup: *, []
let re_markdown = Regex::new(r"[\*\[\]]").unwrap();
@@ -109,7 +122,10 @@ impl DictWord {
// grammar abbr., with- or without dot, with- or without parens
let re_abbr_one = Regex::new(r"^\(*(d|f|m|ṃ|n|r|s|t)\.*\)*\.*\b").unwrap();
let re_abbr_two = Regex::new(r"^\(*(ac|fn|id|mf|pl|pp|pr|sg|si)\.*\)*\.*\b").unwrap();
let re_abbr_three = Regex::new(r"^\(*(abl|acc|act|adv|aor|dat|fpp|fut|gen|inc|ind|inf|loc|mfn|neg|opt)\.*\)*\.*\b").unwrap();
let re_abbr_three = Regex::new(
r"^\(*(abl|acc|act|adv|aor|dat|fpp|fut|gen|inc|ind|inf|loc|mfn|neg|opt)\.*\)*\.*\b",
)
.unwrap();
let re_abbr_four = Regex::new(r"^\(*(caus|part|pass|pron)\.*\)*\.*\b").unwrap();
let re_abbr_more = Regex::new(r"^\(*(absol|abstr|accus|compar|desid|feminine|impers|instr|masculine|neuter|plural|singular)\.*\)*\.*\b").unwrap();

@@ -209,7 +225,10 @@ impl DictWord {
if !summary.is_empty() {
let sum_length = 50;
if summary.char_indices().count() > sum_length {
let (char_idx, _char) = summary.char_indices().nth(sum_length).ok_or("Bad char index")?;
let (char_idx, _char) = summary
.char_indices()
.nth(sum_length)
.ok_or("Bad char index")?;
summary = summary[..char_idx].trim().to_string();
}

Expand Down Expand Up @@ -244,4 +263,3 @@ impl Default for DictWordHeader {
}
}
}
