diff --git a/Cargo.lock b/Cargo.lock
index 6b77782..9d64805 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -566,7 +566,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "snekdown"
-version = "0.15.2"
+version = "0.16.0"
 dependencies = [
  "chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "colored 1.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
diff --git a/Cargo.toml b/Cargo.toml
index f335ef2..294d44e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,12 +1,12 @@
 [package]
 name = "snekdown"
-version = "0.15.2"
+version = "0.16.0"
 authors = ["trivernis "]
 edition = "2018"
 license-file = "LICENSE"
 readme = "README.md"
 description = "A parser for the custom snekdown markdown syntax"
-homepage = "https://github.com/Trivernis/snekdown"
+repository = "https://github.com/Trivernis/snekdown"
 
 [lib]
 name = "snekdown"
diff --git a/src/main.rs b/src/main.rs
index 4679ba2..9d5df4f 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -78,7 +78,7 @@ fn watch(opt: &Opt) {
 /// Renders the document to the output path
 fn render(opt: &Opt) -> Parser {
     let start = Instant::now();
-    let mut parser = Parser::new_from_file(opt.input.to_str().unwrap().to_string()).unwrap();
+    let mut parser = Parser::new_from_file(opt.input.clone()).unwrap();
     let document = parser.parse();
     println!(
         "{}",
diff --git a/src/parsing/elements.rs b/src/parsing/elements.rs
index 28ba5a6..cbd3035 100644
--- a/src/parsing/elements.rs
+++ b/src/parsing/elements.rs
@@ -322,6 +322,7 @@ impl Document {
             Block::Import(imp) => {
                 let arc_anchor = Arc::clone(&imp.anchor);
                 let anchor = &mut arc_anchor.write().unwrap();
+
                 if let Some(doc) = &mut anchor.document {
                     self.placeholders.append(&mut doc.placeholders);
                     self.bibliography.combine(&mut doc.bibliography);
diff --git a/src/parsing/parser.rs b/src/parsing/parser.rs
index 6a62d74..d8e46e3 100644
--- a/src/parsing/parser.rs
+++ b/src/parsing/parser.rs
@@ -12,7 +12,7 @@ use std::collections::HashMap;
 use std::fs::File;
 use std::io;
 use std::io::{BufRead, BufReader, Cursor};
-use std::path::Path;
+use std::path::PathBuf;
 use std::sync::{Arc, Mutex, RwLock};
 use std::thread;
 
@@ -23,8 +23,8 @@ pub struct Parser {
     section_nesting: u8,
     sections: Vec<u8>,
     section_return: Option<u8>,
-    path: Option<String>,
-    paths: Arc<Mutex<Vec<String>>>,
+    path: Option<PathBuf>,
+    paths: Arc<Mutex<Vec<PathBuf>>>,
     wg: WaitGroup,
     is_child: bool,
     pub(crate) block_break_at: Vec<char>,
@@ -37,10 +37,10 @@ pub struct Parser {
 
 impl Parser {
     /// Creates a new parser from a path
-    pub fn new_from_file(path: String) -> Result<Self, io::Error> {
+    pub fn new_from_file(path: PathBuf) -> Result<Self, io::Error> {
         let f = File::open(&path)?;
         Ok(Self::create(
-            Some(path),
+            Some(PathBuf::from(path)),
             Arc::new(Mutex::new(Vec::new())),
             false,
             Box::new(BufReader::new(f)),
@@ -48,8 +48,13 @@ impl Parser {
     }
 
     /// Creates a new parser with text being the markdown text
-    pub fn new(text: String, path: Option<String>) -> Self {
+    pub fn new(text: String, path: Option<PathBuf>) -> Self {
         let text_bytes = text.as_bytes();
+        let path = if let Some(inner_path) = path {
+            Some(PathBuf::from(inner_path))
+        } else {
+            None
+        };
         Parser::create(
             path,
             Arc::new(Mutex::new(Vec::new())),
@@ -59,10 +64,10 @@ impl Parser {
     }
 
     /// Creates a child parser from string text
-    pub fn child(text: String, path: String, paths: Arc<Mutex<Vec<String>>>) -> Self {
+    pub fn child(text: String, path: PathBuf, paths: Arc<Mutex<Vec<PathBuf>>>) -> Self {
         let text_bytes = text.as_bytes();
         Self::create(
-            Some(path),
+            Some(PathBuf::from(path)),
             paths,
             true,
             Box::new(Cursor::new(text_bytes.to_vec())),
@@ -71,12 +76,12 @@ impl Parser {
 
     /// Creates a child parser from a file
     pub fn child_from_file(
-        path: String,
-        paths: Arc<Mutex<Vec<String>>>,
+        path: PathBuf,
+        paths: Arc<Mutex<Vec<PathBuf>>>,
     ) -> Result<Self, io::Error> {
         let f = File::open(&path)?;
         Ok(Self::create(
-            Some(path),
+            Some(PathBuf::from(path)),
             paths,
             true,
             Box::new(BufReader::new(f)),
@@ -84,17 +89,13 @@ impl Parser {
     }
 
     fn create(
-        path: Option<String>,
-        paths: Arc<Mutex<Vec<String>>>,
+        path: Option<PathBuf>,
+        paths: Arc<Mutex<Vec<PathBuf>>>,
         is_child: bool,
         mut reader: Box<dyn BufRead>,
     ) -> Self {
         if let Some(path) = path.clone() {
-            let path_info = Path::new(&path);
-            paths
-                .lock()
-                .unwrap()
-                .push(path_info.to_str().unwrap().to_string())
+            paths.lock().unwrap().push(path.clone())
         }
         let mut text = Vec::new();
         let mut current_char = ' ';
@@ -142,37 +143,36 @@ impl Parser {
     }
 
     /// Returns the import paths of the parser
-    pub fn get_paths(&self) -> Vec<String> {
+    pub fn get_paths(&self) -> Vec<PathBuf> {
         self.paths.lock().unwrap().clone()
     }
 
     /// transform an import path to be relative to the current parsers file
-    fn transform_path(&mut self, path: String) -> String {
-        let mut path = path;
-        let first_path_info = Path::new(&path);
-        if first_path_info.is_absolute() {
-            return first_path_info.to_str().unwrap().to_string();
-        }
-        if let Some(selfpath) = &self.path {
-            let path_info = Path::new(&selfpath);
-            if path_info.is_file() {
-                if let Some(dir) = path_info.parent() {
-                    path = format!("{}/{}", dir.to_str().unwrap(), path);
+    fn transform_path(&mut self, path: String) -> PathBuf {
+        let mut path = PathBuf::from(path);
+
+        if !path.is_absolute() {
+            if let Some(selfpath) = &self.path {
+                if let Some(dir) = selfpath.parent() {
+                    path = PathBuf::new().join(dir).join(path);
                 }
             }
         }
-        let path_info = Path::new(&path);
-        return path_info.to_str().unwrap().to_string();
+
+        path
     }
 
     /// starts up a new thread to parse the imported document
     fn import_document(&mut self, path: String) -> ParseResult<Arc<RwLock<ImportAnchor>>> {
         let path = self.transform_path(path);
-        let path_info = Path::new(&path);
-        if !path_info.exists() || !path_info.is_file() {
+        if !path.exists() || !path.is_file() {
             println!(
                 "{}",
-                format!("Import of \"{}\" failed: The file doesn't exist.", path,).red()
+                format!(
+                    "Import of \"{}\" failed: The file doesn't exist.",
+                    path.to_str().unwrap()
+                )
+                .red()
             );
             return Err(ParseError::new_with_message(
                 self.index,
@@ -184,7 +184,11 @@ impl Parser {
         if paths.iter().find(|item| **item == path) != None {
             println!(
                 "{}",
-                format!("Import of \"{}\" failed: Cyclic import.", path).yellow()
+                format!(
+                    "Import of \"{}\" failed: Cyclic import.",
+                    path.to_str().unwrap()
+                )
+                .yellow()
             );
             return Err(ParseError::new_with_message(self.index, "cyclic import"));
         }
@@ -210,7 +214,11 @@ impl Parser {
 
     /// parses the given text into a document
     pub fn parse(&mut self) -> Document {
-        self.document.path = self.path.clone();
+        self.document.path = if let Some(path) = &self.path {
+            Some(path.canonicalize().unwrap().to_str().unwrap().to_string())
+        } else {
+            None
+        };
 
         while self.index < self.text.len() {
             match self.parse_block() {
@@ -225,12 +233,18 @@ impl Parser {
                                 "{}",
                                 format!(
                                     "Error in File {}:{}:{} - {}",
-                                    path, position.0, position.1, err
+                                    path.to_str().unwrap(),
+                                    position.0,
+                                    position.1,
+                                    err
                                 )
                                 .red()
                             );
                         } else {
-                            println!("{}", format!("Error in File {}: {}", path, err).red());
+                            println!(
+                                "{}",
+                                format!("Error in File {}: {}", path.to_str().unwrap(), err).red()
+                            );
                         }
                     } else {
                         println!("{}", err);
diff --git a/src/parsing/tokens.rs b/src/parsing/tokens.rs
index aa04655..bcac279 100644
--- a/src/parsing/tokens.rs
+++ b/src/parsing/tokens.rs
@@ -116,8 +116,3 @@ pub(crate) const SQ_PHOLDER_STOP: [char; 2] = [PHOLDER_CLOSE, PHOLDER_CLOSE];
 pub(crate) const SQ_CENTERED_START: [char; 2] = [PIPE, PIPE];
 pub(crate) const SQ_COLOR_START: [char; 2] = [COLOR_START, COLOR_OPEN];
 pub(crate) const SQ_BIBREF_START: [char; 2] = [BIBREF_OPEN, BIBREF_REF];
-
-// expressions
-
-pub(crate) const EXPR_URI: &str =
-    r"^(https?://)?\w+\.\w+(\.\w+|)?(/[\w, -.%&]+)*/?$|^([\w, -.]+|\w:)?(/[\w, -.]+)+$";
diff --git a/src/parsing/utils.rs b/src/parsing/utils.rs
index a819077..51aed2a 100644
--- a/src/parsing/utils.rs
+++ b/src/parsing/utils.rs
@@ -70,11 +70,10 @@ impl ParseError {
             return None;
         }
         let split_content = content.split_at(self.index);
-        let line_number = split_content.0.matches("\n").count() as usize;
-        let overshoot_position = self.index as isize - split_content.0.len() as isize;
+        let line_number = split_content.0.lines().count() as usize;
 
         if let Some(line) = split_content.0.lines().last() {
-            let inline_position = (line.len() as isize + overshoot_position) as usize;
+            let inline_position = line.len();
 
             Some((line_number, inline_position))
         } else {