Compare commits

main...v0.30.4

No commits in common. 'main' and 'v0.30.4' have entirely different histories.

@ -1,31 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Write...
2. Render...
3. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. Arch Linux]
- Architecture: [e.g. x86_64, ARM]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.

@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE]"
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

@ -1,42 +0,0 @@
name: Build and Test
on:
  push:
    branches: [ main, develop ]
  pull_request:
    branches: [ main, develop ]
env:
  CARGO_TERM_COLOR: always
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Cache build data
        uses: actions/cache@v2
        with:
          path: |
            target
            ~/.cargo/
          key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-
      - name: Build
        run: cargo build --verbose --all-features
      - name: Run tests
        run: cargo test --verbose --all-features
      - name: Test init
        run: cargo run -- init
      - name: Test HTML
        run: cargo run -- render README.md README.html --format html
      - name: Test PDF
        run: cargo run --all-features -- render README.md README.pdf --format pdf

Cargo.lock (generated, 1229 changed lines): file diff suppressed because it is too large.

@ -1,9 +1,9 @@
[package] [package]
name = "snekdown" name = "snekdown"
version = "0.33.4" version = "0.30.4"
authors = ["trivernis <trivernis@protonmail.com>"] authors = ["trivernis <trivernis@protonmail.com>"]
edition = "2018" edition = "2018"
license = "GPL-3.0" license-file = "LICENSE"
readme = "README.md" readme = "README.md"
description = "A parser for the custom snekdown markdown syntax" description = "A parser for the custom snekdown markdown syntax"
repository = "https://github.com/Trivernis/snekdown" repository = "https://github.com/Trivernis/snekdown"
@ -21,9 +21,9 @@ path = "src/main.rs"
pdf = ["headless_chrome", "failure"] pdf = ["headless_chrome", "failure"]
[dependencies] [dependencies]
charred = "0.3.6" charred = "0.3.3"
asciimath-rs = "0.5.7" asciimath-rs = "0.5.7"
bibliographix = "0.6.0" bibliographix = "0.5.0"
crossbeam-utils = "0.7.2" crossbeam-utils = "0.7.2"
structopt = "0.3.14" structopt = "0.3.14"
minify = "1.1.1" minify = "1.1.1"
@ -36,8 +36,9 @@ colored = "1.9.3"
gh-emoji = "1.0.3" gh-emoji = "1.0.3"
notify = "4.0.12" notify = "4.0.12"
toml = "0.5.6" toml = "0.5.6"
serde = { version = "1.0.111", features = ["serde_derive"] } serde ="1.0.111"
reqwest = { version = "0.10", features = ["blocking"] } serde_derive = "1.0.111"
reqwest = {version = "0.10", features=["blocking"]}
mime_guess = "2.0.3" mime_guess = "2.0.3"
mime = "0.3.16" mime = "0.3.16"
base64 = "0.12.3" base64 = "0.12.3"
@ -47,11 +48,6 @@ log = "0.4.11"
env_logger = "0.7.1" env_logger = "0.7.1"
indicatif = "0.15.0" indicatif = "0.15.0"
platform-dirs = "0.2.0" platform-dirs = "0.2.0"
image = "0.23.12"
parking_lot = "0.11.1"
sha2 = "0.9.2"
config = "0.10.1"
rsass = "0.16.0"
headless_chrome = { version = "0.9.0", optional = true, features = ["fetch"] } headless_chrome = {version = "0.9.0", optional = true}
failure = { version = "0.1.8", optional = true } failure = {version = "0.1.8", optional = true}

@ -1,101 +1,368 @@
<p align="center"> # ![](https://i.imgur.com/FpdXqiT.png) Snekdown - More than just Markdown ![](https://img.shields.io/discord/729250668162056313)
<img src="https://i.imgur.com/FpdXqiT.png">
</p>
<h1 align="center">Snekdown</h1>
<p align="center">
<i>More than just Markdown</i>
</p>
<p align="center">
<a href="https://github.com/Trivernis/snekdown/actions">
<img src="https://img.shields.io/github/workflow/status/trivernis/snekdown/Build%20and%20Test/main?style=for-the-badge">
</a>
<a href="https://crates.io/crates/snekdown">
<img src="https://img.shields.io/crates/v/snekdown?style=for-the-badge">
</a>
<a href="https://aur.archlinux.org/packages/snekdown">
<img src="https://img.shields.io/aur/version/snekdown?style=for-the-badge">
</a>
<a href="https://discord.gg/vGAXW9nxUv">
<img src="https://img.shields.io/discord/729250668162056313?style=for-the-badge">
</a>
<br/>
<br/>
<a href="https://trivernis.net/snekdown/">Documentation</a> |
<a href="https://github.com/Trivernis/snekdown/releases">Releases</a>
</p>
- - -
## Description
This project's goal is to implement a fast markdown parser with an extended syntax fitted
for my needs.
## Core Features ## Installation
- Imports You need a working rust installation, for example by using [rustup](http://rustup.rs).
- Bibliography & Glossary
- AsciiMath
- Placeholders
- Advanced Images
## Prerequisites ```sh
cargo install snekdown
```
- Google Chrome/Chromium (for PDF rendering) With pdf rendering
## Installation ```sh
cargo install snekdown --features pdf
```
### Binaries ## Usage
You can download prebuilt binaries on the [Releases](https://github.com/Trivernis/snekdown/releases) Page. ```
USAGE:
snekdown [FLAGS] [OPTIONS] <input> <output> [SUBCOMMAND]
FLAGS:
-h, --help Prints help information
--no-cache Don't use the cache
-V, --version Prints version information
### Arch Linux OPTIONS:
-f, --format <format> the output format [default: html]
Snekdown is available in [the AUR](https://aur.archlinux.org/packages/snekdown). ARGS:
<input> Path to the input file
<output> Path for the output file
SUBCOMMANDS:
help Prints this message or the help of the given subcommand(s)
render Default. Parse and render the document
watch Watch the document and its imports and render on change
```
### Cargo ## Syntax
You need a working rust installation, for example by using [rustup](http://rustup.rs). ### Images
```sh ```md
cargo install snekdown Simple Syntax
!(url)
Extended syntax with a description
![description](url)
Extended syntax with metadata to specify the size
![description](url)[metadata]
Extended syntax with metadata and no description
!(url)[metadata]
``` ```
With pdf rendering When generating the html file the images are base64 embedded. To turn off this behaviour
set the config parameter `embed-external` to `false`.
```sh ### Quotes
cargo install snekdown --features pdf
```md
Simple (default) Syntax
> This is a quote
Multiline
> This is a
> Multiline Quote
Quote with metadata (e.g. Author)
[author=Trivernis year=2020 display='{{author}} - {{year}}']> This is a quote with metadata
``` ```
## Usage ### Imports
Imports can be used to attach a different document to the main document.
Imports are parsed via multithreading.
```md
<[path]
<[document.md]
<[style.css][type=stylesheet]
```
The parser differentiates four different types of imported files.
- `document` - The default import which is just another snekdown document
- `stylesheet` - CSS stylesheets that are included when rendering
- `bibliography` - A file including bibliography
- `config`/`manifest` - A config file that contains metadata
If no type is provided the parser guesses the type of file from the extension.
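For example, a bibliography file whose extension does not give away its type can be imported with an explicit type, reusing the metadata syntax from the stylesheet import above (the file name here is only illustrative):
```md
<[references.toml][type=bibliography]
```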
### Tables
Tables MUST start with a pipe character `|`
```md
Standalone header:
| header | header | header
Header with rows
| header | header | header
|--------|--------|-------
| row | row | row
```
### Placeholders
Placeholders can be used to insert special elements in a specific place.
Placeholders are always case insensitive.
```md
Insert the table of contents
[[TOC]]
Insert the current date
[[date]]
Insert the current time
[[time]]
```
### Metadata
Additional metadata can be provided for some elements.
```md
String value
[key = value]
String value
[key = "String value"]
Integer value
[key = 123]
Float value
[key = 1.23]
Boolean
[key]
Boolean
[key = false]
Placeholder
[key = [[placeholder]]]
```
Metadata can also be defined in a separate toml file with simple key-value pairs.
Example:
```toml
# bibliography.bib.toml
author = "Snek"
published = "2020"
test-key = ["test value", "test value 2"]
# those files won't get imported
ignored-imports = ["style.css"]
# stylesheets that should be included
included-stylesheets = ["style2.css"]
# other metadata files that should be included
included-configs = []
# bibliography that should be included
included-bibliography = ["mybib.toml"]
# glossary that should be included
included-glossary = ["myglossary.toml"]
Use `snekdown help` and `snekdown <subcommand> --help` for more information. # if external sources (images, stylesheets, MathJax)
# should be embedded into the document (default: true)
embed-external = true
### Rendering # If SmartArrows should be used (default: true)
smart-arrows = true
`snekdown render <input> <output>` # Includes a MathJax script tag in the document to render MathML in chromium.
# (default: true)
include-math-jax = true
### Watching
`snekdown watch <input> <output>` ### PDF Options - needs the pdf feature enabled ###
# If the header and footer of the pdf should be displayed (default: true)
pdf-display-header-footer = true
# PDF header template of each page (default: empty)
pdf-header-template = "<div><span class='title'></span></div>"
## Editors # PDF footer template of each page (default: see chromium_pdf assets)
pdf-footer-template = "<div><span class='pageNumber'></span></div>"
I've created a [VisualStudio Code extension](https://marketplace.visualstudio.com/items?itemName=trivernis.snekdown) for Snekdown. # Top margin of the pdf. Should be between 0 and 1. (default: 1.0)
This extension provides a preview of snekdown files, exports and other commands similar to the pdf-margin-top = 1
cli. The source code can be found [here](https://github.com/Trivernis/snekdown-vscode-extension).
# Bottom margin of the pdf. Should be between 0 and 1. (default: 1.0)
pdf-margin-bottom = 1
# Left margin of the pdf. Should be between 0 and 1.
pdf-margin-left = 0
# Right margin of the pdf. Should be between 0 and 1.
pdf-margin-right = 0
# Page height of the pdf
pdf-page-height = 100
# Page width of the pdf
pdf-page-width = 80
# The scale at which the website is rendered into pdf.
pdf-page-scale = 1.0
```
The `[Section]` keys are not relevant as the structure gets flattened before the values are read.
#### Usage
```
Hide a section (including subsections) in the TOC
#[toc-hidden] Section
Set the size of an image
!(url)[width = 42% height=auto]
Set the source of a quote
[author=Me date=[[date]] display="{{author}} - {{date}}"]> It's me
Set options for placeholders
[[toc]][ordered]
```
### Centered Text
```
|| These two lines
|| are centered
```
### Inline
```md
*Italic*
**Bold**
~~Striked~~
_Underlined_
^Superscript^
`Monospace`
:Emoji:
§[#0C0]Colored text§[] §[red] red §[]
```
## Bibliography
Bibliography entries can be defined and referenced anywhere in the document.
Definition:
```md
[SD_BOOK]:[type=book, author=Snek, title = "Snekdown Book" date="20.08.2020", publisher=Snek]
[SD_GITHUB]: https://github.com/trivernis/snekdown
```
Usage:
```
There is a book about snekdown[^book] and a github repo[^github].
```
Entries can also be defined in a separate toml file with the following data layout:
```toml
# snekdown.toml
[BIB_KEY]
key = "value"
[SD_BOOK]
type = "book"
author = "Snek"
title = "Snekdown Book"
date = "20.08.2020"
publisher = "Snek"
[SD_GITHUB]
type = "website"
url = "https://github.com/trivernis/snekdown"
```
The valid types for entries and required fields can be found in the [bibliographix README](https://github.com/Trivernis/bibliographix#bibliography-types-and-fields).
Bibliography entries themselves are not rendered. To render a list of the used bibliography entries, insert the
`bib` placeholder at the place where you want the list to appear.
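For example, using the placeholder syntax described above, the list could be placed at the end of the document (a minimal sketch):
```md
[[bib]]
```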
## Glossary
Glossary entries are to be defined in a `glossary.toml` file or any other toml file
that is imported as type `glossary`.
The definition of glossary entries has to follow this structure:
```toml
[SHORT]
long = "Long Form"
description = "The description of the entry"
# Example
[HTML]
long = "Hypertext Markup Language"
description = "The markup language of the web"
```
Those glossary entries can be referenced in the snekdown file as follows:
```md
~HTML is widely used for websites.
The format ~HTML is not considered a programming language by some definitions.
~~HTML
```
The first occurrence of the glossary entry (`~HTML`) always uses the long form.
Every following occurrence uses the short form. The long form can be enforced by using two
tildes (`~~HTML`).
## Math
Snekdown allows the embedding of [AsciiMath](http://asciimath.org/):
The AsciiMath parser is provided by the [asciimath-rs](https://github.com/Trivernis/asciimath-rs) crate.
```
inline math $$ a^2 + b^2 = c^2 $$
Block Math
$$$
A = [[1, 2],[3,4]]
$$$
```
The expression gets converted into MathML, which is then rendered by MathJax when loaded in
the browser.
## Smart Arrows
Snekdown automatically renders the sequences `-->`, `==>`, `<--`, `<==`, `<-->`, `<==>` as
their respective unicode arrows (similar to [markdown-it-smartarrows](https://github.com/adam-p/markdown-it-smartarrows)).
This behavior can be turned off by setting the config parameter `smart-arrows` to `false`
(the config needs to be imported before the arrows are used for that to work).
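A minimal sketch of how this looks in a document (the sequences are taken from the list above; the rendered output substitutes the corresponding Unicode arrow glyphs):
```md
Parsing --> Rendering
HTML <==> PDF
```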
## Roadmap ## Roadmap
The end goal is to have a markup language with features similar to LaTeX. The end goal is to have a markup language with features similar to LaTeX.
### Short Term
- [x] Checkboxes - [x] Checkboxes
- [x] Emojis (\:emoji:) - [x] Emojis (\:emoji:)
- [x] Colors - [x] Colors
@ -107,19 +374,9 @@ The end goal is to have a markup language with features similar to LaTeX.
- [x] Chromium based pdf rendering - [x] Chromium based pdf rendering
- [x] Custom Stylesheets - [x] Custom Stylesheets
- [x] Smart arrows - [x] Smart arrows
- [ ] Custom Elements via templates (50%)
- [ ] Cross References - [ ] Cross References
- [ ] Figures - [ ] Figures
- [ ] EPUB Rendering - [ ] EPUB Rendering
- [ ] Text sizes - [ ] Text sizes
- [ ] Title pages - [ ] Title pages
### Long Term
- Rewrite of the whole parsing process
- Custom Elements via templates
## License
This project is licensed under GPL 3.0. See LICENSE for more information.

@ -1,28 +1,19 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
pub mod tokens; pub mod tokens;
use crate::format::PlaceholderTemplate; use crate::format::PlaceholderTemplate;
use crate::references::configuration::{ConfigRefEntry, Configuration, Value};
use crate::references::glossary::{GlossaryManager, GlossaryReference}; use crate::references::glossary::{GlossaryManager, GlossaryReference};
use crate::references::placeholders::ProcessPlaceholders; use crate::references::placeholders::ProcessPlaceholders;
use crate::references::templates::{Template, TemplateVariable}; use crate::references::templates::{Template, TemplateVariable};
use crate::settings::Settings;
use crate::utils::downloads::{DownloadManager, PendingDownload}; use crate::utils::downloads::{DownloadManager, PendingDownload};
use crate::utils::image_converting::{ImageConverter, PendingImage};
use asciimath_rs::elements::special::Expression; use asciimath_rs::elements::special::Expression;
use bibliographix::bib_manager::BibManager; use bibliographix::bib_manager::BibManager;
use bibliographix::bibliography::bibliography_entry::BibliographyEntryReference; use bibliographix::bibliography::bibliography_entry::BibliographyEntryReference;
use bibliographix::references::bib_reference::BibRefAnchor; use bibliographix::references::bib_reference::BibRefAnchor;
use image::ImageFormat;
use mime::Mime;
use parking_lot::Mutex;
use std::collections::HashMap; use std::collections::HashMap;
use std::iter::FromIterator;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, RwLock}; use std::sync::{Arc, Mutex, RwLock};
pub const SECTION: &str = "section"; pub const SECTION: &str = "section";
pub const PARAGRAPH: &str = "paragraph"; pub const PARAGRAPH: &str = "paragraph";
@ -79,10 +70,9 @@ pub struct Document {
pub(crate) is_root: bool, pub(crate) is_root: bool,
pub(crate) path: Option<String>, pub(crate) path: Option<String>,
pub(crate) placeholders: Vec<Arc<RwLock<Placeholder>>>, pub(crate) placeholders: Vec<Arc<RwLock<Placeholder>>>,
pub config: Arc<Mutex<Settings>>, pub config: Configuration,
pub bibliography: BibManager, pub bibliography: BibManager,
pub downloads: Arc<Mutex<DownloadManager>>, pub downloads: Arc<Mutex<DownloadManager>>,
pub images: Arc<Mutex<ImageConverter>>,
pub stylesheets: Vec<Arc<Mutex<PendingDownload>>>, pub stylesheets: Vec<Arc<Mutex<PendingDownload>>>,
pub glossary: Arc<Mutex<GlossaryManager>>, pub glossary: Arc<Mutex<GlossaryManager>>,
} }
@ -194,7 +184,6 @@ pub enum Inline {
CharacterCode(CharacterCode), CharacterCode(CharacterCode),
LineBreak, LineBreak,
Arrow(Arrow), Arrow(Arrow),
Anchor(Anchor),
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -247,7 +236,7 @@ pub struct Url {
pub struct Image { pub struct Image {
pub(crate) url: Url, pub(crate) url: Url,
pub(crate) metadata: Option<InlineMetadata>, pub(crate) metadata: Option<InlineMetadata>,
pub(crate) image_data: Arc<Mutex<PendingImage>>, pub(crate) download: Arc<Mutex<PendingDownload>>,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -259,7 +248,7 @@ pub struct Placeholder {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct RefLink { pub struct RefLink {
pub(crate) description: TextLine, pub(crate) description: Box<Line>,
pub(crate) reference: String, pub(crate) reference: String,
} }
@ -321,11 +310,10 @@ impl Document {
is_root: true, is_root: true,
path: None, path: None,
placeholders: Vec::new(), placeholders: Vec::new(),
config: Arc::new(Mutex::new(Settings::default())), config: Configuration::default(),
bibliography: BibManager::new(), bibliography: BibManager::new(),
stylesheets: Vec::new(), stylesheets: Vec::new(),
downloads: Arc::new(Mutex::new(DownloadManager::new())), downloads: Arc::new(Mutex::new(DownloadManager::new())),
images: Arc::new(Mutex::new(ImageConverter::new())),
glossary: Arc::new(Mutex::new(GlossaryManager::new())), glossary: Arc::new(Mutex::new(GlossaryManager::new())),
} }
} }
@ -341,7 +329,6 @@ impl Document {
bibliography: self.bibliography.create_child(), bibliography: self.bibliography.create_child(),
stylesheets: Vec::new(), stylesheets: Vec::new(),
downloads: Arc::clone(&self.downloads), downloads: Arc::clone(&self.downloads),
images: Arc::clone(&self.images),
glossary: Arc::clone(&self.glossary), glossary: Arc::clone(&self.glossary),
} }
} }
@ -359,7 +346,7 @@ impl Document {
list.ordered = ordered; list.ordered = ordered;
self.elements.iter().for_each(|e| match e { self.elements.iter().for_each(|e| match e {
Block::Section(sec) => { Block::Section(sec) => {
if !sec.is_hidden_in_toc() { if !sec.get_hide_in_toc() {
let mut item = let mut item =
ListItem::new(Line::RefLink(sec.header.get_anchor()), 1, ordered); ListItem::new(Line::RefLink(sec.header.get_anchor()), 1, ordered);
item.children.append(&mut sec.get_toc_list(ordered).items); item.children.append(&mut sec.get_toc_list(ordered).items);
@ -440,41 +427,9 @@ impl Document {
if self.is_root { if self.is_root {
self.process_definitions(); self.process_definitions();
self.bibliography.assign_entries_to_references(); self.bibliography.assign_entries_to_references();
self.glossary.lock().assign_entries_to_references(); self.glossary.lock().unwrap().assign_entries_to_references();
self.process_placeholders(); self.process_placeholders();
self.process_media();
}
}
fn process_media(&self) {
let downloads = Arc::clone(&self.downloads);
if self.config.lock().features.embed_external {
downloads.lock().download_all();
}
if let Some(s) = &self.config.lock().images.format {
if let Some(format) = ImageFormat::from_extension(s) {
self.images.lock().set_target_format(format);
}
}
let mut image_width = 0;
let mut image_height = 0;
if let Some(i) = self.config.lock().images.max_width {
image_width = i;
image_height = i;
}
if let Some(i) = self.config.lock().images.max_height {
image_height = i;
if image_width <= 0 {
image_width = i;
}
}
if image_width > 0 && image_height > 0 {
self.images
.lock()
.set_target_size((image_width as u32, image_height as u32));
} }
self.images.lock().convert_all();
} }
} }
@ -493,10 +448,9 @@ impl Section {
pub fn get_toc_list(&self, ordered: bool) -> List { pub fn get_toc_list(&self, ordered: bool) -> List {
let mut list = List::new(); let mut list = List::new();
self.elements.iter().for_each(|e| { self.elements.iter().for_each(|e| {
if let Block::Section(sec) = e { if let Block::Section(sec) = e {
if !sec.is_hidden_in_toc() { if !sec.get_hide_in_toc() {
let mut item = let mut item =
ListItem::new(Line::RefLink(sec.header.get_anchor()), 1, ordered); ListItem::new(Line::RefLink(sec.header.get_anchor()), 1, ordered);
item.children.append(&mut sec.get_toc_list(ordered).items); item.children.append(&mut sec.get_toc_list(ordered).items);
@ -508,7 +462,7 @@ impl Section {
list list
} }
pub(crate) fn is_hidden_in_toc(&self) -> bool { pub(crate) fn get_hide_in_toc(&self) -> bool {
if let Some(meta) = &self.metadata { if let Some(meta) = &self.metadata {
meta.get_bool("toc-hidden") meta.get_bool("toc-hidden")
} else { } else {
@ -564,7 +518,7 @@ impl Header {
pub fn get_anchor(&self) -> RefLink { pub fn get_anchor(&self) -> RefLink {
RefLink { RefLink {
description: self.line.as_raw_text().as_plain_line(), description: Box::new(self.line.clone()),
reference: self.anchor.clone(), reference: self.anchor.clone(),
} }
} }
@ -620,16 +574,6 @@ impl TextLine {
pub fn add_subtext(&mut self, subtext: Inline) { pub fn add_subtext(&mut self, subtext: Inline) {
self.subtext.push(subtext) self.subtext.push(subtext)
} }
pub fn as_plain_line(&self) -> TextLine {
TextLine {
subtext: self
.subtext
.iter()
.map(|s| Inline::Plain(s.as_plain_text()))
.collect(),
}
}
} }
impl Table { impl Table {
@ -672,15 +616,6 @@ impl Quote {
pub fn add_text(&mut self, text: TextLine) { pub fn add_text(&mut self, text: TextLine) {
self.text.push(text) self.text.push(text)
} }
/// Strips a single linebreak from the end of the quote
pub fn strip_linebreak(&mut self) {
if let Some(last) = self.text.last_mut() {
if let Some(Inline::LineBreak) = last.subtext.last() {
last.subtext.pop();
}
}
}
} }
impl ImportAnchor { impl ImportAnchor {
@ -716,8 +651,6 @@ impl Placeholder {
pub trait Metadata { pub trait Metadata {
fn get_bool(&self, key: &str) -> bool; fn get_bool(&self, key: &str) -> bool;
fn get_string(&self, key: &str) -> Option<String>; fn get_string(&self, key: &str) -> Option<String>;
fn get_float(&self, key: &str) -> Option<f64>;
fn get_integer(&self, key: &str) -> Option<i64>;
fn get_string_map(&self) -> HashMap<String, String>; fn get_string_map(&self) -> HashMap<String, String>;
} }
@ -738,24 +671,6 @@ impl Metadata for InlineMetadata {
} }
} }
fn get_float(&self, key: &str) -> Option<f64> {
if let Some(MetadataValue::Float(f)) = self.data.get(key) {
Some(*f)
} else if let Some(MetadataValue::Integer(i)) = self.data.get(key) {
Some(*i as f64)
} else {
None
}
}
fn get_integer(&self, key: &str) -> Option<i64> {
if let Some(MetadataValue::Integer(i)) = self.data.get(key) {
Some(*i)
} else {
None
}
}
fn get_string_map(&self) -> HashMap<String, String> { fn get_string_map(&self) -> HashMap<String, String> {
let mut string_map = HashMap::new(); let mut string_map = HashMap::new();
for (k, v) in &self.data { for (k, v) in &self.data {
@ -772,17 +687,26 @@ impl Metadata for InlineMetadata {
} }
} }
impl Into<HashMap<String, Value>> for InlineMetadata {
fn into(self) -> HashMap<String, Value> {
HashMap::from_iter(self.data.iter().filter_map(|(k, v)| match v {
MetadataValue::String(s) => Some((k.clone(), Value::String(s.clone()))),
MetadataValue::Bool(b) => Some((k.clone(), Value::Bool(*b))),
MetadataValue::Integer(i) => Some((k.clone(), Value::Integer(*i))),
MetadataValue::Float(f) => Some((k.clone(), Value::Float(*f))),
MetadataValue::Template(t) => Some((k.clone(), Value::Template(t.clone()))),
_ => None,
}))
}
}
impl Image { impl Image {
pub fn get_content(&self) -> Option<Vec<u8>> { pub fn get_content(&self) -> Option<Vec<u8>> {
let mut data = None; let mut data = None;
std::mem::swap(&mut data, &mut self.image_data.lock().data); std::mem::swap(&mut data, &mut self.download.lock().unwrap().data);
data data
} }
pub fn get_mime_type(&self) -> Mime {
self.image_data.lock().mime.clone()
}
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -795,11 +719,15 @@ pub struct BibEntry {
pub struct BibReference { pub struct BibReference {
pub(crate) key: String, pub(crate) key: String,
pub(crate) entry_anchor: Arc<Mutex<BibRefAnchor>>, pub(crate) entry_anchor: Arc<Mutex<BibRefAnchor>>,
pub(crate) display: Option<String>, pub(crate) display: Option<ConfigRefEntry>,
} }
impl BibReference { impl BibReference {
pub fn new(key: String, display: Option<String>, anchor: Arc<Mutex<BibRefAnchor>>) -> Self { pub fn new(
key: String,
display: Option<ConfigRefEntry>,
anchor: Arc<Mutex<BibRefAnchor>>,
) -> Self {
Self { Self {
key: key.to_string(), key: key.to_string(),
display, display,
@ -808,11 +736,12 @@ impl BibReference {
} }
pub(crate) fn get_formatted(&self) -> String { pub(crate) fn get_formatted(&self) -> String {
if let Some(entry) = &self.entry_anchor.lock().entry { if let Some(entry) = &self.entry_anchor.lock().unwrap().entry {
let entry = entry.lock(); let entry = entry.lock().unwrap();
if let Some(display) = &self.display { if let Some(display) = &self.display {
let mut template = PlaceholderTemplate::new(display.clone()); let display = display.read().unwrap();
let mut template = PlaceholderTemplate::new(display.get().as_string());
let mut value_map = HashMap::new(); let mut value_map = HashMap::new();
value_map.insert("key".to_string(), entry.key()); value_map.insert("key".to_string(), entry.key());
@ -841,71 +770,3 @@ impl MetadataValue {
} }
} }
} }
impl Line {
pub fn as_raw_text(&self) -> TextLine {
match self {
Line::Text(t) => t.clone(),
Line::Ruler(_) => TextLine::new(),
Line::RefLink(r) => r.description.clone(),
Line::Anchor(a) => a.inner.as_raw_text().as_plain_line(),
Line::Centered(c) => c.line.clone(),
Line::BibEntry(_) => TextLine::new(),
}
}
}
impl Inline {
pub fn as_plain_text(&self) -> PlainText {
match self {
Inline::Plain(p) => p.clone(),
Inline::Bold(b) => b.value.iter().fold(
PlainText {
value: String::new(),
},
|a, b| PlainText {
value: format!("{} {}", a.value, b.as_plain_text().value),
},
),
Inline::Italic(i) => i.value.iter().fold(
PlainText {
value: String::new(),
},
|a, b| PlainText {
value: format!("{} {}", a.value, b.as_plain_text().value),
},
),
Inline::Underlined(u) => u.value.iter().fold(
PlainText {
value: String::new(),
},
|a, b| PlainText {
value: format!("{} {}", a.value, b.as_plain_text().value),
},
),
Inline::Striked(s) => s.value.iter().fold(
PlainText {
value: String::new(),
},
|a, b| PlainText {
value: format!("{} {}", a.value, b.as_plain_text().value),
},
),
Inline::Monospace(m) => PlainText {
value: m.value.clone(),
},
Inline::Superscript(s) => s.value.iter().fold(
PlainText {
value: String::new(),
},
|a, b| PlainText {
value: format!("{} {}", a.value, b.as_plain_text().value),
},
),
Inline::Colored(c) => c.value.as_plain_text(),
_ => PlainText {
value: String::new(),
},
}
}
}

@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
#![allow(unused)] #![allow(unused)]
pub(crate) const BACKSLASH: char = '\\'; pub(crate) const BACKSLASH: char = '\\';
@ -39,7 +33,6 @@ pub(crate) const L_BRACE: char = '}';
pub(crate) const PERCENT: char = '%'; pub(crate) const PERCENT: char = '%';
pub(crate) const COMMA: char = ','; pub(crate) const COMMA: char = ',';
pub(crate) const MATH: char = '$'; pub(crate) const MATH: char = '$';
pub(crate) const DOLLAR: char = '$';
pub(crate) const AMPERSAND: char = '&'; pub(crate) const AMPERSAND: char = '&';
pub(crate) const QUESTION_MARK: char = '?'; pub(crate) const QUESTION_MARK: char = '?';
@ -89,18 +82,6 @@ pub(crate) const CHARACTER_STOP: char = SEMICOLON;
pub(crate) const GLOSSARY_REF_START: char = TILDE; pub(crate) const GLOSSARY_REF_START: char = TILDE;
// Reference Anchors
pub(crate) const ANCHOR_START: &'static [char] = &[R_BRACKET, QUESTION_MARK];
pub(crate) const ANCHOR_STOP: char = L_BRACKET;
// References
pub(crate) const REF_START: &'static [char] = &[R_BRACKET, DOLLAR];
pub(crate) const REF_STOP: char = L_BRACKET;
pub(crate) const REF_DESC_START: char = R_PARENTH;
pub(crate) const REF_DESC_STOP: char = L_PARENTH;
// Arrows // Arrows
pub(crate) const A_RIGHT_ARROW: &'static [char] = &['-', '-', '>']; pub(crate) const A_RIGHT_ARROW: &'static [char] = &['-', '-', '>'];
@ -149,8 +130,6 @@ pub(crate) const INLINE_SPECIAL_SEQUENCES: &'static [&'static [char]] = &[
A_RIGHT_ARROW, A_RIGHT_ARROW,
A_LEFT_ARROW, A_LEFT_ARROW,
A_LEFT_RIGHT_ARROW, A_LEFT_RIGHT_ARROW,
ANCHOR_START,
REF_START,
]; ];
pub(crate) const LIST_SPECIAL_CHARS: [char; 14] = [ pub(crate) const LIST_SPECIAL_CHARS: [char; 14] = [

@ -1,167 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
body {
background-color: $body-background;
overflow-x: hidden;
color: $primary-color;
word-break: break-word;
}
.content {
font-family: "Fira Sans", "Noto Sans", SansSerif, sans-serif;
width: 100vh;
max-width: calc(100% - 4rem);
padding: 2rem;
margin: auto;
background-color: $background-color;
}
h1 {
font-size: 2.2rem;
}
h2 {
font-size: 1.8rem;
}
h3 {
font-size: 1.4rem;
}
h4 {
font-size: 1rem;
}
h5 {
font-size: 0.8rem;
}
h6 {
font-size: 0.4rem;
}
img {
max-width: 100%;
max-height: 100vh;
height: auto;
}
code {
color: $primary-color;
pre {
font-family: "Fira Code", "Mono", monospace;
padding: 0.8em 0.2em;
background-color: $code-background !important;
border-radius: 0.25em;
overflow: auto;
}
&.inlineCode {
font-family: "Fira Code", monospace;
border-radius: 0.1em;
background-color: $code-background;
padding: 0 0.1em;
}
}
.tableWrapper {
overflow-x: auto;
width: 100%;
& > table {
margin: auto;
}
}
table {
border-collapse: collapse;
tr {
&:nth-child(odd) {
background-color: $table-background-alt;
}
&:nth-child(1) {
background-color: $table-background-alt;
font-weight: bold;
border-bottom: 1px solid invert($background-color)
}
}
}
table td, table th {
border-left: 1px solid invert($background-color);
padding: 0.2em 0.5em;
}
table tr td:first-child, table tr th:first-child {
border-left: none;
}
blockquote {
margin-left: 0;
padding-top: 0.2em;
padding-bottom: 0.2em;
background-color: rgba(0, 0, 0, 0);
}
a {
color: $secondary-color;
}
.quote {
border-left: 0.3em solid $quote-background-alt;
border-radius: 0.2em;
padding-left: 1em;
margin-left: 0;
background-color: $quote-background;
.metadata {
font-style: italic;
padding-left: 0.5em;
color: $primary-variant-1;
}
}
.figure {
width: 100%;
display: block;
text-align: center;
.imageDescription {
display: block;
color: $primary-variant-1;
font-style: italic;
}
}
.centered {
text-align: center;
}
.glossaryReference {
text-decoration: none;
color: inherit;
border-bottom: 1px dotted $primary-color;
}
.arrow {
font-family: "Fira Code", "Mono", monospace;
}
@media print {
.content > section > section, .content > section > section {
page-break-inside: avoid;
}
body {
background-color: $background-color !important;
}
}

@ -1,20 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
$background-color: #1e1d2c;
$background-color-variant-1: lighten($background-color, 7%);
$background-color-variant-2: lighten($background-color, 14%);
$background-color-variant-3: lighten($background-color, 21%);
$primary-color: #EEE;
$primary-variant-1: darken($primary-color, 14%);
$secondary-color: #3aa7df;
$body-background: darken($background-color, 5%);
$code-background: lighten($background-color, 5%);
$table-background-alt: $background-color-variant-2;
$quote-background: lighten($background-color-variant-1, 3%);
$quote-background-alt: $background-color-variant-3;

@ -1,20 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
$background-color: darken(#2b303b, 8%);
$background-color-variant-1: lighten($background-color, 7%);
$background-color-variant-2: lighten($background-color, 14%);
$background-color-variant-3: lighten($background-color, 21%);
$primary-color: #EEE;
$primary-variant-1: darken($primary-color, 14%);
$secondary-color: #3aa7df;
$body-background: darken($background-color, 5%);
$code-background: $background-color-variant-1;
$table-background-alt: $background-color-variant-2;
$quote-background: lighten($background-color-variant-1, 3%);
$quote-background-alt: $background-color-variant-3;

@ -1,19 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
$background-color: darken(#002b36, 5%);
$background-color-variant-1: lighten($background-color, 7%);
$background-color-variant-2: lighten($background-color, 14%);
$background-color-variant-3: lighten($background-color, 21%);
$primary-color: #EEE;
$primary-variant-1: darken($primary-color, 14%);
$secondary-color: #0096c9;
$body-background: darken($background-color, 3%);
$code-background: $background-color-variant-1;
$table-background-alt: lighten($background-color, 10%);
$quote-background: lighten($background-color-variant-1, 3%);
$quote-background-alt: $background-color-variant-3;

@ -1,19 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
$background-color: #FFF;
$background-color-variant-1: darken($background-color, 7%);
$background-color-variant-2: darken($background-color, 14%);
$background-color-variant-3: darken($background-color, 21%);
$primary-color: #000;
$primary-variant-1: lighten($primary-color, 14%);
$secondary-color: #00286a;
$body-background: $background-color-variant-1;
$code-background: $background-color-variant-1;
$table-background-alt: $background-color-variant-2;
$quote-background: $background-color-variant-2;
$quote-background-alt: $background-color-variant-3;

@ -1,19 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
$background-color: #FFF;
$background-color-variant-1: darken($background-color, 7%);
$background-color-variant-2: darken($background-color, 14%);
$background-color-variant-3: darken($background-color, 21%);
$primary-color: #112;
$primary-variant-1: lighten($primary-color, 14%);
$secondary-color: #00348e;
$body-background: $background-color-variant-1;
$code-background: $background-color-variant-1;
$table-background-alt: $background-color-variant-2;
$quote-background: $background-color-variant-2;
$quote-background-alt: $background-color-variant-3;

@ -1,19 +0,0 @@
/*!
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
$background-color: #fff8f0;
$background-color-variant-1: darken($background-color, 4%);
$background-color-variant-2: darken($background-color, 8%);
$background-color-variant-3: darken($background-color, 12%);
$primary-color: #112;
$primary-variant-1: lighten($primary-color, 14%);
$secondary-color: #2b61be;
$body-background: $background-color-variant-1;
$code-background: $background-color-variant-1;
$table-background-alt: $background-color-variant-2;
$quote-background: $background-color-variant-2;
$quote-background-alt: $background-color-variant-3;

@ -1,9 +1,3 @@
<!--
~ Snekdown - Custom Markdown flavour and parser
~ Copyright (C) 2021 Trivernis
~ See LICENSE for more information.
-->
<div style="font-size: 10px; text-align: center; width: 100%;"> <div style="font-size: 10px; text-align: center; width: 100%;">
<span class="pageNumber"></span>/<span class="totalPages"></span> <span class="pageNumber"></span>/<span class="totalPages"></span>
</div> </div>

@ -1,24 +1,20 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::Document; use crate::elements::Document;
use crate::format::chromium_pdf::result::{PdfRenderingError, PdfRenderingResult}; use crate::format::chromium_pdf::result::{PdfRenderingError, PdfRenderingResult};
use crate::format::html::html_writer::HTMLWriter; use crate::format::html::html_writer::HTMLWriter;
use crate::format::html::to_html::ToHtml; use crate::format::html::to_html::ToHtml;
use crate::settings::Settings; use crate::references::configuration::keys::{
use crate::utils::caching::CacheStorage; INCLUDE_MATHJAX, PDF_DISPLAY_HEADER_FOOTER, PDF_FOOTER_TEMPLATE, PDF_HEADER_TEMPLATE,
use bibliographix::Mutex; PDF_MARGIN_BOTTOM, PDF_MARGIN_LEFT, PDF_MARGIN_RIGHT, PDF_MARGIN_TOP, PDF_PAGE_HEIGHT,
PDF_PAGE_SCALE, PDF_PAGE_WIDTH,
};
use crate::references::configuration::Configuration;
use crate::utils::downloads::get_cached_path;
use headless_chrome::protocol::page::PrintToPdfOptions; use headless_chrome::protocol::page::PrintToPdfOptions;
use headless_chrome::{Browser, Tab}; use headless_chrome::{Browser, LaunchOptionsBuilder, Tab};
use std::env;
use std::fs; use std::fs;
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::io::BufWriter; use std::io::BufWriter;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc;
use std::thread; use std::thread;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
@ -26,17 +22,16 @@ pub mod result;
/// Renders the document to pdf and returns the resulting bytes /// Renders the document to pdf and returns the resulting bytes
pub fn render_to_pdf(document: Document) -> PdfRenderingResult<Vec<u8>> { pub fn render_to_pdf(document: Document) -> PdfRenderingResult<Vec<u8>> {
let cache = CacheStorage::new(); let mut file_path = PathBuf::from(format!("tmp-document.html"));
let mut file_path = PathBuf::from("tmp-document.html"); file_path = get_cached_path(file_path).with_extension("html");
file_path = cache.get_file_path(&file_path); let mut mathjax = false;
if !file_path.parent().map(|p| p.exists()).unwrap_or(false) { if let Some(entry) = document.config.get_entry(INCLUDE_MATHJAX) {
file_path = env::current_dir()?; if entry.get().as_bool() == Some(true) {
file_path.push(PathBuf::from(".tmp-document.html")) mathjax = true;
}
} }
let config = document.config.clone(); let config = document.config.clone();
let mathjax = config.lock().features.include_mathjax;
let handle = thread::spawn({ let handle = thread::spawn({
let file_path = file_path.clone(); let file_path = file_path.clone();
@ -49,15 +44,14 @@ pub fn render_to_pdf(document: Document) -> PdfRenderingResult<Vec<u8>> {
.truncate(true) .truncate(true)
.open(file_path)?, .open(file_path)?,
); );
let mut html_writer = let mut html_writer = HTMLWriter::new(Box::new(writer));
HTMLWriter::new(Box::new(writer), document.config.lock().style.theme.clone());
document.to_html(&mut html_writer)?; document.to_html(&mut html_writer)?;
log::info!("Successfully rendered temporary html file!"); log::info!("Successfully rendered temporary html file!");
html_writer.flush() html_writer.flush()
} }
}); });
let browser = Browser::default()?; let browser = Browser::new(LaunchOptionsBuilder::default().build().unwrap())?;
let tab = browser.wait_for_initial_tab()?; let tab = browser.wait_for_initial_tab()?;
handle.join().unwrap()?; handle.join().unwrap()?;
tab.navigate_to(format!("file:///{}", file_path.to_string_lossy()).as_str())?; tab.navigate_to(format!("file:///{}", file_path.to_string_lossy()).as_str())?;
@ -112,23 +106,49 @@ fn wait_for_mathjax(tab: &Tab, timeout: Duration) -> PdfRenderingResult<()> {
Ok(()) Ok(())
} }
fn get_pdf_options(config: Arc<Mutex<Settings>>) -> PrintToPdfOptions { fn get_pdf_options(config: Configuration) -> PrintToPdfOptions {
let config = config.lock().pdf.clone();
PrintToPdfOptions { PrintToPdfOptions {
landscape: None, landscape: None,
display_header_footer: Some(config.display_header_footer), display_header_footer: config
.get_entry(PDF_DISPLAY_HEADER_FOOTER)
.and_then(|value| value.get().as_bool()),
print_background: Some(true), print_background: Some(true),
scale: Some(config.page_scale), scale: config
paper_width: config.page_width, .get_entry(PDF_PAGE_SCALE)
paper_height: config.page_height, .and_then(|value| value.get().as_float())
margin_top: config.margin.top, .map(|value| value as f32),
margin_bottom: config.margin.bottom, paper_width: config
margin_left: config.margin.left, .get_entry(PDF_PAGE_WIDTH)
margin_right: config.margin.right, .and_then(|value| value.get().as_float())
.map(|value| value as f32),
paper_height: config
.get_entry(PDF_PAGE_HEIGHT)
.and_then(|value| value.get().as_float())
.map(|value| value as f32),
margin_top: config
.get_entry(PDF_MARGIN_TOP)
.and_then(|value| value.get().as_float())
.map(|f| f as f32),
margin_bottom: config
.get_entry(PDF_MARGIN_BOTTOM)
.and_then(|value| value.get().as_float())
.map(|f| f as f32),
margin_left: config
.get_entry(PDF_MARGIN_LEFT)
.and_then(|value| value.get().as_float())
.map(|f| f as f32),
margin_right: config
.get_entry(PDF_MARGIN_RIGHT)
.and_then(|value| value.get().as_float())
.map(|f| f as f32),
page_ranges: None, page_ranges: None,
ignore_invalid_page_ranges: None, ignore_invalid_page_ranges: None,
header_template: config.header_template, header_template: config
footer_template: config.footer_template, .get_entry(PDF_HEADER_TEMPLATE)
.map(|value| value.get().as_string()),
footer_template: config
.get_entry(PDF_FOOTER_TEMPLATE)
.map(|value| value.get().as_string()),
prefer_css_page_size: None, prefer_css_page_size: None,
} }
} }

@ -1,11 +1,5 @@
/* use serde::export::fmt::{self, Display};
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use std::error::Error; use std::error::Error;
use std::fmt::{self, Display};
use std::io; use std::io;
pub type PdfRenderingResult<T> = Result<T, PdfRenderingError>; pub type PdfRenderingResult<T> = Result<T, PdfRenderingError>;

@ -0,0 +1,149 @@
body {
background-color: #DDD;
overflow-x: hidden;
color: #000;
word-break: break-word;
}
.content {
font-family: "Fira Sans", "Noto Sans", SansSerif, sans-serif;
width: 100vh;
max-width: calc(100% - 4rem);
padding: 2rem;
margin: auto;
background-color: #FFF;
}
h1 {
font-size: 2.2rem;
}
h2 {
font-size: 1.8rem;
}
h3 {
font-size: 1.4rem;
}
h4 {
font-size: 1rem;
}
h5 {
font-size: 0.8rem;
}
h6 {
font-size: 0.4rem;
}
img {
max-width: 100%;
max-height: 100vh;
height: auto;
}
code {
color: #000;
}
code pre {
font-family: "Fira Code", "Mono", monospace;
padding: 0.8em 0.2em;
background-color: #EEE !important;
border-radius: 0.25em;
}
code.inlineCode {
font-family: "Fira Code", monospace;
border-radius: 0.1em;
background-color: #EEE;
padding: 0 0.1em
}
.tableWrapper {
overflow-x: auto;
width: 100%;
}
.tableWrapper > table {
margin: auto;
}
table {
border-collapse: collapse;
}
table tr:nth-child(odd) {
background-color: #DDD;
}
table tr:nth-child(1) {
background-color: white;
font-weight: bold;
border-bottom: 1px solid black;
}
table td, table th {
border-left: 1px solid black;
padding: 0.2em 0.5em
}
table tr td:first-child, table tr th:first-child {
border-left: none;
}
blockquote {
margin-left: 0;
background-color: rgba(0, 0, 0, 0);
}
.quote {
border-left: 0.3em solid gray;
border-radius: 0.2em;
padding-left: 1em;
margin-left: 0;
background-color: #EEE;
}
.quote .metadata {
font-style: italic;
padding-left: 0.5em;
color: #444
}
.figure {
width: 100%;
display: block;
text-align: center;
}
.figure .imageDescription {
display: block;
color: #444;
font-style: italic;
}
.centered {
text-align: center;
}
.glossaryReference {
text-decoration: none;
color: inherit;
border-bottom: 1px dotted #000;
}
.arrow {
font-family: "Fira Code", "Mono", monospace;
}
@media print {
.content > section > section, .content > section > section {
page-break-inside: avoid;
}
body {
background-color: white !important;
}
}

@ -1,22 +1,14 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::settings::style_settings::Theme;
use std::io; use std::io;
use std::io::Write; use std::io::Write;
pub struct HTMLWriter { pub struct HTMLWriter {
inner: Box<dyn Write>, inner: Box<dyn Write>,
theme: Theme,
} }
impl HTMLWriter { impl HTMLWriter {
/// Creates a new writer /// Creates a new writer
pub fn new(inner: Box<dyn Write>, theme: Theme) -> Self { pub fn new(inner: Box<dyn Write>) -> Self {
Self { inner, theme } Self { inner }
} }
/// Writes a raw string /// Writes a raw string
@ -38,9 +30,4 @@ impl HTMLWriter {
pub fn flush(&mut self) -> io::Result<()> { pub fn flush(&mut self) -> io::Result<()> {
self.inner.flush() self.inner.flush()
} }
/// Return the theme of the html writer
pub fn get_theme(&mut self) -> Theme {
self.theme.clone()
}
} }

@ -1,8 +1,2 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
pub mod html_writer; pub mod html_writer;
pub mod to_html; pub mod to_html;

@ -1,20 +1,18 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::*; use crate::elements::*;
use crate::format::html::html_writer::HTMLWriter; use crate::format::html::html_writer::HTMLWriter;
use crate::format::style::{get_code_theme_for_theme, get_css_for_theme};
use crate::format::PlaceholderTemplate; use crate::format::PlaceholderTemplate;
use crate::references::configuration::keys::{EMBED_EXTERNAL, INCLUDE_MATHJAX, META_LANG};
use crate::references::configuration::Value;
use crate::references::glossary::{GlossaryDisplay, GlossaryReference}; use crate::references::glossary::{GlossaryDisplay, GlossaryReference};
use crate::references::templates::{Template, TemplateVariable}; use crate::references::templates::{Template, TemplateVariable};
use asciimath_rs::format::mathml::ToMathML; use asciimath_rs::format::mathml::ToMathML;
use htmlescape::encode_attribute; use htmlescape::encode_attribute;
use minify::html::minify; use minify::html::minify;
use std::io; use std::io;
use std::sync::Arc;
use syntect::highlighting::ThemeSet;
use syntect::html::highlighted_html_for_string; use syntect::html::highlighted_html_for_string;
use syntect::parsing::SyntaxSet;
const MATHJAX_URL: &str = "https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"; const MATHJAX_URL: &str = "https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js";
@ -66,9 +64,8 @@ impl ToHtml for Inline {
Inline::Math(m) => m.to_html(writer), Inline::Math(m) => m.to_html(writer),
Inline::LineBreak => writer.write("<br>".to_string()), Inline::LineBreak => writer.write("<br>".to_string()),
Inline::CharacterCode(code) => code.to_html(writer), Inline::CharacterCode(code) => code.to_html(writer),
Inline::GlossaryReference(gloss) => gloss.lock().to_html(writer), Inline::GlossaryReference(gloss) => gloss.lock().unwrap().to_html(writer),
Inline::Arrow(a) => a.to_html(writer), Inline::Arrow(a) => a.to_html(writer),
Inline::Anchor(a) => a.to_html(writer),
} }
} }
} }
@ -105,6 +102,18 @@ impl ToHtml for MetadataValue {
impl ToHtml for Document { impl ToHtml for Document {
fn to_html(&self, writer: &mut HTMLWriter) -> io::Result<()> { fn to_html(&self, writer: &mut HTMLWriter) -> io::Result<()> {
let downloads = Arc::clone(&self.downloads);
if let Some(Value::Bool(embed)) = self
.config
.get_entry(EMBED_EXTERNAL)
.map(|e| e.get().clone())
{
if embed {
downloads.lock().unwrap().download_all();
}
} else {
downloads.lock().unwrap().download_all();
}
let path = if let Some(path) = &self.path { let path = if let Some(path) = &self.path {
format!("path=\"{}\"", encode_attribute(path.as_str())) format!("path=\"{}\"", encode_attribute(path.as_str()))
} else { } else {
@ -112,64 +121,38 @@ impl ToHtml for Document {
}; };
if self.is_root { if self.is_root {
let metadata = self.config.lock().metadata.clone(); let language = self
.config
let style = minify(get_css_for_theme(writer.get_theme()).as_str()); .get_entry(META_LANG)
.map(|e| e.get().as_string())
.unwrap_or("en".to_string());
let style = minify(std::include_str!("assets/style.css"));
writer.write("<!DOCTYPE html>".to_string())?; writer.write("<!DOCTYPE html>".to_string())?;
writer.write("<html lang=\"".to_string())?; writer.write("<html lang=\"".to_string())?;
writer.write_attribute(metadata.language)?; writer.write_attribute(language)?;
writer.write("\"><head>".to_string())?; writer.write("\"><head ".to_string())?;
writer.write(path)?;
writer.write("/>".to_string())?;
writer.write("<meta charset=\"UTF-8\">".to_string())?; writer.write("<meta charset=\"UTF-8\">".to_string())?;
if let Some(author) = metadata.author {
writer.write("<meta name=\"author\" content=\"".to_string())?;
writer.write_attribute(author)?;
writer.write("\">".to_string())?;
}
if let Some(title) = metadata.title {
writer.write("<title>".to_string())?;
writer.write_escaped(title.clone())?;
writer.write("</title>".to_string())?;
writer.write("<meta name=\"title\" content=\"".to_string())?;
writer.write_attribute(title)?;
writer.write("\">".to_string())?;
}
if let Some(description) = metadata.description {
writer.write("<meta name=\"description\" content=\"".to_string())?;
writer.write_attribute(description)?;
writer.write("\">".to_string())?;
}
if !metadata.keywords.is_empty() {
writer.write("<meta name=\"keywords\" content=\"".to_string())?;
writer.write_attribute(
metadata
.keywords
.iter()
.fold("".to_string(), |a, b| format!("{}, {}", a, b))
.trim_start_matches(", ")
.to_string(),
)?;
writer.write("\">".to_string())?;
}
writer.write("<style>".to_string())?; writer.write("<style>".to_string())?;
writer.write(style)?; writer.write(style)?;
writer.write("</style>".to_string())?; writer.write("</style>".to_string())?;
if self.config.lock().features.include_mathjax {
writer.write(format!(
"<script id=\"MathJax-script\" type=\"text/javascript\" async src={}></script>",
MATHJAX_URL
))?;
}
for stylesheet in &self.stylesheets { for stylesheet in &self.stylesheets {
let mut stylesheet = stylesheet.lock(); let mut stylesheet = stylesheet.lock().unwrap();
let data = std::mem::replace(&mut stylesheet.data, None); let data = std::mem::replace(&mut stylesheet.data, None);
if let Some(data) = data { if let Some(data) = data {
if self
.config
.get_entry(INCLUDE_MATHJAX)
.and_then(|e| e.get().as_bool())
.unwrap_or(true)
{
writer.write(format!(
"<script id=\"MathJax-script\" type=\"text/javascript\" async src={}></script>",
MATHJAX_URL
))?;
}
writer.write("<style>".to_string())?; writer.write("<style>".to_string())?;
writer.write(minify(String::from_utf8(data).unwrap().as_str()))?; writer.write(minify(String::from_utf8(data).unwrap().as_str()))?;
writer.write("</style>".to_string())?; writer.write("</style>".to_string())?;
@ -257,7 +240,7 @@ impl ToHtml for Paragraph {
} }
if self.elements.len() > 1 { if self.elements.len() > 1 {
for element in &self.elements[1..] { for element in &self.elements[1..] {
writer.write(" ".to_string())?; writer.write("<br/>".to_string())?;
element.to_html(writer)?; element.to_html(writer)?;
} }
} }
@ -352,19 +335,19 @@ impl ToHtml for Cell {
impl ToHtml for CodeBlock { impl ToHtml for CodeBlock {
fn to_html(&self, writer: &mut HTMLWriter) -> io::Result<()> { fn to_html(&self, writer: &mut HTMLWriter) -> io::Result<()> {
writer.write("<div><code".to_string())?; writer.write("<div><code".to_string())?;
if self.language.len() > 0 { if self.language.len() > 0 {
writer.write(" lang=\"".to_string())?; writer.write(" lang=\"".to_string())?;
writer.write_attribute(self.language.clone())?; writer.write_attribute(self.language.clone())?;
writer.write("\">".to_string())?; writer.write("\">".to_string())?;
let (theme, syntax_set) = get_code_theme_for_theme(writer.get_theme()); lazy_static::lazy_static! { static ref PS: SyntaxSet = SyntaxSet::load_defaults_nonewlines(); }
lazy_static::lazy_static! { static ref TS: ThemeSet = ThemeSet::load_defaults(); }
if let Some(syntax) = syntax_set.find_syntax_by_token(self.language.as_str()) { if let Some(syntax) = PS.find_syntax_by_token(self.language.as_str()) {
writer.write(highlighted_html_for_string( writer.write(highlighted_html_for_string(
self.code.as_str(), self.code.as_str(),
&syntax_set, &PS,
syntax, syntax,
&theme, &TS.themes["InspiredGitHub"],
))?; ))?;
} else { } else {
writer.write("<pre>".to_string())?; writer.write("<pre>".to_string())?;
@ -418,7 +401,7 @@ impl ToHtml for Image {
let mut style = String::new(); let mut style = String::new();
let url = if let Some(content) = self.get_content() { let url = if let Some(content) = self.get_content() {
let mime_type = self.get_mime_type(); let mime_type = mime_guess::from_path(&self.url.url).first_or(mime::IMAGE_PNG);
format!( format!(
"data:{};base64,{}", "data:{};base64,{}",
mime_type.to_string(), mime_type.to_string(),
@ -437,7 +420,7 @@ impl ToHtml for Image {
} }
if let Some(description) = self.url.description.clone() { if let Some(description) = self.url.description.clone() {
writer.write("<div class=\"figure\"><a href=\"".to_string())?; writer.write("<div class=\"figure\"><a href=\"".to_string())?;
writer.write_attribute(url.clone())?; writer.write_attribute(self.url.url.clone())?;
writer.write("\"><img src=\"".to_string())?; writer.write("\"><img src=\"".to_string())?;
writer.write(url)?; writer.write(url)?;
writer.write("\" style=\"".to_string())?; writer.write("\" style=\"".to_string())?;
@ -685,7 +668,7 @@ impl ToHtml for Anchor {
impl ToHtml for GlossaryReference { impl ToHtml for GlossaryReference {
fn to_html(&self, writer: &mut HTMLWriter) -> io::Result<()> { fn to_html(&self, writer: &mut HTMLWriter) -> io::Result<()> {
if let Some(entry) = &self.entry { if let Some(entry) = &self.entry {
let entry = entry.lock(); let entry = entry.lock().unwrap();
writer.write("<a class=\"glossaryReference\" href=\"#".to_string())?; writer.write("<a class=\"glossaryReference\" href=\"#".to_string())?;
writer.write_attribute(self.short.clone())?; writer.write_attribute(self.short.clone())?;
writer.write("\">".to_string())?; writer.write("\">".to_string())?;

@ -1,16 +1,9 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use regex::Regex; use regex::Regex;
use std::collections::HashMap; use std::collections::HashMap;
#[cfg(feature = "pdf")] #[cfg(feature = "pdf")]
pub mod chromium_pdf; pub mod chromium_pdf;
pub mod html; pub mod html;
pub mod style;
pub struct PlaceholderTemplate { pub struct PlaceholderTemplate {
value: String, value: String,

@ -1,61 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::settings::style_settings::Theme;
use std::time::Instant;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSet;
/// Returns the css of a theme compiled from sass
pub fn get_css_for_theme(theme: Theme) -> String {
let start = Instant::now();
let vars = match theme {
Theme::GitHub => include_str!("assets/light-github.scss"),
Theme::SolarizedDark => include_str!("assets/dark-solarized.scss"),
Theme::SolarizedLight => include_str!("assets/light-solarized.scss"),
Theme::OceanDark => include_str!("assets/dark-ocean.scss"),
Theme::OceanLight => include_str!("assets/light-ocean.scss"),
Theme::MagicDark => include_str!("assets/dark-magic.scss"),
};
let style = format!("{}\n{}", vars, include_str!("assets/base.scss"));
let css = compile_sass(&*style);
log::debug!("Compiled style in {} ms", start.elapsed().as_millis());
css
}
/// Returns the syntax theme for a given theme
pub fn get_code_theme_for_theme(theme: Theme) -> (syntect::highlighting::Theme, SyntaxSet) {
lazy_static::lazy_static! { static ref PS: SyntaxSet = SyntaxSet::load_defaults_nonewlines(); }
lazy_static::lazy_static! { static ref TS: ThemeSet = ThemeSet::load_defaults(); }
let theme = match theme {
Theme::GitHub => "InspiredGitHub",
Theme::SolarizedDark => "Solarized (dark)",
Theme::SolarizedLight => "Solarized (light)",
Theme::OceanDark => "base16-ocean.dark",
Theme::OceanLight => "base16-ocean.light",
Theme::MagicDark => "base16-ocean.dark",
};
return (TS.themes[theme].clone(), PS.clone());
}
fn compile_sass(sass: &str) -> String {
String::from_utf8(
rsass::compile_scss(
sass.as_bytes(),
rsass::output::Format {
style: rsass::output::Style::Compressed,
precision: 5,
},
)
.unwrap(),
)
.unwrap()
}

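For orientation, a minimal sketch of how the two style helpers above might be combined when producing a themed HTML page. Only `get_css_for_theme`, `get_code_theme_for_theme`, and the `Theme` variants come from the code shown; the `crate::format::style` module path and the `<style>` wrapping are assumptions for illustration.

```rust
use crate::format::style::{get_code_theme_for_theme, get_css_for_theme};
use crate::settings::style_settings::Theme;

/// Hypothetical helper (not part of this repository): compile the theme's
/// stylesheet and pick the matching syntect highlighting theme in one step.
fn themed_assets() -> (String, syntect::highlighting::Theme) {
    // Compressed CSS for the whole document, compiled from the bundled SCSS.
    let css = get_css_for_theme(Theme::OceanDark);
    // Matching code highlighting theme; the default syntax set is unused here.
    let (code_theme, _syntax_set) = get_code_theme_for_theme(Theme::OceanDark);
    (format!("<style>{}</style>", css), code_theme)
}
```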
@ -1,14 +1,7 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
pub mod elements; pub mod elements;
pub mod format; pub mod format;
pub mod parser; pub mod parser;
pub mod references; pub mod references;
pub mod settings;
pub mod utils; pub mod utils;
pub use parser::Parser; pub use parser::Parser;

@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use colored::Colorize; use colored::Colorize;
use env_logger::Env; use env_logger::Env;
use log::{Level, LevelFilter}; use log::{Level, LevelFilter};
@ -12,68 +6,44 @@ use snekdown::elements::Document;
use snekdown::format::html::html_writer::HTMLWriter; use snekdown::format::html::html_writer::HTMLWriter;
use snekdown::format::html::to_html::ToHtml; use snekdown::format::html::to_html::ToHtml;
use snekdown::parser::ParserOptions; use snekdown::parser::ParserOptions;
use snekdown::settings::Settings;
use snekdown::utils::caching::CacheStorage;
use snekdown::Parser; use snekdown::Parser;
use std::fs::{File, OpenOptions}; use std::fs::{File, OpenOptions};
use std::io::{stdout, BufWriter, Write}; use std::io::{BufWriter, Write};
use std::path::PathBuf; use std::path::PathBuf;
use std::process::exit;
use std::sync::mpsc::channel; use std::sync::mpsc::channel;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use structopt::StructOpt; use structopt::StructOpt;
#[derive(StructOpt, Debug, Clone)] #[derive(StructOpt, Debug)]
struct Opt { struct Opt {
#[structopt(subcommand)]
sub_command: SubCommand,
}
#[derive(StructOpt, Debug, Clone)]
#[structopt()]
enum SubCommand {
/// Watch the document and its imports and render on change.
Watch(WatchOptions),
/// Parse and render the document.
Render(RenderOptions),
/// Initializes the project with default settings
Init,
/// Clears the cache directory
ClearCache,
}
#[derive(StructOpt, Debug, Clone)]
#[structopt()]
struct RenderOptions {
/// Path to the input file /// Path to the input file
#[structopt(parse(from_os_str))] #[structopt(parse(from_os_str))]
input: PathBuf, input: PathBuf,
/// Path for the output file /// Path for the output file
#[structopt(parse(from_os_str))] #[structopt(parse(from_os_str))]
output: Option<PathBuf>, output: PathBuf,
/// If the output should be written to stdout instead of the output file
#[structopt(long = "stdout")]
stdout: bool,
/// the output format /// the output format
#[structopt(short, long, default_value = "html")] #[structopt(short, long, default_value = "html")]
format: String, format: String,
/// Don't use the cache
#[structopt(long)]
no_cache: bool,
#[structopt(subcommand)]
sub_command: Option<SubCommand>,
} }
#[derive(StructOpt, Debug, Clone)] #[derive(StructOpt, Debug)]
#[structopt()] #[structopt()]
struct WatchOptions { enum SubCommand {
/// The amount of time in milliseconds to wait after changes before rendering /// Watch the document and its imports and render on change.
#[structopt(long, default_value = "500")] Watch,
debounce: u64,
#[structopt(flatten)] /// Default. Parse and render the document.
render_options: RenderOptions, Render,
} }
fn main() { fn main() {
@ -98,17 +68,19 @@ fn main() {
) )
}) })
.init(); .init();
if !opt.input.exists() {
log::error!(
"The input file {} could not be found",
opt.input.to_str().unwrap()
);
return;
}
match &opt.sub_command { match &opt.sub_command {
SubCommand::Render(opt) => { Some(SubCommand::Render) | None => {
let _ = render(&opt); let _ = render(&opt);
} }
SubCommand::Watch(opt) => watch(&opt), Some(SubCommand::Watch) => watch(&opt),
SubCommand::ClearCache => {
let cache = CacheStorage::new();
cache.clear().expect("Failed to clear cache");
}
SubCommand::Init => init(),
}; };
} }
@ -122,47 +94,17 @@ fn get_level_style(level: Level) -> colored::Color {
} }
} }
fn init() {
let settings = Settings::default();
let settings_string = toml::to_string_pretty(&settings).unwrap();
let manifest_path = PathBuf::from("Manifest.toml");
let bibliography_path = PathBuf::from("Bibliography.toml");
let glossary_path = PathBuf::from("Glossary.toml");
let css_path = PathBuf::from("style.css");
if !manifest_path.exists() {
let mut file = OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.open("Manifest.toml")
.unwrap();
file.write_all(settings_string.as_bytes()).unwrap();
file.flush().unwrap();
}
if !bibliography_path.exists() {
File::create("Bibliography.toml".to_string()).unwrap();
}
if !glossary_path.exists() {
File::create("Glossary.toml".to_string()).unwrap();
}
if !css_path.exists() {
File::create("style.css".to_string()).unwrap();
}
}
/// Watches a file with all of its imports and renders on change /// Watches a file with all of its imports and renders on change
fn watch(opt: &WatchOptions) { fn watch(opt: &Opt) {
let parser = render(&opt.render_options); let parser = render(opt);
let (tx, rx) = channel(); let (tx, rx) = channel();
let mut watcher = watcher(tx, Duration::from_millis(opt.debounce)).unwrap(); let mut watcher = watcher(tx, Duration::from_millis(250)).unwrap();
for path in parser.get_paths() { for path in parser.get_paths() {
watcher.watch(path, RecursiveMode::NonRecursive).unwrap(); watcher.watch(path, RecursiveMode::NonRecursive).unwrap();
} }
while let Ok(_) = rx.recv() { while let Ok(_) = rx.recv() {
println!("---"); println!("---");
let parser = render(&opt.render_options); let parser = render(opt);
for path in parser.get_paths() { for path in parser.get_paths() {
watcher.watch(path, RecursiveMode::NonRecursive).unwrap(); watcher.watch(path, RecursiveMode::NonRecursive).unwrap();
} }
@ -170,50 +112,37 @@ fn watch(opt: &WatchOptions) {
} }
/// Renders the document to the output path /// Renders the document to the output path
fn render(opt: &RenderOptions) -> Parser { fn render(opt: &Opt) -> Parser {
if !opt.input.exists() {
log::error!(
"The input file {} could not be found",
opt.input.to_str().unwrap()
);
exit(1)
}
let start = Instant::now(); let start = Instant::now();
let mut parser = Parser::with_defaults(ParserOptions::default().add_path(opt.input.clone())); let mut parser = Parser::with_defaults(
ParserOptions::default()
.add_path(opt.input.clone())
.use_cache(!opt.no_cache),
);
let document = parser.parse(); let document = parser.parse();
log::info!("Parsing + Processing took: {:?}", start.elapsed()); log::info!("Parsing took: {:?}", start.elapsed());
let start_render = Instant::now(); let start_render = Instant::now();
if let Some(output) = &opt.output { let file = OpenOptions::new()
let file = OpenOptions::new() .read(true)
.read(true) .write(true)
.write(true) .truncate(true)
.truncate(true) .create(true)
.create(true) .open(&opt.output)
.open(output) .unwrap();
.unwrap(); let writer = BufWriter::new(file);
render_format(opt, document, BufWriter::new(file));
} else {
if !opt.stdout {
log::error!("No output file specified");
exit(1)
}
render_format(opt, document, BufWriter::new(stdout()));
}
log::info!("Rendering took: {:?}", start_render.elapsed()); render_format(opt, document, writer);
log::info!("Total: {:?}", start.elapsed()); log::info!("Rendering took: {:?}", start_render.elapsed());
log::info!("Total: {:?}", start.elapsed());
parser parser
} }
#[cfg(not(feature = "pdf"))] #[cfg(not(feature = "pdf"))]
fn render_format<W: Write + 'static>(opt: &RenderOptions, document: Document, writer: W) { fn render_format(opt: &Opt, document: Document, writer: BufWriter<File>) {
match opt.format.as_str() { match opt.format.as_str() {
"html" => render_html(document, writer), "html" => render_html(document, writer),
_ => log::error!("Unknown format {}", opt.format), _ => log::error!("Unknown format {}", opt.format),
@ -221,7 +150,7 @@ fn render_format<W: Write + 'static>(opt: &RenderOptions, document: Document, wr
} }
#[cfg(feature = "pdf")] #[cfg(feature = "pdf")]
fn render_format<W: Write + 'static>(opt: &RenderOptions, document: Document, writer: W) { fn render_format(opt: &Opt, document: Document, writer: BufWriter<File>) {
match opt.format.as_str() { match opt.format.as_str() {
"html" => render_html(document, writer), "html" => render_html(document, writer),
"pdf" => render_pdf(document, writer), "pdf" => render_pdf(document, writer),
@ -229,14 +158,14 @@ fn render_format<W: Write + 'static>(opt: &RenderOptions, document: Document, wr
} }
} }
fn render_html<W: Write + 'static>(document: Document, writer: W) { fn render_html(document: Document, writer: BufWriter<File>) {
let mut writer = HTMLWriter::new(Box::new(writer), document.config.lock().style.theme.clone()); let mut writer = HTMLWriter::new(Box::new(writer));
document.to_html(&mut writer).unwrap(); document.to_html(&mut writer).unwrap();
writer.flush().unwrap(); writer.flush().unwrap();
} }
#[cfg(feature = "pdf")] #[cfg(feature = "pdf")]
fn render_pdf<W: Write + 'static>(document: Document, mut writer: W) { fn render_pdf(document: Document, mut writer: BufWriter<File>) {
use snekdown::format::chromium_pdf::render_to_pdf; use snekdown::format::chromium_pdf::render_to_pdf;
let result = render_to_pdf(document).expect("Failed to render pdf!"); let result = render_to_pdf(document).expect("Failed to render pdf!");

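To illustrate how the subcommand-style CLI defined above (the variant with `RenderOptions`) parses its arguments, here is a small sketch using structopt's `from_iter`. The argument list and the `demo_parse` helper are made up for illustration and mirror an invocation like `snekdown render README.md README.html --format html`.

```rust
use structopt::StructOpt;

/// Hypothetical check of the argument parsing for the subcommand-based CLI
/// above (the first element is the binary name and is skipped by structopt).
fn demo_parse() {
    let opt = Opt::from_iter(vec![
        "snekdown", "render", "README.md", "README.html", "--format", "html",
    ]);
    match opt.sub_command {
        // `output` is the optional positional path, `format` defaults to "html".
        SubCommand::Render(render) => assert_eq!(render.format, "html"),
        _ => unreachable!("the arguments above select the render subcommand"),
    }
}
```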
@ -1,13 +1,7 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use super::ParseResult; use super::ParseResult;
use crate::elements::tokens::*; use crate::elements::tokens::*;
use crate::elements::{ use crate::elements::{
Block, CodeBlock, Import, List, ListItem, MathBlock, Metadata, Paragraph, Quote, Section, Table, Block, CodeBlock, Import, List, ListItem, MathBlock, Paragraph, Quote, Section, Table,
}; };
use crate::parser::inline::ParseInline; use crate::parser::inline::ParseInline;
use crate::parser::line::ParseLine; use crate::parser::line::ParseLine;
@ -32,7 +26,7 @@ impl ParseBlock for Parser {
fn parse_block(&mut self) -> ParseResult<Block> { fn parse_block(&mut self) -> ParseResult<Block> {
if let Some(section) = self.section_return { if let Some(section) = self.section_return {
if section <= self.section_nesting && (self.section_nesting > 0) { if section <= self.section_nesting && (self.section_nesting > 0) {
return Err(self.ctm.assert_error(None).into()); return Err(self.ctm.assert_error(None));
} else { } else {
self.section_return = None; self.section_return = None;
} }
@ -41,7 +35,7 @@ impl ParseBlock for Parser {
log::trace!("Block::Section"); log::trace!("Block::Section");
Block::Section(section) Block::Section(section)
} else if let Some(_) = self.section_return { } else if let Some(_) = self.section_return {
return Err(self.ctm.err().into()); return Err(self.ctm.err());
} else if let Ok(list) = self.parse_list() { } else if let Ok(list) = self.parse_list() {
log::trace!("Block::List"); log::trace!("Block::List");
Block::List(list) Block::List(list)
@ -66,7 +60,7 @@ impl ParseBlock for Parser {
Block::Null Block::Null
} }
} else if let Some(_) = self.section_return { } else if let Some(_) = self.section_return {
return Err(self.ctm.err().into()); return Err(self.ctm.err());
} else if let Ok(pholder) = self.parse_placeholder() { } else if let Ok(pholder) = self.parse_placeholder() {
log::trace!("Block::Placeholder"); log::trace!("Block::Placeholder");
Block::Placeholder(pholder) Block::Placeholder(pholder)
@ -74,7 +68,7 @@ impl ParseBlock for Parser {
log::trace!("Block::Paragraph"); log::trace!("Block::Paragraph");
Block::Paragraph(paragraph) Block::Paragraph(paragraph)
} else { } else {
return Err(self.ctm.err().into()); return Err(self.ctm.err());
}; };
Ok(token) Ok(token)
@ -84,7 +78,6 @@ impl ParseBlock for Parser {
fn parse_section(&mut self) -> ParseResult<Section> { fn parse_section(&mut self) -> ParseResult<Section> {
let start_index = self.ctm.get_index(); let start_index = self.ctm.get_index();
self.ctm.seek_whitespace(); self.ctm.seek_whitespace();
if self.ctm.check_char(&HASH) { if self.ctm.check_char(&HASH) {
let mut size = 1; let mut size = 1;
while let Some(_) = self.ctm.next_char() { while let Some(_) = self.ctm.next_char() {
@ -101,15 +94,13 @@ impl ParseBlock for Parser {
if size <= self.section_nesting { if size <= self.section_nesting {
self.section_return = Some(size); self.section_return = Some(size);
} }
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
self.ctm.seek_any(&INLINE_WHITESPACE)?; self.ctm.seek_any(&INLINE_WHITESPACE)?;
let mut header = self.parse_header()?; let mut header = self.parse_header()?;
header.size = size; header.size = size;
self.section_nesting = size; self.section_nesting = size;
self.sections.push(size); self.sections.push(size);
self.section_anchors.push(header.anchor.clone());
let mut section = Section::new(header); let mut section = Section::new(header);
section.metadata = metadata; section.metadata = metadata;
self.ctm.seek_whitespace(); self.ctm.seek_whitespace();
@ -119,7 +110,6 @@ impl ParseBlock for Parser {
} }
self.sections.pop(); self.sections.pop();
self.section_anchors.pop();
if let Some(sec) = self.sections.last() { if let Some(sec) = self.sections.last() {
self.section_nesting = *sec self.section_nesting = *sec
} else { } else {
@ -127,7 +117,7 @@ impl ParseBlock for Parser {
} }
Ok(section) Ok(section)
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
@ -141,9 +131,8 @@ impl ParseBlock for Parser {
let language = self.ctm.get_string_until_any(&[LB], &[])?; let language = self.ctm.get_string_until_any(&[LB], &[])?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
let text = self.ctm.get_string_until_sequence(&[&SQ_CODE_BLOCK], &[])?; let text = self.ctm.get_string_until_sequence(&[&SQ_CODE_BLOCK], &[])?;
for _ in 0..2 { for _ in 0..2 {
self.ctm.try_seek(); self.ctm.seek_one()?;
} }
Ok(CodeBlock { Ok(CodeBlock {
@ -160,7 +149,7 @@ impl ParseBlock for Parser {
self.ctm.seek_one()?; self.ctm.seek_one()?;
let text = self.ctm.get_string_until_sequence(&[SQ_MATH], &[])?; let text = self.ctm.get_string_until_sequence(&[SQ_MATH], &[])?;
for _ in 0..1 { for _ in 0..1 {
self.ctm.try_seek(); self.ctm.seek_one()?;
} }
Ok(MathBlock { Ok(MathBlock {
expression: asciimath_rs::parse(text), expression: asciimath_rs::parse(text),
@ -171,7 +160,6 @@ impl ParseBlock for Parser {
fn parse_quote(&mut self) -> ParseResult<Quote> { fn parse_quote(&mut self) -> ParseResult<Quote> {
let start_index = self.ctm.get_index(); let start_index = self.ctm.get_index();
self.ctm.seek_whitespace(); self.ctm.seek_whitespace();
let metadata = if let Ok(meta) = self.parse_inline_metadata() { let metadata = if let Ok(meta) = self.parse_inline_metadata() {
Some(meta) Some(meta)
} else { } else {
@ -179,7 +167,7 @@ impl ParseBlock for Parser {
}; };
if self.ctm.check_char(&META_CLOSE) { if self.ctm.check_char(&META_CLOSE) {
if self.ctm.next_char() == None { if self.ctm.next_char() == None {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
let mut quote = Quote::new(metadata); let mut quote = Quote::new(metadata);
@ -197,10 +185,8 @@ impl ParseBlock for Parser {
break; break;
} }
} }
quote.strip_linebreak();
if quote.text.len() == 0 { if quote.text.len() == 0 {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
Ok(quote) Ok(quote)
@ -208,12 +194,11 @@ impl ParseBlock for Parser {
/// Parses a paragraph /// Parses a paragraph
fn parse_paragraph(&mut self) -> ParseResult<Paragraph> { fn parse_paragraph(&mut self) -> ParseResult<Paragraph> {
self.ctm.seek_whitespace();
let mut paragraph = Paragraph::new(); let mut paragraph = Paragraph::new();
while let Ok(token) = self.parse_line() {
while let Ok(element) = self.parse_line() { paragraph.add_element(token);
paragraph.add_element(element);
let start_index = self.ctm.get_index(); let start_index = self.ctm.get_index();
if self.ctm.check_any_sequence(&BLOCK_SPECIAL_CHARS) if self.ctm.check_any_sequence(&BLOCK_SPECIAL_CHARS)
|| self.ctm.check_any(&self.block_break_at) || self.ctm.check_any(&self.block_break_at)
{ {
@ -228,7 +213,7 @@ impl ParseBlock for Parser {
if paragraph.elements.len() > 0 { if paragraph.elements.len() > 0 {
Ok(paragraph) Ok(paragraph)
} else { } else {
Err(self.ctm.err().into()) Err(self.ctm.err())
} }
} }
@ -242,7 +227,6 @@ impl ParseBlock for Parser {
let ordered = self.ctm.get_current().is_numeric(); let ordered = self.ctm.get_current().is_numeric();
list.ordered = ordered; list.ordered = ordered;
let mut list_hierarchy: Vec<ListItem> = Vec::new(); let mut list_hierarchy: Vec<ListItem> = Vec::new();
while let Ok(mut item) = self.parse_list_item() { while let Ok(mut item) = self.parse_list_item() {
while let Some(parent_item) = list_hierarchy.pop() { while let Some(parent_item) = list_hierarchy.pop() {
if parent_item.level < item.level { if parent_item.level < item.level {
@ -289,7 +273,7 @@ impl ParseBlock for Parser {
if list.items.len() > 0 { if list.items.len() > 0 {
Ok(list) Ok(list)
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
@ -301,7 +285,6 @@ impl ParseBlock for Parser {
} }
let seek_index = self.ctm.get_index(); let seek_index = self.ctm.get_index();
let mut table = Table::new(header); let mut table = Table::new(header);
while let Ok(_) = self.ctm.seek_one() { while let Ok(_) = self.ctm.seek_one() {
self.ctm.seek_any(&INLINE_WHITESPACE)?; self.ctm.seek_any(&INLINE_WHITESPACE)?;
if !self.ctm.check_any(&[MINUS, PIPE]) || self.ctm.check_char(&LB) { if !self.ctm.check_any(&[MINUS, PIPE]) || self.ctm.check_char(&LB) {
@ -336,7 +319,7 @@ impl ParseBlock for Parser {
path.push(character); path.push(character);
} }
if self.ctm.check_char(&LB) || path.is_empty() { if self.ctm.check_char(&LB) || path.is_empty() {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
if self.ctm.check_char(&IMPORT_CLOSE) { if self.ctm.check_char(&IMPORT_CLOSE) {
self.ctm.seek_one()?; self.ctm.seek_one()?;
@ -345,20 +328,22 @@ impl ParseBlock for Parser {
if self.section_nesting > 0 { if self.section_nesting > 0 {
self.section_return = Some(0); self.section_return = Some(0);
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
let metadata = self let metadata = self
.parse_inline_metadata() .parse_inline_metadata()
.ok() .ok()
.map(|m| m.get_string_map()) .map(|m| m.into())
.unwrap_or(HashMap::new()); .unwrap_or(HashMap::new());
self.ctm.seek_whitespace();
match self.import(path.clone(), &metadata) { match self.import(path.clone(), &metadata) {
ImportType::Document(Ok(anchor)) => Ok(Some(Import { path, anchor })), ImportType::Document(Ok(anchor)) => Ok(Some(Import { path, anchor })),
ImportType::Stylesheet(_) => Ok(None), ImportType::Stylesheet(_) => Ok(None),
ImportType::Bibliography(_) => Ok(None), ImportType::Bibliography(_) => Ok(None),
ImportType::Manifest(_) => Ok(None), ImportType::Manifest(_) => Ok(None),
_ => Err(self.ctm.err().into()), _ => Err(self.ctm.err()),
} }
} }
} }

@ -1,24 +1,16 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use super::{ParseError, ParseResult}; use super::{ParseError, ParseResult};
use crate::elements::tokens::*; use crate::elements::tokens::*;
use crate::elements::BibReference; use crate::elements::BibReference;
use crate::elements::*; use crate::elements::*;
use crate::parser::block::ParseBlock; use crate::parser::block::ParseBlock;
use crate::references::configuration::keys::{BIB_REF_DISPLAY, SMART_ARROWS};
use crate::references::glossary::GlossaryDisplay; use crate::references::glossary::GlossaryDisplay;
use crate::references::glossary::GlossaryReference; use crate::references::glossary::GlossaryReference;
use crate::references::templates::{GetTemplateVariables, Template, TemplateVariable}; use crate::references::templates::{GetTemplateVariables, Template, TemplateVariable};
use crate::utils::parsing::remove_single_backlslash;
use crate::Parser; use crate::Parser;
use bibliographix::references::bib_reference::BibRef; use bibliographix::references::bib_reference::BibRef;
use parking_lot::Mutex;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf; use std::sync::{Arc, Mutex, RwLock};
use std::sync::{Arc, RwLock};
pub(crate) trait ParseInline { pub(crate) trait ParseInline {
fn parse_surrounded(&mut self, surrounding: &char) -> ParseResult<Vec<Inline>>; fn parse_surrounded(&mut self, surrounding: &char) -> ParseResult<Vec<Inline>>;
@ -45,7 +37,6 @@ pub(crate) trait ParseInline {
fn parse_template(&mut self) -> ParseResult<Template>; fn parse_template(&mut self) -> ParseResult<Template>;
fn parse_character_code(&mut self) -> ParseResult<CharacterCode>; fn parse_character_code(&mut self) -> ParseResult<CharacterCode>;
fn parse_arrow(&mut self) -> ParseResult<Arrow>; fn parse_arrow(&mut self) -> ParseResult<Arrow>;
fn parse_anchor(&mut self) -> ParseResult<Anchor>;
} }
impl ParseInline for Parser { impl ParseInline for Parser {
@ -55,12 +46,11 @@ impl ParseInline for Parser {
self.ctm.assert_char(surrounding, Some(start_index))?; self.ctm.assert_char(surrounding, Some(start_index))?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
let mut inline = vec![self.parse_inline()?]; let mut inline = vec![self.parse_inline()?];
while !self.ctm.check_char(surrounding) { while !self.ctm.check_char(surrounding) {
if let Ok(result) = self.parse_inline() { if let Ok(result) = self.parse_inline() {
inline.push(result) inline.push(result)
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
if !self.ctm.check_eof() { if !self.ctm.check_eof() {
@ -79,10 +69,10 @@ impl ParseInline for Parser {
} }
} }
if self.ctm.check_char(&PIPE) || self.ctm.check_char(&LB) { if self.ctm.check_char(&PIPE) || self.ctm.check_char(&LB) {
Err(self.ctm.err().into()) Err(self.ctm.err())
} else if self.ctm.check_eof() { } else if self.ctm.check_eof() {
log::trace!("EOF"); log::trace!("EOF");
Err(self.ctm.err().into()) Err(self.ctm.err())
} else if let Ok(image) = self.parse_image() { } else if let Ok(image) = self.parse_image() {
log::trace!("Inline::Image {:?}", image); log::trace!("Inline::Image {:?}", image);
Ok(Inline::Image(image)) Ok(Inline::Image(image))
@ -108,7 +98,7 @@ impl ParseInline for Parser {
log::trace!("Inline::Striked"); log::trace!("Inline::Striked");
Ok(Inline::Striked(striked)) Ok(Inline::Striked(striked))
} else if let Ok(gloss) = self.parse_glossary_reference() { } else if let Ok(gloss) = self.parse_glossary_reference() {
log::trace!("Inline::GlossaryReference {}", gloss.lock().short); log::trace!("Inline::GlossaryReference {}", gloss.lock().unwrap().short);
Ok(Inline::GlossaryReference(gloss)) Ok(Inline::GlossaryReference(gloss))
} else if let Ok(superscript) = self.parse_superscript() { } else if let Ok(superscript) = self.parse_superscript() {
log::trace!("Inline::Superscript"); log::trace!("Inline::Superscript");
@ -134,9 +124,6 @@ impl ParseInline for Parser {
} else if let Ok(arrow) = self.parse_arrow() { } else if let Ok(arrow) = self.parse_arrow() {
log::trace!("Inline::Arrow {:?}", arrow); log::trace!("Inline::Arrow {:?}", arrow);
Ok(Inline::Arrow(arrow)) Ok(Inline::Arrow(arrow))
} else if let Ok(anchor) = self.parse_anchor() {
log::trace!("Inline::Anchor {:?}", anchor);
Ok(Inline::Anchor(anchor))
} else { } else {
let plain = self.parse_plain()?; let plain = self.parse_plain()?;
log::trace!("Inline::Plain {}", plain.value); log::trace!("Inline::Plain {}", plain.value);
@ -151,27 +138,25 @@ impl ParseInline for Parser {
self.ctm.seek_one()?; self.ctm.seek_one()?;
if let Ok(url) = self.parse_url(true) { if let Ok(url) = self.parse_url(true) {
let metadata = self.parse_inline_metadata().ok(); let metadata = if let Ok(meta) = self.parse_inline_metadata() {
Some(meta)
} else {
None
};
let path = url.url.clone(); let path = url.url.clone();
let pending_image = self
.options
.document
.images
.lock()
.add_image(PathBuf::from(path));
if let Some(meta) = &metadata {
pending_image.lock().assign_from_meta(meta)
}
Ok(Image { Ok(Image {
url, url,
metadata, metadata,
image_data: pending_image, download: self
.options
.document
.downloads
.lock()
.unwrap()
.add_download(path),
}) })
} else { } else {
Err(self.ctm.rewind_with_error(start_index).into()) Err(self.ctm.rewind_with_error(start_index))
} }
} }
@ -196,7 +181,7 @@ impl ParseInline for Parser {
self.inline_break_at.pop(); self.inline_break_at.pop();
self.ctm.seek_one()?; self.ctm.seek_one()?;
} else if !short_syntax { } else if !short_syntax {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
self.ctm.assert_char(&URL_OPEN, Some(start_index))?; self.ctm.assert_char(&URL_OPEN, Some(start_index))?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
@ -229,7 +214,7 @@ impl ParseInline for Parser {
} else if self.ctm.check_char(&SPACE) { } else if self.ctm.check_char(&SPACE) {
false false
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
}; };
self.ctm.seek_one()?; self.ctm.seek_one()?;
self.ctm.assert_char(&CHECK_CLOSE, Some(start_index))?; self.ctm.assert_char(&CHECK_CLOSE, Some(start_index))?;
@ -244,12 +229,11 @@ impl ParseInline for Parser {
self.ctm.assert_sequence(&BOLD, Some(start_index))?; self.ctm.assert_sequence(&BOLD, Some(start_index))?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
let mut inline = vec![self.parse_inline()?]; let mut inline = vec![self.parse_inline()?];
while !self.ctm.check_sequence(&BOLD) { while !self.ctm.check_sequence(&BOLD) {
if let Ok(result) = self.parse_inline() { if let Ok(result) = self.parse_inline() {
inline.push(result); inline.push(result);
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
self.ctm.seek_one()?; self.ctm.seek_one()?;
@ -273,12 +257,12 @@ impl ParseInline for Parser {
if let Ok(result) = self.parse_inline() { if let Ok(result) = self.parse_inline() {
inline.push(result); inline.push(result);
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
self.ctm.rewind(self.ctm.get_index() - STRIKED.len()); self.ctm.rewind(self.ctm.get_index() - STRIKED.len());
if self.ctm.check_any(WHITESPACE) { if self.ctm.check_any(WHITESPACE) {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
for _ in 0..(STRIKED.len() + 1) { for _ in 0..(STRIKED.len() + 1) {
self.ctm.seek_one()?; self.ctm.seek_one()?;
@ -306,10 +290,9 @@ impl ParseInline for Parser {
let start_index = self.ctm.get_index(); let start_index = self.ctm.get_index();
self.ctm.assert_char(&BACKTICK, Some(start_index))?; self.ctm.assert_char(&BACKTICK, Some(start_index))?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
let mut content = let content = self
self.ctm .ctm
.get_string_until_any_or_rewind(&[BACKTICK, LB], &[], start_index)?; .get_string_until_any_or_rewind(&[BACKTICK, LB], &[], start_index)?;
content = remove_single_backlslash(content);
self.ctm.assert_char(&BACKTICK, Some(start_index))?; self.ctm.assert_char(&BACKTICK, Some(start_index))?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
@ -343,7 +326,7 @@ impl ParseInline for Parser {
name, name,
}) })
} else { } else {
Err(self.ctm.rewind_with_error(start_index).into()) Err(self.ctm.rewind_with_error(start_index))
} }
} }
@ -360,7 +343,7 @@ impl ParseInline for Parser {
)?; )?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
if color.is_empty() { if color.is_empty() {
return Err(self.ctm.err().into()); return Err(self.ctm.err());
} }
Ok(Colored { Ok(Colored {
value: Box::new(self.parse_inline()?), value: Box::new(self.parse_inline()?),
@ -380,15 +363,7 @@ impl ParseInline for Parser {
let bib_ref = BibRef::new(key.clone()); let bib_ref = BibRef::new(key.clone());
let ref_entry = Arc::new(RwLock::new(BibReference::new( let ref_entry = Arc::new(RwLock::new(BibReference::new(
key, key,
Some( self.options.document.config.get_ref_entry(BIB_REF_DISPLAY),
self.options
.document
.config
.lock()
.style
.bib_ref_display
.clone(),
),
bib_ref.anchor(), bib_ref.anchor(),
))); )));
self.options self.options
@ -396,6 +371,7 @@ impl ParseInline for Parser {
.bibliography .bibliography
.root_ref_anchor() .root_ref_anchor()
.lock() .lock()
.unwrap()
.insert(bib_ref); .insert(bib_ref);
Ok(ref_entry) Ok(ref_entry)
@ -443,7 +419,7 @@ impl ParseInline for Parser {
self.ctm self.ctm
.get_string_until_any_or_rewind(&WHITESPACE, &[TILDE], start_index)?; .get_string_until_any_or_rewind(&WHITESPACE, &[TILDE], start_index)?;
if key.is_empty() { if key.is_empty() {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
while !key.is_empty() && !key.chars().last().unwrap().is_alphabetic() { while !key.is_empty() && !key.chars().last().unwrap().is_alphabetic() {
self.ctm.rewind(self.ctm.get_index() - 1); self.ctm.rewind(self.ctm.get_index() - 1);
@ -456,13 +432,14 @@ impl ParseInline for Parser {
.document .document
.glossary .glossary
.lock() .lock()
.unwrap()
.add_reference(reference)) .add_reference(reference))
} }
/// parses plain text as a string until it encounters an unescaped special inline char /// parses plain text as a string until it encounters an unescaped special inline char
fn parse_plain(&mut self) -> ParseResult<PlainText> { fn parse_plain(&mut self) -> ParseResult<PlainText> {
if self.ctm.check_char(&LB) { if self.ctm.check_char(&LB) {
return Err(self.ctm.err().into()); return Err(self.ctm.err());
} }
let mut characters = String::new(); let mut characters = String::new();
if !self.ctm.check_char(&SPECIAL_ESCAPE) { if !self.ctm.check_char(&SPECIAL_ESCAPE) {
@ -487,7 +464,7 @@ impl ParseInline for Parser {
if characters.len() > 0 { if characters.len() > 0 {
Ok(PlainText { value: characters }) Ok(PlainText { value: characters })
} else { } else {
Err(self.ctm.err().into()) Err(self.ctm.err())
} }
} }
@ -511,7 +488,7 @@ impl ParseInline for Parser {
if values.len() == 0 { if values.len() == 0 {
// if there was a linebreak (the metadata wasn't closed) or there is no inner data // if there was a linebreak (the metadata wasn't closed) or there is no inner data
// return an error // return an error
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
Ok(InlineMetadata { data: values }) Ok(InlineMetadata { data: values })
@ -526,7 +503,6 @@ impl ParseInline for Parser {
self.ctm.seek_any(&INLINE_WHITESPACE)?; self.ctm.seek_any(&INLINE_WHITESPACE)?;
let mut value = MetadataValue::Bool(true); let mut value = MetadataValue::Bool(true);
if self.ctm.check_char(&EQ) { if self.ctm.check_char(&EQ) {
self.ctm.seek_one()?; self.ctm.seek_one()?;
self.ctm.seek_any(&INLINE_WHITESPACE)?; self.ctm.seek_any(&INLINE_WHITESPACE)?;
@ -587,7 +563,7 @@ impl ParseInline for Parser {
{ {
name_str name_str
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
}; };
if !self.ctm.check_eof() { if !self.ctm.check_eof() {
self.ctm.seek_one()?; self.ctm.seek_one()?;
@ -611,7 +587,7 @@ impl ParseInline for Parser {
self.ctm.seek_one()?; self.ctm.seek_one()?;
if self.ctm.check_char(&TEMPLATE) { if self.ctm.check_char(&TEMPLATE) {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
let mut elements = Vec::new(); let mut elements = Vec::new();
@ -666,8 +642,15 @@ impl ParseInline for Parser {
/// Parses an arrow /// Parses an arrow
fn parse_arrow(&mut self) -> ParseResult<Arrow> { fn parse_arrow(&mut self) -> ParseResult<Arrow> {
if !self.options.document.config.lock().features.smart_arrows { if !self
Err(self.ctm.err().into()) .options
.document
.config
.get_entry(SMART_ARROWS)
.and_then(|e| e.get().as_bool())
.unwrap_or(true)
{
Err(self.ctm.err())
} else if self.ctm.check_sequence(A_LEFT_RIGHT_ARROW) { } else if self.ctm.check_sequence(A_LEFT_RIGHT_ARROW) {
self.ctm.seek_one()?; self.ctm.seek_one()?;
Ok(Arrow::LeftRightArrow) Ok(Arrow::LeftRightArrow)
@ -687,25 +670,7 @@ impl ParseInline for Parser {
self.ctm.seek_one()?; self.ctm.seek_one()?;
Ok(Arrow::BigLeftArrow) Ok(Arrow::BigLeftArrow)
} else { } else {
Err(self.ctm.err().into()) Err(self.ctm.err())
} }
} }
/// Parses an anchor element
fn parse_anchor(&mut self) -> ParseResult<Anchor> {
let start_index = self.ctm.get_index();
self.ctm.assert_sequence(&ANCHOR_START, Some(start_index))?;
self.ctm.seek_one()?;
let key = self.ctm.get_string_until_any_or_rewind(
&[ANCHOR_STOP],
&INLINE_WHITESPACE,
start_index,
)?;
self.ctm.try_seek();
Ok(Anchor {
inner: Box::new(Line::Text(TextLine::new())),
key,
})
}
} }

@ -1,14 +1,7 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use super::ParseResult; use super::ParseResult;
use crate::elements::tokens::*; use crate::elements::tokens::*;
use crate::elements::Inline::LineBreak;
use crate::elements::{BibEntry, Metadata}; use crate::elements::{BibEntry, Metadata};
use crate::elements::{Cell, Centered, Header, Line, ListItem, Row, Ruler, TextLine}; use crate::elements::{Cell, Centered, Header, Inline, Line, ListItem, Row, Ruler, TextLine};
use crate::parser::inline::ParseInline; use crate::parser::inline::ParseInline;
use crate::Parser; use crate::Parser;
use bibliographix::bibliography::bibliography_entry::BibliographyEntry; use bibliographix::bibliography::bibliography_entry::BibliographyEntry;
@ -23,7 +16,6 @@ pub(crate) trait ParseLine {
fn parse_row(&mut self) -> ParseResult<Row>; fn parse_row(&mut self) -> ParseResult<Row>;
fn parse_centered(&mut self) -> ParseResult<Centered>; fn parse_centered(&mut self) -> ParseResult<Centered>;
fn parse_ruler(&mut self) -> ParseResult<Ruler>; fn parse_ruler(&mut self) -> ParseResult<Ruler>;
fn parse_paragraph_break(&mut self) -> ParseResult<TextLine>;
fn parse_text_line(&mut self) -> ParseResult<TextLine>; fn parse_text_line(&mut self) -> ParseResult<TextLine>;
fn parse_bib_entry(&mut self) -> ParseResult<BibEntry>; fn parse_bib_entry(&mut self) -> ParseResult<BibEntry>;
} }
@ -33,7 +25,7 @@ impl ParseLine for Parser {
fn parse_line(&mut self) -> ParseResult<Line> { fn parse_line(&mut self) -> ParseResult<Line> {
if self.ctm.check_eof() { if self.ctm.check_eof() {
log::trace!("EOF"); log::trace!("EOF");
Err(self.ctm.err().into()) Err(self.ctm.err())
} else { } else {
if let Ok(ruler) = self.parse_ruler() { if let Ok(ruler) = self.parse_ruler() {
log::trace!("Line::Ruler"); log::trace!("Line::Ruler");
@ -44,14 +36,11 @@ impl ParseLine for Parser {
} else if let Ok(bib) = self.parse_bib_entry() { } else if let Ok(bib) = self.parse_bib_entry() {
log::trace!("Line::BibEntry"); log::trace!("Line::BibEntry");
Ok(Line::BibEntry(bib)) Ok(Line::BibEntry(bib))
} else if let Ok(text) = self.parse_paragraph_break() {
log::trace!("Line::LineBreak");
Ok(Line::Text(text))
} else if let Ok(text) = self.parse_text_line() { } else if let Ok(text) = self.parse_text_line() {
log::trace!("Line::Text"); log::trace!("Line::Text");
Ok(Line::Text(text)) Ok(Line::Text(text))
} else { } else {
Err(self.ctm.err().into()) Err(self.ctm.err())
} }
} }
} }
@ -64,9 +53,6 @@ impl ParseLine for Parser {
self.ctm.get_text()[start_index..self.ctm.get_index()] self.ctm.get_text()[start_index..self.ctm.get_index()]
.iter() .iter()
.for_each(|e| anchor.push(*e)); .for_each(|e| anchor.push(*e));
if let Some(last) = self.section_anchors.last() {
anchor = format!("{}-{}", last, anchor);
}
anchor.retain(|c| !c.is_whitespace()); anchor.retain(|c| !c.is_whitespace());
log::trace!("Line::Header"); log::trace!("Line::Header");
Ok(Header::new(line, anchor)) Ok(Header::new(line, anchor))
@ -90,11 +76,11 @@ impl ParseLine for Parser {
} }
if !self.ctm.check_any(&INLINE_WHITESPACE) { if !self.ctm.check_any(&INLINE_WHITESPACE) {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
self.ctm.seek_any(&INLINE_WHITESPACE)?; self.ctm.seek_any(&INLINE_WHITESPACE)?;
if self.ctm.check_char(&MINUS) { if self.ctm.check_char(&MINUS) {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
let item = ListItem::new(self.parse_line()?, level as u16, ordered); let item = ListItem::new(self.parse_line()?, level as u16, ordered);
@ -110,7 +96,7 @@ impl ParseLine for Parser {
self.ctm.assert_char(&PIPE, Some(start_index))?; self.ctm.assert_char(&PIPE, Some(start_index))?;
self.ctm.seek_one()?; self.ctm.seek_one()?;
if self.ctm.check_char(&PIPE) { if self.ctm.check_char(&PIPE) {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
self.inline_break_at.push(PIPE); self.inline_break_at.push(PIPE);
@ -148,7 +134,7 @@ impl ParseLine for Parser {
log::trace!("Line::TableRow"); log::trace!("Line::TableRow");
Ok(row) Ok(row)
} else { } else {
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
@ -177,8 +163,6 @@ impl ParseLine for Parser {
/// Parses a line of text /// Parses a line of text
fn parse_text_line(&mut self) -> ParseResult<TextLine> { fn parse_text_line(&mut self) -> ParseResult<TextLine> {
let mut text = TextLine::new(); let mut text = TextLine::new();
let start_index = self.ctm.get_index();
while let Ok(subtext) = self.parse_inline() { while let Ok(subtext) = self.parse_inline() {
text.add_subtext(subtext); text.add_subtext(subtext);
if self.ctm.check_eof() || self.ctm.check_any(&self.inline_break_at) { if self.ctm.check_eof() || self.ctm.check_any(&self.inline_break_at) {
@ -186,36 +170,21 @@ impl ParseLine for Parser {
} }
} }
// add a linebreak when encountering \n\n
if self.ctm.check_char(&LB) { if self.ctm.check_char(&LB) {
self.ctm.try_seek(); if let Ok(_) = self.ctm.seek_one() {
if self.ctm.check_char(&LB) {
if self.ctm.check_char(&LB) { text.add_subtext(Inline::LineBreak)
text.add_subtext(LineBreak); }
self.ctm.try_seek();
} }
} }
if text.subtext.len() > 0 { if text.subtext.len() > 0 {
Ok(text) Ok(text)
} else { } else {
Err(self.ctm.rewind_with_error(start_index).into()) Err(self.ctm.err())
} }
} }
/// Parses a paragraph break
fn parse_paragraph_break(&mut self) -> ParseResult<TextLine> {
let start_index = self.ctm.get_index();
self.ctm.assert_char(&LB, Some(start_index))?;
self.ctm.seek_one()?;
let mut line = TextLine::new();
line.subtext.push(LineBreak);
Ok(line)
}
fn parse_bib_entry(&mut self) -> ParseResult<BibEntry> { fn parse_bib_entry(&mut self) -> ParseResult<BibEntry> {
let start_index = self.ctm.get_index(); let start_index = self.ctm.get_index();
self.ctm.seek_any(&INLINE_WHITESPACE)?; self.ctm.seek_any(&INLINE_WHITESPACE)?;
@ -242,7 +211,7 @@ impl ParseLine for Parser {
msg, msg,
self.get_position_string() self.get_position_string()
); );
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
} else { } else {
@ -263,17 +232,17 @@ impl ParseLine for Parser {
msg, msg,
self.get_position_string() self.get_position_string()
); );
return Err(self.ctm.rewind_with_error(start_index).into()); return Err(self.ctm.rewind_with_error(start_index));
} }
} }
}; };
self.ctm.seek_whitespace();
self.options self.options
.document .document
.bibliography .bibliography
.entry_dictionary() .entry_dictionary()
.lock() .lock()
.unwrap()
.insert(entry); .insert(entry);
Ok(BibEntry { Ok(BibEntry {
@ -283,6 +252,7 @@ impl ParseLine for Parser {
.bibliography .bibliography
.entry_dictionary() .entry_dictionary()
.lock() .lock()
.unwrap()
.get(&key) .get(&key)
.unwrap(), .unwrap(),
key, key,

@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
pub(crate) mod block; pub(crate) mod block;
pub(crate) mod inline; pub(crate) mod inline;
pub(crate) mod line; pub(crate) mod line;
@ -11,54 +5,31 @@ pub(crate) mod line;
use self::block::ParseBlock; use self::block::ParseBlock;
use crate::elements::tokens::LB; use crate::elements::tokens::LB;
use crate::elements::{Document, ImportAnchor}; use crate::elements::{Document, ImportAnchor};
use crate::settings::SettingsError; use crate::references::configuration::keys::{
use charred::tapemachine::{CharTapeMachine, TapeError}; IMP_BIBLIOGRAPHY, IMP_CONFIGS, IMP_GLOSSARY, IMP_IGNORE, IMP_STYLESHEETS,
};
use crate::references::configuration::Value;
use charred::tapemachine::{CharTapeMachine, TapeError, TapeResult};
use crossbeam_utils::sync::WaitGroup; use crossbeam_utils::sync::WaitGroup;
use regex::Regex; use regex::Regex;
use std::collections::HashMap; use std::collections::HashMap;
use std::fmt;
use std::fs::{read_to_string, File}; use std::fs::{read_to_string, File};
use std::io::{self, BufReader}; use std::io::BufReader;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::{Arc, Mutex, RwLock}; use std::sync::{Arc, Mutex, RwLock};
use std::thread; use std::thread;
pub type ParseResult<T> = Result<T, ParseError>; pub type ParseResult<T> = TapeResult<T>;
pub type ParseError = TapeError;
#[derive(Debug)]
pub enum ParseError {
TapeError(TapeError),
SettingsError(SettingsError),
IoError(io::Error),
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ParseError::TapeError(e) => write!(f, "{}", e),
ParseError::SettingsError(e) => write!(f, "{}", e),
ParseError::IoError(e) => write!(f, "IO Error: {}", e),
}
}
}
impl From<TapeError> for ParseError {
fn from(e: TapeError) -> Self {
Self::TapeError(e)
}
}
impl From<SettingsError> for ParseError {
fn from(e: SettingsError) -> Self {
Self::SettingsError(e)
}
}
impl From<io::Error> for ParseError { const DEFAULT_IMPORTS: &'static [(&str, &str)] = &[
fn from(e: io::Error) -> Self { ("snekdown.toml", "manifest"),
Self::IoError(e) ("manifest.toml", "manifest"),
} ("bibliography.toml", "bibliography"),
} ("bibliography2.bib.toml", "bibliography"),
("glossary.toml", "glossary"),
("style.css", "stylesheet"),
];
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct ParserOptions { pub struct ParserOptions {
@ -87,6 +58,13 @@ impl ParserOptions {
self self
} }
/// If external sources should be cached after being downloaded
pub fn use_cache(self, value: bool) -> Self {
self.document.downloads.lock().unwrap().use_cache = value;
self
}
} }
pub struct Parser { pub struct Parser {
@ -94,7 +72,6 @@ pub struct Parser {
pub(crate) ctm: CharTapeMachine, pub(crate) ctm: CharTapeMachine,
section_nesting: u8, section_nesting: u8,
sections: Vec<u8>, sections: Vec<u8>,
section_anchors: Vec<String>,
section_return: Option<u8>, section_return: Option<u8>,
wg: WaitGroup, wg: WaitGroup,
pub(crate) block_break_at: Vec<char>, pub(crate) block_break_at: Vec<char>,
@ -107,7 +84,6 @@ impl Parser {
pub fn with_defaults(options: ParserOptions) -> Self { pub fn with_defaults(options: ParserOptions) -> Self {
let text = if let Some(path) = &options.path { let text = if let Some(path) = &options.path {
let mut text = read_to_string(&path).unwrap(); let mut text = read_to_string(&path).unwrap();
text = text.replace("\r\n", "\n");
if text.chars().last() != Some('\n') { if text.chars().last() != Some('\n') {
text.push('\n'); text.push('\n');
} }
@ -119,7 +95,6 @@ impl Parser {
Self { Self {
options, options,
sections: Vec::new(), sections: Vec::new(),
section_anchors: Vec::new(),
section_nesting: 0, section_nesting: 0,
section_return: None, section_return: None,
wg: WaitGroup::new(), wg: WaitGroup::new(),
@ -143,18 +118,13 @@ impl Parser {
/// Returns a string of the current position in the file /// Returns a string of the current position in the file
pub(crate) fn get_position_string(&self) -> String { pub(crate) fn get_position_string(&self) -> String {
let char_index = self.ctm.get_index(); let char_index = self.ctm.get_index();
self.get_position_string_for_index(char_index)
}
/// Returns a string of the given index position in the file
fn get_position_string_for_index(&self, char_index: usize) -> String {
let text = self.ctm.get_text(); let text = self.ctm.get_text();
let mut text_until = text[..char_index].to_vec(); let mut text_until = text[..char_index].to_vec();
let line_number = text_until.iter().filter(|c| c == &&LB).count(); let line_number = text_until.iter().filter(|c| c == &&LB).count();
text_until.reverse(); text_until.reverse();
let mut inline_pos = 0; let mut inline_pos = 0;
while inline_pos < text_until.len() && text_until[inline_pos] != LB { while text_until[inline_pos] != LB {
inline_pos += 1; inline_pos += 1;
} }
if let Some(path) = &self.options.path { if let Some(path) = &self.options.path {
@ -187,15 +157,27 @@ impl Parser {
path.to_str().unwrap(), path.to_str().unwrap(),
self.get_position_string(), self.get_position_string(),
); );
return Err(self.ctm.assert_error(None).into()); return Err(self.ctm.assert_error(None));
}
{
let mut paths = self.options.paths.lock().unwrap();
if paths.iter().find(|item| **item == path) != None {
log::warn!(
"Import of \"{}\" failed: Already imported.\n\t--> {}\n",
path.to_str().unwrap(),
self.get_position_string(),
);
return Err(self.ctm.assert_error(None));
}
paths.push(path.clone());
} }
let anchor = Arc::new(RwLock::new(ImportAnchor::new())); let anchor = Arc::new(RwLock::new(ImportAnchor::new()));
let anchor_clone = Arc::clone(&anchor); let anchor_clone = Arc::clone(&anchor);
let wg = self.wg.clone(); let wg = self.wg.clone();
let mut child_parser = self.create_child(path.clone()); let mut chid_parser = self.create_child(path.clone());
let _ = thread::spawn(move || { let _ = thread::spawn(move || {
let document = child_parser.parse(); let document = chid_parser.parse();
anchor_clone.write().unwrap().set_document(document); anchor_clone.write().unwrap().set_document(document);
drop(wg); drop(wg);
@ -218,7 +200,7 @@ impl Parser {
/// Returns the text of an imported text file /// Returns the text of an imported text file
fn import_text_file(&self, path: PathBuf) -> ParseResult<String> { fn import_text_file(&self, path: PathBuf) -> ParseResult<String> {
read_to_string(path).map_err(ParseError::from) read_to_string(path).map_err(|_| self.ctm.err())
} }
fn import_stylesheet(&mut self, path: PathBuf) -> ParseResult<()> { fn import_stylesheet(&mut self, path: PathBuf) -> ParseResult<()> {
@ -227,6 +209,7 @@ impl Parser {
.document .document
.downloads .downloads
.lock() .lock()
.unwrap()
.add_download(path.to_str().unwrap().to_string()), .add_download(path.to_str().unwrap().to_string()),
); );
@ -234,12 +217,13 @@ impl Parser {
} }
fn import_manifest(&mut self, path: PathBuf) -> ParseResult<()> { fn import_manifest(&mut self, path: PathBuf) -> ParseResult<()> {
self.options let contents = self.import_text_file(path)?;
.document let value = contents
.config .parse::<toml::Value>()
.lock() .map_err(|_| self.ctm.err())?;
.merge(path) self.options.document.config.set_from_toml(&value);
.map_err(ParseError::from)
Ok(())
} }
/// Imports a glossary /// Imports a glossary
@ -252,6 +236,7 @@ impl Parser {
.document .document
.glossary .glossary
.lock() .lock()
.unwrap()
.assign_from_toml(value) .assign_from_toml(value)
.unwrap_or_else(|e| log::error!("{}", e)); .unwrap_or_else(|e| log::error!("{}", e));
@ -259,7 +244,7 @@ impl Parser {
} }
/// Imports a path /// Imports a path
fn import(&mut self, path: String, args: &HashMap<String, String>) -> ImportType { fn import(&mut self, path: String, args: &HashMap<String, Value>) -> ImportType {
log::debug!( log::debug!(
"Importing file {}\n\t--> {}\n", "Importing file {}\n\t--> {}\n",
path, path,
@ -278,24 +263,23 @@ impl Parser {
.file_name() .file_name()
.and_then(|f| Some(f.to_str().unwrap().to_string())) .and_then(|f| Some(f.to_str().unwrap().to_string()))
{ {
let ignore = &self.options.document.config.lock().imports.ignored_imports; if let Some(Value::Array(ignore)) = self
if ignore.contains(&fname) { .options
return ImportType::None; .document
} .config
} .get_entry(IMP_IGNORE)
{ .and_then(|e| Some(e.get().clone()))
let mut paths = self.options.paths.lock().unwrap(); {
if paths.iter().find(|item| **item == path).is_some() { let ignore = ignore
log::warn!( .iter()
"Import of \"{}\" failed: Already imported.\n\t--> {}\n", .map(|v| v.as_string())
path.to_str().unwrap(), .collect::<Vec<String>>();
self.get_position_string(), if ignore.contains(&fname) {
); return ImportType::None;
return ImportType::None; }
} }
paths.push(path.clone());
} }
match args.get("type").cloned() { match args.get("type").map(|e| e.as_string().to_lowercase()) {
Some(s) if s == "stylesheet".to_string() => { Some(s) if s == "stylesheet".to_string() => {
ImportType::Stylesheet(self.import_stylesheet(path)) ImportType::Stylesheet(self.import_stylesheet(path))
} }
@ -344,18 +328,7 @@ impl Parser {
if self.ctm.check_eof() { if self.ctm.check_eof() {
break; break;
} }
match err { eprintln!("{}", err);
ParseError::TapeError(t) => {
log::error!(
"Parse Error: {}\n\t--> {}\n",
t,
self.get_position_string_for_index(t.get_index())
)
}
_ => {
log::error!("{}", err)
}
}
break; break;
} }
} }
@ -364,10 +337,14 @@ impl Parser {
let wg = self.wg.clone(); let wg = self.wg.clone();
self.wg = WaitGroup::new(); self.wg = WaitGroup::new();
if !self.options.is_child { if !self.options.is_child {
self.import( for (path, file_type) in DEFAULT_IMPORTS {
"Manifest.toml".to_string(), if self.transform_path(path.to_string()).exists() {
&maplit::hashmap! {"type".to_string() => "manifest".to_string()}, self.import(
); path.to_string(),
&maplit::hashmap! {"type".to_string() => Value::String(file_type.to_string())},
);
}
}
} }
wg.wait(); wg.wait();
if !self.options.is_child { if !self.options.is_child {
@ -385,25 +362,57 @@ impl Parser {
/// Imports files from the configs import values /// Imports files from the configs import values
fn import_from_config(&mut self) { fn import_from_config(&mut self) {
let config = Arc::clone(&self.options.document.config); if let Some(Value::Array(mut imp)) = self
.options
let mut stylesheets = config.lock().imports.included_stylesheets.clone(); .document
let args = maplit::hashmap! {"type".to_string() => "stylesheet".to_string()}; .config
while let Some(s) = stylesheets.pop() { .get_entry(IMP_STYLESHEETS)
self.import(s, &args); .and_then(|e| Some(e.get().clone()))
{
let args =
maplit::hashmap! {"type".to_string() => Value::String("stylesheet".to_string())};
while let Some(Value::String(s)) = imp.pop() {
self.import(s, &args);
}
} }
if let Some(Value::Array(mut imp)) = self
let mut bibliography = config.lock().imports.included_bibliography.clone(); .options
let args = maplit::hashmap! {"type".to_string() => "bibliography".to_string()}; .document
while let Some(s) = bibliography.pop() { .config
self.import(s, &args); .get_entry(IMP_CONFIGS)
.and_then(|e| Some(e.get().clone()))
{
let args = maplit::hashmap! {"type".to_string() => Value::String("config".to_string())};
while let Some(Value::String(s)) = imp.pop() {
self.import(s, &args);
}
}
if let Some(Value::Array(mut imp)) = self
.options
.document
.config
.get_entry(IMP_BIBLIOGRAPHY)
.and_then(|e| Some(e.get().clone()))
{
let args =
maplit::hashmap! {"type".to_string() => Value::String("bibliography".to_string())};
while let Some(Value::String(s)) = imp.pop() {
self.import(s, &args);
}
} }
let mut glossaries = config.lock().imports.included_glossaries.clone(); if let Some(Value::Array(mut imp)) = self
.options
let args = maplit::hashmap! {"type".to_string() =>"glossary".to_string()}; .document
while let Some(s) = glossaries.pop() { .config
self.import(s, &args); .get_entry(IMP_GLOSSARY)
.and_then(|e| Some(e.get().clone()))
{
let args =
maplit::hashmap! {"type".to_string() => Value::String("glossary".to_string())};
while let Some(Value::String(s)) = imp.pop() {
self.import(s, &args);
}
} }
} }
} }

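Since the `ParseError` enum above unifies tape, settings, and IO failures, here is a short sketch of how its `From` impls let `?` perform the conversion inside the parser module; the `read_import` helper itself is made up for illustration.

```rust
use std::fs::read_to_string;
use std::path::Path;

/// Hypothetical helper inside the parser module: the io::Error returned by
/// read_to_string is converted into ParseError::IoError by the From impl above.
fn read_import(path: &Path) -> ParseResult<String> {
    let text = read_to_string(path)?; // io::Error -> ParseError via `?`
    Ok(text)
}
```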
@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::{Anchor, BoldText, ItalicText, Line, List, ListItem, PlainText, TextLine}; use crate::elements::{Anchor, BoldText, ItalicText, Line, List, ListItem, PlainText, TextLine};
use crate::elements::{Inline, Url}; use crate::elements::{Inline, Url};
use bibliographix::bibliography::bib_types::article::Article; use bibliographix::bibliography::bib_types::article::Article;
@ -39,6 +33,7 @@ pub fn create_bib_list(entries: Vec<BibliographyEntryReference>) -> List {
for entry in entries { for entry in entries {
entry entry
.lock() .lock()
.unwrap()
.raw_fields .raw_fields
.insert("ord".to_string(), count.to_string()); .insert("ord".to_string(), count.to_string());
list.add_item(get_item_for_entry(entry)); list.add_item(get_item_for_entry(entry));
@ -50,7 +45,7 @@ pub fn create_bib_list(entries: Vec<BibliographyEntryReference>) -> List {
/// Returns the list item for a bib entry /// Returns the list item for a bib entry
fn get_item_for_entry(entry: BibliographyEntryReference) -> ListItem { fn get_item_for_entry(entry: BibliographyEntryReference) -> ListItem {
let entry = entry.lock(); let entry = entry.lock().unwrap();
match &entry.bib_type { match &entry.bib_type {
BibliographyType::Article(a) => get_item_for_article(&*entry, a), BibliographyType::Article(a) => get_item_for_article(&*entry, a),

@ -0,0 +1,26 @@
#![allow(unused)]
pub const BIB_REF_DISPLAY: &str = "bib-ref-display";
pub const META_LANG: &str = "language";
// import and include options
pub const IMP_IGNORE: &str = "ignored-imports";
pub const IMP_STYLESHEETS: &str = "included-stylesheets";
pub const IMP_CONFIGS: &str = "included-configs";
pub const IMP_BIBLIOGRAPHY: &str = "included-bibliography";
pub const IMP_GLOSSARY: &str = "included-glossary";
pub const EMBED_EXTERNAL: &str = "embed-external";
pub const SMART_ARROWS: &str = "smart-arrows";
pub const INCLUDE_MATHJAX: &str = "include-math-jax";
// PDF options
pub const PDF_DISPLAY_HEADER_FOOTER: &str = "pfd-display-header-footer";
pub const PDF_HEADER_TEMPLATE: &str = "pdf-header-template";
pub const PDF_FOOTER_TEMPLATE: &str = "pdf-footer-template";
pub const PDF_MARGIN_TOP: &str = "pdf-margin-top";
pub const PDF_MARGIN_BOTTOM: &str = "pdf-margin-bottom";
pub const PDF_MARGIN_LEFT: &str = "pdf-margin-left";
pub const PDF_MARGIN_RIGHT: &str = "pdf-margin-right";
pub const PDF_PAGE_HEIGHT: &str = "pdf-page-height";
pub const PDF_PAGE_WIDTH: &str = "pdf-page-width";
pub const PDF_PAGE_SCALE: &str = "pdf-page-scale";

@ -0,0 +1,188 @@
use crate::elements::MetadataValue;
use crate::references::configuration::keys::{
BIB_REF_DISPLAY, META_LANG, PDF_DISPLAY_HEADER_FOOTER, PDF_FOOTER_TEMPLATE,
PDF_HEADER_TEMPLATE, PDF_MARGIN_BOTTOM, PDF_MARGIN_TOP,
};
use crate::references::templates::Template;
use serde::export::TryFrom;
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
pub(crate) mod keys;
#[derive(Clone, Debug)]
pub enum Value {
String(String),
Bool(bool),
Float(f64),
Integer(i64),
Template(Template),
Array(Vec<Value>),
}
#[derive(Clone, Debug)]
pub struct ConfigEntry {
inner: Value,
}
pub type ConfigRefEntry = Arc<RwLock<ConfigEntry>>;
#[derive(Clone, Debug)]
pub struct Configuration {
config: Arc<RwLock<HashMap<String, ConfigRefEntry>>>,
}
impl Value {
pub fn as_string(&self) -> String {
match self {
Value::String(string) => string.clone(),
Value::Integer(int) => format!("{}", int),
Value::Float(f) => format!("{:02}", f),
Value::Bool(b) => format!("{}", b),
Value::Array(a) => a.iter().fold("".to_string(), |a, b| {
format!("{} \"{}\"", a, b.as_string())
}),
_ => "".to_string(),
}
}
/// Returns the bool value if the value is a boolean
pub fn as_bool(&self) -> Option<bool> {
match self {
Value::Bool(b) => Some(*b),
_ => None,
}
}
pub fn as_float(&self) -> Option<f64> {
match self {
Value::Float(v) => Some(*v),
_ => None,
}
}
}
impl ConfigEntry {
pub fn new(value: Value) -> Self {
Self { inner: value }
}
pub fn set(&mut self, value: Value) {
self.inner = value;
}
pub fn get(&self) -> &Value {
&self.inner
}
}
impl Default for Configuration {
fn default() -> Self {
let mut self_config = Self::new();
self_config.set(BIB_REF_DISPLAY, Value::String("{{number}}".to_string()));
self_config.set(META_LANG, Value::String("en".to_string()));
self_config.set(PDF_MARGIN_BOTTOM, Value::Float(0.5));
self_config.set(PDF_MARGIN_TOP, Value::Float(0.5));
self_config.set(PDF_DISPLAY_HEADER_FOOTER, Value::Bool(true));
self_config.set(
PDF_HEADER_TEMPLATE,
Value::String("<div></div>".to_string()),
);
self_config.set(
PDF_FOOTER_TEMPLATE,
Value::String(
include_str!("../../format/chromium_pdf/assets/default-footer-template.html")
.to_string(),
),
);
self_config
}
}
impl Configuration {
pub fn new() -> Self {
Self {
config: Arc::new(RwLock::new(HashMap::new())),
}
}
/// Returns the value of a config entry
pub fn get_entry(&self, key: &str) -> Option<ConfigEntry> {
let config = self.config.read().unwrap();
if let Some(entry) = config.get(key) {
let value = entry.read().unwrap();
Some(value.clone())
} else {
None
}
}
/// Returns a shared reference to a config entry
pub fn get_ref_entry(&self, key: &str) -> Option<ConfigRefEntry> {
let config = self.config.read().unwrap();
if let Some(entry) = config.get(&key.to_string()) {
Some(Arc::clone(entry))
} else {
None
}
}
/// Sets a config parameter
pub fn set(&mut self, key: &str, value: Value) {
let mut config = self.config.write().unwrap();
if let Some(entry) = config.get(&key.to_string()) {
entry.write().unwrap().set(value)
} else {
config.insert(
key.to_string(),
Arc::new(RwLock::new(ConfigEntry::new(value))),
);
}
}
/// Sets a config value based on a metadata value
pub fn set_from_meta(&mut self, key: &str, value: MetadataValue) {
match value {
MetadataValue::String(string) => self.set(key, Value::String(string)),
MetadataValue::Bool(bool) => self.set(key, Value::Bool(bool)),
MetadataValue::Float(f) => self.set(key, Value::Float(f)),
MetadataValue::Integer(i) => self.set(key, Value::Integer(i)),
MetadataValue::Template(t) => self.set(key, Value::Template(t)),
_ => {}
}
}
pub fn set_from_toml(&mut self, value: &toml::Value) -> Option<()> {
let table = value.as_table().cloned()?;
table.iter().for_each(|(k, v)| {
match v {
toml::Value::Table(_) => self.set_from_toml(v).unwrap_or(()),
_ => self.set(k, Value::try_from(v.clone()).unwrap()),
};
});
Some(())
}
}
impl TryFrom<toml::Value> for Value {
type Error = ();
fn try_from(value: toml::Value) -> Result<Self, Self::Error> {
match value {
toml::Value::Table(_) => Err(()),
toml::Value::Float(f) => Ok(Value::Float(f)),
toml::Value::Integer(i) => Ok(Value::Integer(i)),
toml::Value::String(s) => Ok(Value::String(s)),
toml::Value::Boolean(b) => Ok(Value::Bool(b)),
toml::Value::Datetime(dt) => Ok(Value::String(dt.to_string())),
toml::Value::Array(a) => Ok(Value::Array(
a.iter()
.cloned()
.filter_map(|e| Value::try_from(e).ok())
.collect::<Vec<Value>>(),
)),
}
}
}
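// Illustrative sketch (not part of the diff): how the Configuration API above
// might be used. The "toc-active" key and the German override are assumptions
// made up for this example; "language" comes from keys.rs.
use crate::references::configuration::{Configuration, Value};

fn configuration_example() {
    // defaults populated by the Default impl shown above
    let mut config = Configuration::default();
    assert_eq!(
        config.get_entry("language").map(|e| e.get().as_string()),
        Some("en".to_string())
    );

    // overriding a key stores the new value behind the shared RwLock entry
    config.set("language", Value::String("de".to_string()));
    config.set("toc-active", Value::Bool(true));
    assert_eq!(
        config.get_entry("toc-active").and_then(|e| e.get().as_bool()),
        Some(true)
    );
}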

@ -1,16 +1,9 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::{ use crate::elements::{
Anchor, BoldText, Inline, ItalicText, Line, List, ListItem, PlainText, TextLine, Anchor, BoldText, Inline, ItalicText, Line, List, ListItem, PlainText, TextLine,
}; };
use parking_lot::Mutex;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::{Arc, Mutex};
use crate::bold_text; use crate::bold_text;
use crate::italic_text; use crate::italic_text;
@ -117,11 +110,11 @@ impl GlossaryManager {
/// Assigns entries to references /// Assigns entries to references
pub fn assign_entries_to_references(&self) { pub fn assign_entries_to_references(&self) {
for reference in &self.references { for reference in &self.references {
let mut reference = reference.lock(); let mut reference = reference.lock().unwrap();
if let Some(entry) = self.entries.get(&reference.short) { if let Some(entry) = self.entries.get(&reference.short) {
reference.entry = Some(Arc::clone(entry)); reference.entry = Some(Arc::clone(entry));
let mut entry = entry.lock(); let mut entry = entry.lock().unwrap();
if !entry.is_assigned { if !entry.is_assigned {
entry.is_assigned = true; entry.is_assigned = true;
@ -137,13 +130,13 @@ impl GlossaryManager {
let mut entries = self let mut entries = self
.entries .entries
.values() .values()
.filter(|e| e.lock().is_assigned) .filter(|e| e.lock().unwrap().is_assigned)
.cloned() .cloned()
.collect::<Vec<Arc<Mutex<GlossaryEntry>>>>(); .collect::<Vec<Arc<Mutex<GlossaryEntry>>>>();
entries.sort_by(|a, b| { entries.sort_by(|a, b| {
let a = a.lock(); let a = a.lock().unwrap();
let b = b.lock(); let b = b.lock().unwrap();
if a.short > b.short { if a.short > b.short {
Ordering::Greater Ordering::Greater
} else if a.short < b.short { } else if a.short < b.short {
@ -153,7 +146,7 @@ impl GlossaryManager {
} }
}); });
for entry in &entries { for entry in &entries {
let entry = entry.lock(); let entry = entry.lock().unwrap();
let mut line = TextLine::new(); let mut line = TextLine::new();
line.subtext.push(bold_text!(entry.short.clone())); line.subtext.push(bold_text!(entry.short.clone()));
line.subtext.push(plain_text!(" - ".to_string())); line.subtext.push(plain_text!(" - ".to_string()));

@ -1,10 +1,5 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
pub mod bibliography; pub mod bibliography;
pub mod configuration;
pub mod glossary; pub mod glossary;
pub mod placeholders; pub mod placeholders;
pub mod templates; pub mod templates;

@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::*; use crate::elements::*;
use crate::references::bibliography::create_bib_list; use crate::references::bibliography::create_bib_list;
use chrono::prelude::*; use chrono::prelude::*;
@ -41,8 +35,6 @@ const P_GLS: &str = "gls";
const P_DATE: &str = "date"; const P_DATE: &str = "date";
const P_TIME: &str = "time"; const P_TIME: &str = "time";
const P_DATETIME: &str = "datetime"; const P_DATETIME: &str = "datetime";
const P_AUTHOR: &str = "author";
const P_TITLE: &str = "title";
impl ProcessPlaceholders for Document { impl ProcessPlaceholders for Document {
/// parses all placeholders and assigns values to them /// parses all placeholders and assigns values to them
@ -62,7 +54,7 @@ impl ProcessPlaceholders for Document {
self.bibliography.get_entry_list_by_occurrence() self.bibliography.get_entry_list_by_occurrence()
)))), )))),
P_GLS => pholder.set_value(block!(Block::List( P_GLS => pholder.set_value(block!(Block::List(
self.glossary.lock().create_glossary_list() self.glossary.lock().unwrap().create_glossary_list()
))), ))),
P_DATE => pholder.set_value(inline!(Inline::Plain(PlainText { P_DATE => pholder.set_value(inline!(Inline::Plain(PlainText {
value: get_date_string() value: get_date_string()
@ -73,24 +65,10 @@ impl ProcessPlaceholders for Document {
P_DATETIME => pholder.set_value(inline!(Inline::Plain(PlainText { P_DATETIME => pholder.set_value(inline!(Inline::Plain(PlainText {
value: format!("{} {}", get_date_string(), get_time_string()) value: format!("{} {}", get_date_string(), get_time_string())
}))), }))),
P_AUTHOR => {
if let Some(value) = self.config.lock().metadata.author.clone() {
pholder.set_value(inline!(Inline::Plain(PlainText { value })))
}
}
P_TITLE => {
if let Some(value) = self.config.lock().metadata.title.clone() {
pholder.set_value(inline!(Inline::Plain(PlainText { value })))
}
}
_ => { _ => {
if let Some(value) = self if let Some(entry) = self.config.get_entry(pholder.name.to_lowercase().as_str())
.config
.lock()
.custom_attributes
.get(pholder.name.to_lowercase().as_str())
.cloned()
{ {
let value = entry.get().as_string();
pholder.set_value(inline!(Inline::Plain(PlainText { value }))) pholder.set_value(inline!(Inline::Plain(PlainText { value })))
} }
} }
@ -116,7 +94,7 @@ impl ProcessPlaceholders for Document {
}))); })));
if let Some(meta) = &pholder.metadata { if let Some(meta) = &pholder.metadata {
if let Some(value) = meta.data.get(S_VALUE) { if let Some(value) = meta.data.get(S_VALUE) {
self.config.lock().set_from_meta(key, value.clone()) self.config.set_from_meta(key, value.clone())
} }
} }
} }

@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::{Block, Element, Inline, Line, ListItem}; use crate::elements::{Block, Element, Inline, Line, ListItem};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};

@ -1,24 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct FeatureSettings {
pub embed_external: bool,
pub smart_arrows: bool,
pub include_mathjax: bool,
}
impl Default for FeatureSettings {
fn default() -> Self {
Self {
embed_external: true,
smart_arrows: true,
include_mathjax: true,
}
}
}

@ -1,24 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ImageSettings {
pub format: Option<String>,
pub max_width: Option<u32>,
pub max_height: Option<u32>,
}
impl Default for ImageSettings {
fn default() -> Self {
Self {
format: None,
max_height: None,
max_width: None,
}
}
}

@ -1,26 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ImportSettings {
pub ignored_imports: Vec<String>,
pub included_stylesheets: Vec<String>,
pub included_bibliography: Vec<String>,
pub included_glossaries: Vec<String>,
}
impl Default for ImportSettings {
fn default() -> Self {
Self {
ignored_imports: Vec::with_capacity(0),
included_stylesheets: vec!["style.css".to_string()],
included_bibliography: vec!["Bibliography.toml".to_string()],
included_glossaries: vec!["Glossary.toml".to_string()],
}
}
}

@ -1,28 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct MetadataSettings {
pub title: Option<String>,
pub author: Option<String>,
pub description: Option<String>,
pub keywords: Vec<String>,
pub language: String,
}
impl Default for MetadataSettings {
fn default() -> Self {
Self {
title: None,
author: None,
description: None,
keywords: Vec::new(),
language: "en".to_string(),
}
}
}

@ -1,131 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::{Metadata, MetadataValue};
use crate::settings::feature_settings::FeatureSettings;
use crate::settings::image_settings::ImageSettings;
use crate::settings::import_settings::ImportSettings;
use crate::settings::metadata_settings::MetadataSettings;
use crate::settings::pdf_settings::PDFSettings;
use crate::settings::style_settings::StyleSettings;
use config::{ConfigError, Source};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::error::Error;
use std::fmt::{self, Display};
use std::io;
use std::mem;
use std::path::PathBuf;
pub mod feature_settings;
pub mod image_settings;
pub mod import_settings;
pub mod metadata_settings;
pub mod pdf_settings;
pub mod style_settings;
pub type SettingsResult<T> = Result<T, SettingsError>;
#[derive(Debug)]
pub enum SettingsError {
IoError(io::Error),
ConfigError(ConfigError),
TomlError(toml::ser::Error),
}
impl Display for SettingsError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::IoError(e) => write!(f, "IO Error: {}", e),
Self::ConfigError(e) => write!(f, "Config Error: {}", e),
Self::TomlError(e) => write!(f, "Toml Error: {}", e),
}
}
}
impl Error for SettingsError {}
impl From<io::Error> for SettingsError {
fn from(e: io::Error) -> Self {
Self::IoError(e)
}
}
impl From<ConfigError> for SettingsError {
fn from(e: ConfigError) -> Self {
Self::ConfigError(e)
}
}
impl From<toml::ser::Error> for SettingsError {
fn from(e: toml::ser::Error) -> Self {
Self::TomlError(e)
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct Settings {
pub metadata: MetadataSettings,
pub features: FeatureSettings,
pub imports: ImportSettings,
pub pdf: PDFSettings,
pub images: ImageSettings,
pub style: StyleSettings,
pub custom_attributes: HashMap<String, String>,
}
impl Source for Settings {
fn clone_into_box(&self) -> Box<dyn Source + Send + Sync> {
Box::new(self.clone())
}
fn collect(&self) -> Result<HashMap<String, config::Value>, config::ConfigError> {
let source_str =
toml::to_string(&self).map_err(|e| config::ConfigError::Foreign(Box::new(e)))?;
let result = toml::de::from_str(&source_str)
.map_err(|e| config::ConfigError::Foreign(Box::new(e)))?;
Ok(result)
}
}
impl Settings {
/// Loads the settings from the specified path
pub fn load(path: PathBuf) -> SettingsResult<Self> {
let mut settings = config::Config::default();
settings
.merge(Self::default())?
.merge(config::File::from(path))?;
let settings: Self = settings.try_into()?;
Ok(settings)
}
/// Merges the current settings with the settings from the given path,
/// replacing the current values in place
pub fn merge(&mut self, path: PathBuf) -> SettingsResult<()> {
let mut settings = config::Config::default();
settings
.merge(self.clone())?
.merge(config::File::from(path))?;
let mut settings: Self = settings.try_into()?;
mem::swap(self, &mut settings); // replace the old settings with the new ones
Ok(())
}
pub fn append_metadata<M: Metadata>(&mut self, metadata: M) {
let entries = metadata.get_string_map();
for (key, value) in entries {
self.custom_attributes.insert(key, value);
}
}
pub fn set_from_meta(&mut self, key: &str, value: MetadataValue) {
self.custom_attributes
.insert(key.to_string(), value.to_string());
}
}
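// Illustrative sketch (not part of the diff) of how the removed Settings loader
// might be driven. The manifest file names are assumptions for this example.
use crate::settings::Settings;
use std::path::PathBuf;

fn settings_example() -> Result<(), Box<dyn std::error::Error>> {
    // defaults merged with a project settings file
    let mut settings = Settings::load(PathBuf::from("Manifest.toml"))?;

    // a second file can be merged on top, replacing the current values in place
    settings.merge(PathBuf::from("Manifest.local.toml"))?;

    // metadata fields are plain options/strings after loading
    println!("author: {:?}", settings.metadata.author);
    Ok(())
}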

@ -1,54 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PDFSettings {
pub display_header_footer: bool,
pub header_template: Option<String>,
pub footer_template: Option<String>,
pub page_height: Option<f32>,
pub page_width: Option<f32>,
pub page_scale: f32,
pub margin: PDFMarginSettings,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PDFMarginSettings {
pub top: Option<f32>,
pub bottom: Option<f32>,
pub left: Option<f32>,
pub right: Option<f32>,
}
impl Default for PDFMarginSettings {
fn default() -> Self {
Self {
top: Some(0.5),
bottom: Some(0.5),
left: None,
right: None,
}
}
}
impl Default for PDFSettings {
fn default() -> Self {
Self {
display_header_footer: true,
header_template: Some("<div></div>".to_string()),
footer_template: Some(
include_str!("../format/chromium_pdf/assets/default-footer-template.html")
.to_string(),
),
page_height: None,
page_width: None,
page_scale: 1.0,
margin: Default::default(),
}
}
}

@ -1,32 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct StyleSettings {
pub bib_ref_display: String,
pub theme: Theme,
}
impl Default for StyleSettings {
fn default() -> Self {
Self {
bib_ref_display: "{{number}}".to_string(),
theme: Theme::GitHub,
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum Theme {
GitHub,
SolarizedDark,
SolarizedLight,
OceanDark,
OceanLight,
MagicDark,
}

@ -1,69 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use platform_dirs::{AppDirs, AppUI};
use sha2::Digest;
use std::fs;
use std::io;
use std::path::PathBuf;
#[derive(Clone, Debug)]
pub struct CacheStorage {
location: PathBuf,
}
impl CacheStorage {
pub fn new() -> Self {
lazy_static::lazy_static! {
static ref APP_DIRS: AppDirs = AppDirs::new(Some("snekdown"), AppUI::CommandLine).unwrap();
}
Self {
location: APP_DIRS.cache_dir.clone(),
}
}
/// Returns the cache path for a given file
pub fn get_file_path(&self, path: &PathBuf) -> PathBuf {
let mut hasher = sha2::Sha256::default();
hasher.update(path.to_string_lossy().as_bytes());
let mut file_name = PathBuf::from(format!("{:x}", hasher.finalize()));
if let Some(extension) = path.extension() {
file_name.set_extension(extension);
}
log::trace!("Cache path is {:?}", path);
return self.location.join(PathBuf::from(file_name));
}
/// Returns whether the given file exists in the cache
pub fn has_file(&self, path: &PathBuf) -> bool {
let cache_path = self.get_file_path(path);
cache_path.exists()
}
/// Reads the corresponding cache file
pub fn read(&self, path: &PathBuf) -> io::Result<Vec<u8>> {
let cache_path = self.get_file_path(path);
fs::read(cache_path)
}
/// Writes into the corresponding cache file
pub fn write<R: AsRef<[u8]>>(&self, path: &PathBuf, contents: R) -> io::Result<()> {
let cache_path = self.get_file_path(path);
fs::write(cache_path, contents)
}
/// Clears the cache directory by deleting and recreating it
pub fn clear(&self) -> io::Result<()> {
fs::remove_dir_all(&self.location)?;
fs::create_dir(&self.location)
}
}
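// Illustrative only: memoizing a fetched file with the CacheStorage removed above.
// The source path and the placeholder bytes are made up for this example.
use crate::utils::caching::CacheStorage;
use std::path::PathBuf;

fn cache_example(cache: &CacheStorage) -> std::io::Result<Vec<u8>> {
    let source = PathBuf::from("https://example.com/style.css");

    if cache.has_file(&source) {
        // hit: read the bytes stored under the hashed cache path
        cache.read(&source)
    } else {
        // miss: store freshly fetched bytes so later runs can skip the download
        let bytes = vec![0u8; 16]; // stand-in for downloaded data
        cache.write(&source, &bytes)?;
        Ok(bytes)
    }
}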

@ -1,21 +1,18 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::utils::caching::CacheStorage;
use indicatif::{ProgressBar, ProgressStyle}; use indicatif::{ProgressBar, ProgressStyle};
use parking_lot::Mutex; use platform_dirs::{AppDirs, AppUI};
use rayon::prelude::*; use rayon::prelude::*;
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::fs::read; use std::fs::read;
use std::hash::{Hash, Hasher};
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::{Arc, Mutex};
/// A manager for downloading urls in parallel /// A manager for downloading urls in parallel
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct DownloadManager { pub struct DownloadManager {
downloads: Vec<Arc<Mutex<PendingDownload>>>, downloads: Vec<Arc<Mutex<PendingDownload>>>,
pub use_cache: bool,
} }
impl DownloadManager { impl DownloadManager {
@ -23,12 +20,14 @@ impl DownloadManager {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
downloads: Vec::new(), downloads: Vec::new(),
use_cache: true,
} }
} }
/// Adds a new pending download /// Adds a new pending download
pub fn add_download(&mut self, path: String) -> Arc<Mutex<PendingDownload>> { pub fn add_download(&mut self, path: String) -> Arc<Mutex<PendingDownload>> {
let download = PendingDownload::new(path.clone()); let mut download = PendingDownload::new(path.clone());
download.use_cache = self.use_cache;
let pending = Arc::new(Mutex::new(download)); let pending = Arc::new(Mutex::new(download));
self.downloads.push(Arc::clone(&pending)); self.downloads.push(Arc::clone(&pending));
log::debug!("Added download {}", path); log::debug!("Added download {}", path);
@ -39,7 +38,7 @@ impl DownloadManager {
/// Downloads all download entries /// Downloads all download entries
pub fn download_all(&self) { pub fn download_all(&self) {
let pb = Arc::new(Mutex::new(ProgressBar::new(self.downloads.len() as u64))); let pb = Arc::new(Mutex::new(ProgressBar::new(self.downloads.len() as u64)));
pb.lock().set_style( pb.lock().unwrap().set_style(
ProgressStyle::default_bar() ProgressStyle::default_bar()
.template("Fetching Embeds: [{bar:40.cyan/blue}]") .template("Fetching Embeds: [{bar:40.cyan/blue}]")
.progress_chars("=> "), .progress_chars("=> "),
@ -47,10 +46,10 @@ impl DownloadManager {
let pb_cloned = Arc::clone(&pb); let pb_cloned = Arc::clone(&pb);
self.downloads.par_iter().for_each_with(pb_cloned, |pb, d| { self.downloads.par_iter().for_each_with(pb_cloned, |pb, d| {
d.lock().download(); d.lock().unwrap().download();
pb.lock().inc(1); pb.lock().unwrap().inc(1);
}); });
pb.lock().finish_and_clear(); pb.lock().unwrap().finish_and_clear();
} }
} }
@ -61,7 +60,6 @@ pub struct PendingDownload {
pub(crate) path: String, pub(crate) path: String,
pub(crate) data: Option<Vec<u8>>, pub(crate) data: Option<Vec<u8>>,
pub(crate) use_cache: bool, pub(crate) use_cache: bool,
cache: CacheStorage,
} }
impl PendingDownload { impl PendingDownload {
@ -70,7 +68,6 @@ impl PendingDownload {
path, path,
data: None, data: None,
use_cache: true, use_cache: true,
cache: CacheStorage::new(),
} }
} }
@ -101,18 +98,22 @@ impl PendingDownload {
/// Stores the data to a cache file to retrieve it later /// Stores the data to a cache file to retrieve it later
fn store_to_cache(&self, data: &Vec<u8>) { fn store_to_cache(&self, data: &Vec<u8>) {
if self.use_cache { if self.use_cache {
let path = PathBuf::from(&self.path); let cache_file = get_cached_path(PathBuf::from(&self.path));
self.cache log::debug!("Writing to cache {} -> {:?}", self.path, cache_file);
.write(&path, data.clone()) fs::write(&cache_file, data.clone()).unwrap_or_else(|_| {
.unwrap_or_else(|_| log::warn!("Failed to write file to cache: {}", self.path)); log::warn!(
"Failed to write file to cache: {} -> {:?}",
self.path,
cache_file
)
});
} }
} }
fn read_from_cache(&self) -> Option<Vec<u8>> { fn read_from_cache(&self) -> Option<Vec<u8>> {
let path = PathBuf::from(&self.path); let cache_path = get_cached_path(PathBuf::from(&self.path));
if cache_path.exists() && self.use_cache {
if self.cache.has_file(&path) && self.use_cache { read(cache_path).ok()
self.cache.read(&path).ok()
} else { } else {
None None
} }
@ -120,14 +121,26 @@ impl PendingDownload {
/// Downloads the content from the given url /// Downloads the content from the given url
fn download_content(&self) -> Option<Vec<u8>> { fn download_content(&self) -> Option<Vec<u8>> {
download_path(self.path.clone()) reqwest::blocking::get(&self.path)
.ok()
.map(|c| c.bytes())
.and_then(|b| b.ok())
.map(|b| b.to_vec())
} }
} }
pub fn download_path(path: String) -> Option<Vec<u8>> { pub fn get_cached_path(path: PathBuf) -> PathBuf {
reqwest::blocking::get(&path) lazy_static::lazy_static! {
.ok() static ref APP_DIRS: AppDirs = AppDirs::new(Some("snekdown"), AppUI::CommandLine).unwrap();
.map(|c| c.bytes()) }
.and_then(|b| b.ok()) let mut hasher = DefaultHasher::new();
.map(|b| b.to_vec()) path.hash(&mut hasher);
let file_name = PathBuf::from(format!("{:x}", hasher.finish()));
if !APP_DIRS.cache_dir.is_dir() {
fs::create_dir(&APP_DIRS.cache_dir)
.unwrap_or_else(|_| log::warn!("Failed to create cache dir {:?}", APP_DIRS.cache_dir))
}
APP_DIRS.cache_dir.join(file_name)
} }
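// Sketch of driving the download manager shown on both sides of this hunk; it
// assumes the std::sync::Mutex variant (right column), hence the unwraps. The
// URL is an example value.
use crate::utils::downloads::DownloadManager;

fn download_example() {
    let mut manager = DownloadManager::new();

    // register URLs first; nothing is fetched until download_all runs
    let pending = manager.add_download("https://example.com/style.css".to_string());

    // fetch all registered downloads in parallel with a progress bar
    manager.download_all();

    // the handle now carries the downloaded bytes (or None on failure)
    let size = pending
        .lock()
        .unwrap()
        .data
        .as_ref()
        .map(|d| d.len())
        .unwrap_or(0);
    println!("downloaded {} bytes", size);
}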

@ -1,249 +0,0 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use crate::elements::Metadata;
use crate::utils::caching::CacheStorage;
use crate::utils::downloads::download_path;
use image::imageops::FilterType;
use image::io::Reader as ImageReader;
use image::{GenericImageView, ImageFormat, ImageResult};
use indicatif::{ProgressBar, ProgressStyle};
use mime::Mime;
use parking_lot::Mutex;
use rayon::prelude::*;
use std::io;
use std::io::Cursor;
use std::path::PathBuf;
use std::sync::Arc;
#[derive(Clone, Debug)]
pub struct ImageConverter {
images: Vec<Arc<Mutex<PendingImage>>>,
target_format: Option<ImageFormat>,
target_size: Option<(u32, u32)>,
}
impl ImageConverter {
pub fn new() -> Self {
Self {
images: Vec::new(),
target_format: None,
target_size: None,
}
}
pub fn set_target_size(&mut self, target_size: (u32, u32)) {
self.target_size = Some(target_size)
}
pub fn set_target_format(&mut self, target_format: ImageFormat) {
self.target_format = Some(target_format);
}
/// Adds an image to convert
pub fn add_image(&mut self, path: PathBuf) -> Arc<Mutex<PendingImage>> {
let image = Arc::new(Mutex::new(PendingImage::new(path)));
self.images.push(image.clone());
image
}
/// Converts all images
pub fn convert_all(&mut self) {
let pb = Arc::new(Mutex::new(ProgressBar::new(self.images.len() as u64)));
pb.lock().set_style(
ProgressStyle::default_bar()
.template("Processing images: [{bar:40.cyan/blue}]")
.progress_chars("=> "),
);
self.images.par_iter().for_each(|image| {
let mut image = image.lock();
if let Err(e) = image.convert(self.target_format.clone(), self.target_size.clone()) {
log::error!("Failed to embed image {:?}: {}", image.path, e)
}
pb.lock().tick();
});
pb.lock().finish_and_clear();
}
}
#[derive(Clone, Debug)]
pub struct PendingImage {
pub path: PathBuf,
pub data: Option<Vec<u8>>,
cache: CacheStorage,
pub mime: Mime,
brightness: Option<i32>,
contrast: Option<f32>,
huerotate: Option<i32>,
grayscale: bool,
invert: bool,
}
impl PendingImage {
pub fn new(path: PathBuf) -> Self {
let mime = get_mime(&path);
Self {
path,
data: None,
cache: CacheStorage::new(),
mime,
brightness: None,
contrast: None,
grayscale: false,
invert: false,
huerotate: None,
}
}
pub fn assign_from_meta<M: Metadata>(&mut self, meta: &M) {
if let Some(brightness) = meta.get_integer("brightness") {
self.brightness = Some(brightness as i32);
}
if let Some(contrast) = meta.get_float("contrast") {
self.contrast = Some(contrast as f32);
}
if let Some(huerotate) = meta.get_float("huerotate") {
self.huerotate = Some(huerotate as i32);
}
self.grayscale = meta.get_bool("grayscale");
self.invert = meta.get_bool("invert");
}
/// Converts the image to the specified target format and size
pub fn convert(
&mut self,
target_format: Option<ImageFormat>,
target_size: Option<(u32, u32)>,
) -> ImageResult<()> {
let format = target_format
.or_else(|| {
self.path
.extension()
.and_then(|extension| ImageFormat::from_extension(extension))
})
.unwrap_or(ImageFormat::Png);
let output_path = self.get_output_path(format, target_size);
self.mime = get_mime(&output_path);
if self.cache.has_file(&output_path) {
self.data = Some(self.cache.read(&output_path)?)
} else {
self.convert_image(format, target_size)?;
if let Some(data) = &self.data {
self.cache.write(&output_path, data)?;
}
}
Ok(())
}
/// Converts the image
fn convert_image(
&mut self,
format: ImageFormat,
target_size: Option<(u32, u32)>,
) -> ImageResult<()> {
let mut image = ImageReader::open(self.get_path()?)?.decode()?;
if let Some((width, height)) = target_size {
let dimensions = image.dimensions();
if dimensions.0 > width || dimensions.1 > height {
image = image.resize(width, height, FilterType::Lanczos3);
}
}
if let Some(brightness) = self.brightness {
image = image.brighten(brightness);
}
if let Some(contrast) = self.contrast {
image = image.adjust_contrast(contrast);
}
if let Some(rotate) = self.huerotate {
image = image.huerotate(rotate);
}
if self.grayscale {
image = image.grayscale();
}
if self.invert {
image.invert();
}
let data = Vec::new();
let mut writer = Cursor::new(data);
image.write_to(&mut writer, format)?;
self.data = Some(writer.into_inner());
Ok(())
}
/// Returns the path of the file
fn get_path(&self) -> io::Result<PathBuf> {
if !self.path.exists() {
if self.cache.has_file(&self.path) {
return Ok(self.cache.get_file_path(&self.path));
}
if let Some(data) = download_path(self.path.to_string_lossy().to_string()) {
self.cache.write(&self.path, data)?;
return Ok(self.cache.get_file_path(&self.path));
}
}
Ok(self.path.clone())
}
/// Returns the output file name after converting the image
fn get_output_path(
&self,
target_format: ImageFormat,
target_size: Option<(u32, u32)>,
) -> PathBuf {
let mut path = self.path.clone();
let mut file_name = path.file_stem().unwrap().to_string_lossy().to_string();
let extension = target_format.extensions_str()[0];
let type_name = format!("{:?}", target_format);
if let Some(target_size) = target_size {
file_name += &*format!("-w{}-h{}", target_size.0, target_size.1);
}
if let Some(b) = self.brightness {
file_name += &*format!("-b{}", b);
}
if let Some(c) = self.contrast {
file_name += &*format!("-c{}", c);
}
if let Some(h) = self.huerotate {
file_name += &*format!("-h{}", h);
}
file_name += &*format!("{}-{}", self.invert, self.grayscale);
file_name += format!("-{}", type_name).as_str();
path.set_file_name(file_name);
path.set_extension(extension);
path
}
}
fn get_mime(path: &PathBuf) -> Mime {
let mime = mime_guess::from_ext(
path.clone()
.extension()
.and_then(|e| e.to_str())
.unwrap_or("png"),
)
.first()
.unwrap_or(mime::IMAGE_PNG);
mime
}
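// Illustrative sketch of the ImageConverter removed above (main-branch code, which
// uses parking_lot::Mutex, so lock() needs no unwrap). The target format, size and
// input path are assumptions for this example.
use crate::utils::image_converting::ImageConverter;
use image::ImageFormat;
use std::path::PathBuf;

fn image_conversion_example() {
    let mut converter = ImageConverter::new();
    converter.set_target_format(ImageFormat::Jpeg);
    converter.set_target_size((1920, 1080));

    // queue an image; the returned handle exposes the converted bytes later
    let pending = converter.add_image(PathBuf::from("figures/diagram.png"));

    // convert every queued image in parallel
    converter.convert_all();

    let size = pending.lock().data.as_ref().map(|d| d.len()).unwrap_or(0);
    println!("converted image is {} bytes", size);
}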

@ -1,9 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
#[macro_export] #[macro_export]
macro_rules! plain_text { macro_rules! plain_text {
($e:expr) => { ($e:expr) => {

@ -1,11 +1,3 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
pub mod caching;
pub mod downloads; pub mod downloads;
pub mod image_converting;
pub mod macros; pub mod macros;
pub mod parsing; pub mod parsing;

@ -1,21 +1,6 @@
/*
* Snekdown - Custom Markdown flavour and parser
* Copyright (C) 2021 Trivernis
* See LICENSE for more information.
*/
use regex::Regex;
#[macro_export] #[macro_export]
macro_rules! parse { macro_rules! parse {
($str:expr) => { ($str:expr) => {
Parser::new($str.to_string(), None).parse() Parser::new($str.to_string(), None).parse()
}; };
} }
/// Removes single (escaping) backslashes from the given content
pub(crate) fn remove_single_backlslash<S: ToString>(content: S) -> String {
let content = content.to_string();
lazy_static::lazy_static! {static ref R: Regex = Regex::new(r"\\(?P<c>[^\\])").unwrap();}
R.replace_all(&*content, "$c").to_string()
}
