Compare commits

5 commits
Cargo.lock (generated)

@@ -154,6 +154,17 @@ version = "4.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae"

+[[package]]
+name = "async-trait"
+version = "0.1.68"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "async-walkdir"
 version = "0.2.0"
@@ -2006,6 +2017,7 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 name = "viki"
 version = "0.1.0"
 dependencies = [
+ "async-trait",
  "async-walkdir",
  "clap",
  "comrak",

@@ -6,6 +6,7 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
+async-trait = "0.1.68"
 async-walkdir = "0.2.0"
 clap = { version = "4.3.8", features = ["derive"] }
 comrak = { version = "0.18.0", features = ["emojis"] }

@@ -0,0 +1,2 @@
mod save_file;
pub use save_file::*;

@@ -0,0 +1,36 @@
use std::path::PathBuf;

use async_trait::async_trait;
use miette::{IntoDiagnostic, Result};
use tokio::fs;

use crate::pipeline::ProcessingStep;

pub struct SaveFile;

pub struct SaveFileParams {
    pub path: PathBuf,
    pub contents: Vec<u8>,
}

#[async_trait]
impl ProcessingStep for SaveFile {
    type Input = SaveFileParams;
    type Output = ();

    #[tracing::instrument(name = "save file", level = "trace", skip_all)]
    async fn process(
        &self,
        SaveFileParams { path, contents }: Self::Input,
    ) -> Result<Self::Output> {
        if let Some(parent) = path.parent() {
            if !parent.exists() {
                fs::create_dir_all(parent).await.into_diagnostic()?;
            }
        }
        fs::write(path, contents).await.into_diagnostic()?;

        Ok(())
    }
}
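
SaveFile is the terminal step of the new pipeline, but nothing ties it to the combinators; it can also be driven directly. A minimal sketch, assuming the types above are in scope inside the crate (the output path and contents are made up for illustration):

use std::path::PathBuf;

use miette::Result;

use crate::common::{SaveFile, SaveFileParams};
use crate::pipeline::ProcessingStep;

async fn write_example() -> Result<()> {
    // SaveFile creates missing parent directories before writing the file.
    SaveFile
        .process(SaveFileParams {
            path: PathBuf::from("out/hello.html"), // hypothetical target path
            contents: b"<h1>hello</h1>".to_vec(),
        })
        .await
}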

@@ -1,6 +1,12 @@
 use std::path::PathBuf;

+#[derive(Clone, Debug)]
 pub struct Context {
+    pub dirs: Dirs,
+}
+
+#[derive(Clone, Debug)]
+pub struct Dirs {
     pub content_dir: PathBuf,
     pub template_dir: PathBuf,
     pub stylesheet_dir: PathBuf,

@@ -3,7 +3,7 @@ use std::{path::Path, sync::Arc};
 use args::BuildArgs;
 use clap::Parser;
 use config::{read_config, Config};
-use context::Context;
+use context::{Context, Dirs};
 use data::DirLoader;
 use miette::Result;
 use rendering::ContentRenderer;
@@ -13,9 +13,11 @@ use tracing_subscriber::fmt::format::FmtSpan;
 use crate::args::Args;

 mod args;
+mod common;
 mod config;
 mod context;
 pub mod data;
+mod pipeline;
 mod processors;
 mod rendering;
@@ -36,7 +38,7 @@ async fn build(args: &Args, _build_args: &BuildArgs, cfg: Config) -> Result<()>
     let base_path = &args.directory;
     let ctx = Arc::new(build_context(&base_path, &cfg));

-    let dirs = DirLoader::new(ctx.content_dir.to_owned())
+    let dirs = DirLoader::new(ctx.dirs.content_dir.to_owned())
         .read_content()
         .await?;
@@ -53,10 +55,12 @@ fn build_context(base_path: &Path, config: &Config) -> Context {
     let stylesheet_dir = base_path.join(folders.stylesheets.unwrap_or("styles".into()));

     Context {
-        content_dir,
-        template_dir,
-        stylesheet_dir,
-        output_dir,
+        dirs: Dirs {
+            content_dir,
+            template_dir,
+            stylesheet_dir,
+            output_dir,
+        },
     }
 }

@@ -0,0 +1,141 @@
use async_trait::async_trait;
use futures::future;
use miette::Result;

/// The result of combining two processing steps
pub struct Chained<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>>(S1, S2);

/// An adapter to execute a step with multiple inputs in parallel
pub struct Parallel<S: ProcessingStep>(S);

/// An adapter to map the result of the pipeline
pub struct Map<S: ProcessingStep, T: Send + Sync>(S, Box<dyn Fn(S::Output) -> T + Send + Sync>);

/// An adapter to dynamically construct the next step mapper depending on the previous one
pub struct Construct<S1: ProcessingStep, S2: ProcessingStep<Input = T>, T>(
    S1,
    Box<dyn Fn(S1::Output) -> (T, S2) + Send + Sync>,
);

/// A generic wrapper for processing pipelines
pub struct ProcessingPipeline<I: Send + Sync, O: Send + Sync>(
    Box<dyn ProcessingStep<Input = I, Output = O>>,
);

#[async_trait]
pub trait ProcessingStep: Send + Sync {
    type Input: Send + Sync;
    type Output: Send + Sync;

    async fn process(&self, input: Self::Input) -> Result<Self::Output>;
}

#[async_trait]
impl<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>> ProcessingStep
    for Chained<S1, S2>
{
    type Input = S1::Input;
    type Output = S2::Output;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        let first = self.0.process(input).await?;
        self.1.process(first).await
    }
}

#[async_trait]
impl<S: ProcessingStep> ProcessingStep for Parallel<S> {
    type Input = Vec<S::Input>;
    type Output = Vec<S::Output>;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        future::try_join_all(input.into_iter().map(|i| self.0.process(i))).await
    }
}

pub trait ProcessingChain: Sized + ProcessingStep {
    fn chain<S: ProcessingStep<Input = Self::Output>>(self, other: S) -> Chained<Self, S> {
        Chained(self, other)
    }
}

impl<S: ProcessingStep> ProcessingChain for S {}

pub trait ProcessingParallel: Sized + ProcessingStep {
    fn parallel(self) -> Parallel<Self> {
        Parallel(self)
    }
}

impl<S: ProcessingStep> ProcessingParallel for S {}

pub trait IntoPipeline: Sized + ProcessingStep + 'static {
    fn into_pipeline(self) -> ProcessingPipeline<Self::Input, Self::Output> {
        ProcessingPipeline(Box::new(self))
    }
}

pub trait ProcessingMap: ProcessingStep + Sized {
    fn map<F: Fn(Self::Output) -> T + Send + Sync + 'static, T: Send + Sync>(
        self,
        map_fn: F,
    ) -> Map<Self, T> {
        Map(self, Box::new(map_fn))
    }
}

impl<S: ProcessingStep> ProcessingMap for S {}

#[async_trait]
impl<S: ProcessingStep, T: Send + Sync> ProcessingStep for Map<S, T> {
    type Input = S::Input;
    type Output = T;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        let inner_result = self.0.process(input).await?;

        Ok(self.1(inner_result))
    }
}

pub trait ProcessingConstruct: ProcessingStep + Sized {
    fn construct<
        F: Fn(Self::Output) -> (T, S) + Send + Sync + 'static,
        S: ProcessingStep<Input = T>,
        T: Send + Sync,
    >(
        self,
        construct_fn: F,
    ) -> Construct<Self, S, T> {
        Construct(self, Box::new(construct_fn))
    }
}

impl<S: ProcessingStep> ProcessingConstruct for S {}

#[async_trait]
impl<S1: ProcessingStep, S2: ProcessingStep<Input = T>, T: Send + Sync> ProcessingStep
    for Construct<S1, S2, T>
{
    type Input = S1::Input;
    type Output = S2::Output;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        let inner_output = self.0.process(input).await?;
        let (new_input, step) = self.1(inner_output);

        step.process(new_input).await
    }
}

#[async_trait]
impl<I: Send + Sync, O: Send + Sync> ProcessingStep for ProcessingPipeline<I, O> {
    type Input = I;
    type Output = O;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        self.0.process(input).await
    }
}

impl<S: ProcessingStep + 'static> IntoPipeline for S {}
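
Taken together, these adapters compose like iterator combinators: chain feeds one step's output into the next, map post-processes a step's output with a plain closure, parallel lifts a step over a Vec of inputs via try_join_all, construct builds the follow-up step from the previous output, and into_pipeline erases the concrete type behind a boxed ProcessingStep. A minimal sketch of how they are meant to compose, with two made-up steps (Double and Stringify are illustrations only, not part of this change):

use async_trait::async_trait;
use miette::Result;

// Assumes this sits inside the crate with the pipeline module above in scope.
use crate::pipeline::*;

struct Double;
struct Stringify;

#[async_trait]
impl ProcessingStep for Double {
    type Input = i32;
    type Output = i32;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        Ok(input * 2)
    }
}

#[async_trait]
impl ProcessingStep for Stringify {
    type Input = i32;
    type Output = String;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        Ok(input.to_string())
    }
}

async fn demo() -> Result<()> {
    // Double each number, stringify it, and run all inputs concurrently.
    let pipeline = Double.chain(Stringify).parallel().into_pipeline();
    let out = pipeline.process(vec![1, 2, 3]).await?;
    assert_eq!(out, vec!["2", "4", "6"]);
    Ok(())
}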

@@ -0,0 +1,32 @@
use std::path::PathBuf;

use async_trait::async_trait;
use miette::Result;

use crate::{data::FolderData, pipeline::ProcessingStep};

pub struct LoadDirContent;

#[async_trait]
impl ProcessingStep for LoadDirContent {
    type Input = FolderData;
    type Output = (Vec<PathBuf>, String);

    #[tracing::instrument(name = "load dir", level = "trace", skip_all)]
    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        let dir_name = input
            .path
            .components()
            .last()
            .unwrap()
            .as_os_str()
            .to_string_lossy();
        let default_template = input
            .index
            .default_template
            .to_owned()
            .unwrap_or(dir_name.into());

        Ok((input.pages, default_template))
    }
}

@@ -1,19 +1,26 @@
 use std::{path::PathBuf, sync::Arc};

-use futures::future;
 use miette::{IntoDiagnostic, Result};
-use tera::{Context as TeraContext, Tera};
+use tera::Tera;
 use tokio::{fs, sync::Mutex};

 use crate::{
+    common::{SaveFile, SaveFileParams},
     context::Context,
-    data::{load_page, FolderData},
+    data::FolderData,
 };
+use crate::pipeline::*;

 use self::style::{load_stylesheets, Stylesheets};

+mod load_dir_content;
+mod render_page;
 mod style;

+use load_dir_content::*;
+use render_page::*;
+
 // renders content using the given template folder
 pub struct ContentRenderer {
     template_glob: String,
@@ -23,8 +30,8 @@ pub struct ContentRenderer {
 impl ContentRenderer {
     pub async fn new(ctx: Arc<Context>) -> Result<Self> {
-        let template_glob = format!("{}/**/*", ctx.template_dir.to_string_lossy());
-        let styles = load_stylesheets(&ctx.stylesheet_dir).await?;
+        let template_glob = format!("{}/**/*", ctx.dirs.template_dir.to_string_lossy());
+        let styles = load_stylesheets(&ctx.dirs.stylesheet_dir).await?;

         Ok(Self {
             template_glob,
@@ -35,92 +42,47 @@ impl ContentRenderer {
     #[tracing::instrument(level = "trace", skip_all)]
     pub async fn render_all(&self, dirs: Vec<FolderData>) -> Result<()> {
-        if self.ctx.output_dir.exists() {
-            fs::remove_dir_all(&self.ctx.output_dir)
+        if self.ctx.dirs.output_dir.exists() {
+            fs::remove_dir_all(&self.ctx.dirs.output_dir)
                 .await
                 .into_diagnostic()?;
         }
         let mut tera = Tera::new(&self.template_glob).into_diagnostic()?;
         super::processors::register_all(&mut tera);
-        future::try_join_all(dirs.into_iter().map(|data| self.render_folder(&tera, data))).await?;
-
-        Ok(())
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn render_folder(&self, tera: &Tera, data: FolderData) -> Result<()> {
-        let dir_name = data
-            .path
-            .components()
-            .last()
-            .unwrap()
-            .as_os_str()
-            .to_string_lossy();
-        let default_template = data
-            .index
-            .default_template
-            .to_owned()
-            .unwrap_or(dir_name.into());
-
-        future::try_join_all(
-            data.pages
-                .into_iter()
-                .map(|page| self.render_page(tera, default_template.clone(), page)),
-        )
-        .await?;
-
-        Ok(())
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn render_page(
-        &self,
-        tera: &Tera,
-        default_template: String,
-        page_path: PathBuf,
-    ) -> Result<()> {
-        tracing::debug!("Rendering {page_path:?}");
-        let page = load_page(&page_path).await?;
-        let mut context = TeraContext::new();
-        let mut template_name = default_template;
-        let mut style_name = template_name.to_owned();
-
-        match page {
-            crate::data::Page::Data(data) => {
-                if let Some(tmpl) = data.metadata.template {
-                    template_name = tmpl.to_owned();
-                    style_name = tmpl;
-                }
-                context.insert("data", &data.data);
-            }
-            crate::data::Page::Content(content) => context.insert("content", &content),
-        }
-        {
-            let mut styles = self.styles.lock().await;
-            let style_embed = styles
-                .get_style_embed(&style_name, &self.ctx.output_dir)
-                .await?;
-            context.insert("style", &style_embed);
-        };
-        tracing::debug!("context = {context:?}");
-
-        let html = tera
-            .render(&format!("{template_name}.html"), &context)
-            .into_diagnostic()?;
-        let rel_path = page_path
-            .strip_prefix(&self.ctx.content_dir)
-            .into_diagnostic()?;
-        let mut out_path = self.ctx.output_dir.join(rel_path);
-        out_path.set_extension("html");
-        let parent = out_path.parent().unwrap();
-
-        if !parent.exists() {
-            fs::create_dir_all(parent).await.into_diagnostic()?;
-        }
-        fs::write(out_path, html).await.into_diagnostic()?;
+
+        let out_dir = self.ctx.dirs.output_dir.to_owned();
+        let styles = Arc::clone(&self.styles);
+        let ctx = Arc::clone(&self.ctx);
+
+        LoadDirContent
+            .construct(move |(files, default_template)| {
+                let step = RenderPage {
+                    tera: tera.clone(),
+                    styles: styles.clone(),
+                    ctx: ctx.clone(),
+                    default_template,
+                }
+                .map(map_path_to_output(out_dir.clone()))
+                .chain(SaveFile)
+                .parallel();
+
+                (files, step)
+            })
+            .parallel()
+            .process(dirs)
+            .await?;

         Ok(())
     }
 }
+
+fn map_path_to_output(out_dir: PathBuf) -> impl Fn((PathBuf, String)) -> SaveFileParams {
+    move |(path, contents)| {
+        let path = out_dir.join(path).with_extension("html");
+
+        SaveFileParams {
+            path,
+            contents: contents.into_bytes(),
+        }
+    }
+}
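
In effect, render_all now runs LoadDirContent once per folder (via the outer parallel), and construct uses that folder's page list and default template to assemble a per-folder step: RenderPage, mapped into SaveFileParams, chained into SaveFile, and again parallelised over the folder's pages.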

@@ -0,0 +1,58 @@
use std::{path::PathBuf, sync::Arc};

use async_trait::async_trait;
use miette::{IntoDiagnostic, Result};
use tera::{Context as TeraContext, Tera};
use tokio::sync::Mutex;

use crate::{context::Context, data::load_page, pipeline::ProcessingStep};

use super::style::Stylesheets;

pub struct RenderPage {
    pub tera: Tera,
    pub styles: Arc<Mutex<Stylesheets>>,
    pub ctx: Arc<Context>,
    pub default_template: String,
}

#[async_trait]
impl ProcessingStep for RenderPage {
    type Input = PathBuf;
    type Output = (PathBuf, String);

    #[tracing::instrument(name = "render page", level = "trace", skip_all)]
    async fn process(&self, page_path: Self::Input) -> Result<Self::Output> {
        let page = load_page(&page_path).await?;
        let mut context = TeraContext::new();
        let mut template_name = None;

        match page {
            crate::data::Page::Data(data) => {
                template_name = data.metadata.template;
                context.insert("data", &data.data);
            }
            crate::data::Page::Content(content) => context.insert("content", &content),
        }
        let template_name = template_name.as_ref().unwrap_or(&self.default_template);
        {
            let mut styles = self.styles.lock().await;
            let style_embed = styles
                .get_style_embed(template_name, &self.ctx.dirs.output_dir)
                .await?;
            context.insert("style", &style_embed);
        };
        tracing::debug!("context = {context:?}");

        let html = self
            .tera
            .render(&format!("{template_name}.html"), &context)
            .into_diagnostic()?;
        let rel_path = page_path
            .strip_prefix(&self.ctx.dirs.content_dir)
            .into_diagnostic()?;

        Ok((rel_path.to_owned(), html))
    }
}

@@ -12,6 +12,7 @@ use tokio::fs;
 const DEFAULT_SHEET_NAME: &str = "style";
 const EMBED_THRESHOLD: usize = 512;

+#[derive(Default)]
 pub struct Stylesheets {
     page_styles: HashMap<String, PathBuf>,
     processed_styles: HashMap<String, String>,
@@ -19,6 +20,9 @@ pub struct Stylesheets {
 #[tracing::instrument(level = "trace")]
 pub async fn load_stylesheets(base_dir: &PathBuf) -> Result<Stylesheets> {
+    if !base_dir.exists() {
+        return Ok(Stylesheets::default());
+    }
     let mut entries = WalkDir::new(base_dir);
     let mut page_styles = HashMap::new();
     let empty_path = PathBuf::new();
