Streamline render pipeline by moving everything to submodules

Branch: feature/processing-pipeline
Author: trivernis, 11 months ago
Parent: 3655ad2c5f
Commit: a2eeb4fe73

@@ -0,0 +1,2 @@
mod save_file;
pub use save_file::*;

@@ -0,0 +1,36 @@
use std::path::PathBuf;
use async_trait::async_trait;
use miette::{IntoDiagnostic, Result};
use tokio::fs;
use crate::pipeline::ProcessingStep;
pub struct SaveFile;
pub struct SaveFileParams {
pub path: PathBuf,
pub contents: Vec<u8>,
}
#[async_trait]
impl ProcessingStep for SaveFile {
type Input = SaveFileParams;
type Output = ();
#[tracing::instrument(name = "save file", level = "trace", skip_all)]
async fn process(
&self,
SaveFileParams { path, contents }: Self::Input,
) -> Result<Self::Output> {
if let Some(parent) = path.parent() {
if !parent.exists() {
fs::create_dir_all(parent).await.into_diagnostic()?;
}
}
fs::write(path, contents).await.into_diagnostic()?;
Ok(())
}
}
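The new SaveFile step generalizes the renderer-specific SaveOutput step that is removed further down: it takes a target path and raw bytes, creates missing parent directories, and writes the file. A minimal sketch of driving it outside of a pipeline (the path and contents here are hypothetical):

use std::path::PathBuf;
use crate::common::{SaveFile, SaveFileParams};
use crate::pipeline::ProcessingStep;

async fn example() -> miette::Result<()> {
    // SaveFile creates missing parent directories before writing the bytes.
    SaveFile
        .process(SaveFileParams {
            path: PathBuf::from("out/index.html"),
            contents: b"<html></html>".to_vec(),
        })
        .await
}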

@@ -13,6 +13,7 @@ use tracing_subscriber::fmt::format::FmtSpan;
use crate::args::Args;
mod args;
mod common;
mod config;
mod context;
pub mod data;

@@ -3,10 +3,13 @@ use futures::future;
use miette::Result;
/// The result of combining two processing steps
pub struct ProcessingChain<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>>(S1, S2);
pub struct Chained<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>>(S1, S2);
/// An adapter to execute a step with multiple inputs in parallel
pub struct ParallelPipeline<S: ProcessingStep>(S);
pub struct Parallel<S: ProcessingStep>(S);
/// An adapter to map the result of the pipeline
pub struct Map<S: ProcessingStep, T: Send + Sync>(S, Box<dyn Fn(S::Output) -> T + Send + Sync>);
/// A generic wrapper for processing pipelines
pub struct ProcessingPipeline<I: Send + Sync, O: Send + Sync>(
@@ -23,7 +26,7 @@ pub trait ProcessingStep: Send + Sync {
#[async_trait]
impl<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>> ProcessingStep
for ProcessingChain<S1, S2>
for Chained<S1, S2>
{
type Input = S1::Input;
type Output = S2::Output;
@@ -35,7 +38,7 @@ impl<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>> ProcessingStep
}
#[async_trait]
impl<S: ProcessingStep> ProcessingStep for ParallelPipeline<S> {
impl<S: ProcessingStep> ProcessingStep for Parallel<S> {
type Input = Vec<S::Input>;
type Output = Vec<S::Output>;
@@ -44,21 +47,21 @@ impl<S: ProcessingStep> ProcessingStep for ParallelPipeline<S> {
}
}
pub trait ProcessingStepChain: Sized + ProcessingStep {
fn chain<S: ProcessingStep<Input = Self::Output>>(self, other: S) -> ProcessingChain<Self, S> {
ProcessingChain(self, other)
pub trait ProcessingChain: Sized + ProcessingStep {
fn chain<S: ProcessingStep<Input = Self::Output>>(self, other: S) -> Chained<Self, S> {
Chained(self, other)
}
}
impl<S: ProcessingStep> ProcessingStepChain for S {}
impl<S: ProcessingStep> ProcessingChain for S {}
pub trait ProcessingStepParallel: Sized + ProcessingStep {
fn parallel(self) -> ParallelPipeline<Self> {
ParallelPipeline(self)
pub trait ProcessingParallel: Sized + ProcessingStep {
fn parallel(self) -> Parallel<Self> {
Parallel(self)
}
}
impl<S: ProcessingStep> ProcessingStepParallel for S {}
impl<S: ProcessingStep> ProcessingParallel for S {}
pub trait IntoPipeline: Sized + ProcessingStep + 'static {
fn into_pipeline(self) -> ProcessingPipeline<Self::Input, Self::Output> {
@@ -66,7 +69,27 @@ pub trait IntoPipeline: Sized + ProcessingStep + 'static {
}
}
impl<S: ProcessingStep + 'static> IntoPipeline for S {}
pub trait ProcessingMap: ProcessingStep + Sized {
fn map<F: Fn(Self::Output) -> T + Send + Sync + 'static, T: Send + Sync>(
self,
map_fn: F,
) -> Map<Self, T> {
Map(self, Box::new(map_fn))
}
}
impl<S: ProcessingStep> ProcessingMap for S {}
#[async_trait]
impl<S: ProcessingStep, T: Send + Sync> ProcessingStep for Map<S, T> {
type Input = S::Input;
type Output = T;
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
let inner_result = self.0.process(input).await?;
Ok(self.1(inner_result))
}
}
#[async_trait]
impl<I: Send + Sync, O: Send + Sync> ProcessingStep for ProcessingPipeline<I, O> {
@@ -77,3 +100,5 @@ impl<I: Send + Sync, O: Send + Sync> ProcessingStep for ProcessingPipeline<I, O>
self.0.process(input).await
}
}
impl<S: ProcessingStep + 'static> IntoPipeline for S {}
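Beyond the shorter names (Chained, Parallel), this hunk adds a Map adapter so a step's output can be rewritten inline before the next stage. A rough sketch of how the combinators compose, using a hypothetical ParseNumber step that is not part of this commit:

use async_trait::async_trait;
use miette::{IntoDiagnostic, Result};
use crate::pipeline::*;

// Illustrative step only: parses one string into an integer.
struct ParseNumber;

#[async_trait]
impl ProcessingStep for ParseNumber {
    type Input = String;
    type Output = i64;

    async fn process(&self, input: Self::Input) -> Result<Self::Output> {
        input.trim().parse().into_diagnostic()
    }
}

async fn example() -> Result<Vec<i64>> {
    // map() rewrites the step's output, parallel() lifts it over a Vec of inputs,
    // and into_pipeline() erases the combinator type behind ProcessingPipeline.
    let pipeline = ParseNumber.map(|n| n * 2).parallel().into_pipeline();
    pipeline.process(vec!["21".into(), "42".into()]).await
}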

@@ -0,0 +1,36 @@
use std::path::PathBuf;
use async_trait::async_trait;
use miette::Result;
use crate::{data::FolderData, pipeline::ProcessingStep};
pub struct LoadDirContent;
#[async_trait]
impl ProcessingStep for LoadDirContent {
type Input = FolderData;
type Output = Vec<(PathBuf, String)>;
#[tracing::instrument(name = "load dir", level = "trace", skip_all)]
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
let dir_name = input
.path
.components()
.last()
.unwrap()
.as_os_str()
.to_string_lossy();
let default_template = input
.index
.default_template
.to_owned()
.unwrap_or(dir_name.into());
Ok(input
.pages
.into_iter()
.map(|p| (p, default_template.clone()))
.collect())
}
}
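LoadDirContent is the former LoadDir step (removed from the content module further down) moved into its own submodule: it pairs each page of a folder with the template it should be rendered with, falling back to the folder's directory name when the index declares no default_template. A sketch of driving the step on its own, assuming it sits inside the content module and the folder contents are hypothetical:

use miette::Result;
use crate::{data::FolderData, pipeline::ProcessingStep};

async fn example(folder: FolderData) -> Result<()> {
    // For a folder "content/blog" whose index sets no default_template, every
    // page is paired with the directory name, e.g. ("content/blog/intro.md", "blog").
    for (page_path, template) in LoadDirContent.process(folder).await? {
        println!("{} -> {template}", page_path.display());
    }
    Ok(())
}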

@@ -1,20 +1,26 @@
use std::{path::PathBuf, sync::Arc};
use async_trait::async_trait;
use miette::{IntoDiagnostic, Result};
use tera::{Context as TeraContext, Tera};
use tera::Tera;
use tokio::{fs, sync::Mutex};
use crate::{
common::{SaveFile, SaveFileParams},
context::Context,
data::{load_page, FolderData},
pipeline::{ProcessingStep, ProcessingStepChain, ProcessingStepParallel},
data::FolderData,
};
use crate::pipeline::*;
use self::style::{load_stylesheets, Stylesheets};
mod load_dir_content;
mod render_page;
mod style;
use load_dir_content::*;
use render_page::*;
// renders content using the given template folder
pub struct ContentRenderer {
template_glob: String,
@@ -22,111 +28,6 @@ pub struct ContentRenderer {
styles: Arc<Mutex<Stylesheets>>,
}
pub struct LoadDir;
#[async_trait]
impl ProcessingStep for LoadDir {
type Input = FolderData;
type Output = Vec<(PathBuf, String)>;
#[tracing::instrument(name = "load dir", level = "trace", skip_all)]
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
let dir_name = input
.path
.components()
.last()
.unwrap()
.as_os_str()
.to_string_lossy();
let default_template = input
.index
.default_template
.to_owned()
.unwrap_or(dir_name.into());
Ok(input
.pages
.into_iter()
.map(|p| (p, default_template.clone()))
.collect())
}
}
struct RenderPage {
tera: Tera,
styles: Arc<Mutex<Stylesheets>>,
ctx: Arc<Context>,
}
#[async_trait]
impl ProcessingStep for RenderPage {
type Input = (PathBuf, String);
type Output = (PathBuf, String);
#[tracing::instrument(name = "render page", level = "trace", skip_all)]
async fn process(&self, (page_path, default_template): Self::Input) -> Result<Self::Output> {
let page = load_page(&page_path).await?;
let mut context = TeraContext::new();
let mut template_name = default_template;
let mut style_name = template_name.to_owned();
match page {
crate::data::Page::Data(data) => {
if let Some(tmpl) = data.metadata.template {
template_name = tmpl.to_owned();
style_name = tmpl;
}
context.insert("data", &data.data);
}
crate::data::Page::Content(content) => context.insert("content", &content),
}
{
let mut styles = self.styles.lock().await;
let style_embed = styles
.get_style_embed(&style_name, &self.ctx.dirs.output_dir)
.await?;
context.insert("style", &style_embed);
};
tracing::debug!("context = {context:?}");
let html = self
.tera
.render(&format!("{template_name}.html"), &context)
.into_diagnostic()?;
let rel_path = page_path
.strip_prefix(&self.ctx.dirs.content_dir)
.into_diagnostic()?;
Ok((rel_path.to_owned(), html))
}
}
pub struct SaveOutput {
out_dir: PathBuf,
extension: &'static str,
}
#[async_trait]
impl ProcessingStep for SaveOutput {
type Input = (PathBuf, String);
type Output = ();
#[tracing::instrument(name = "save output", level = "trace", skip_all)]
async fn process(&self, (rel_path, content): Self::Input) -> Result<Self::Output> {
let mut out_path = self.out_dir.join(rel_path);
out_path.set_extension(self.extension);
let parent = out_path.parent().unwrap();
if !parent.exists() {
fs::create_dir_all(parent).await.into_diagnostic()?;
}
fs::write(out_path, content).await.into_diagnostic()?;
Ok(())
}
}
impl ContentRenderer {
pub async fn new(ctx: Arc<Context>) -> Result<Self> {
let template_glob = format!("{}/**/*", ctx.dirs.template_dir.to_string_lossy());
@@ -148,18 +49,17 @@ impl ContentRenderer {
}
let mut tera = Tera::new(&self.template_glob).into_diagnostic()?;
super::processors::register_all(&mut tera);
let out_dir = self.ctx.dirs.output_dir.to_owned();
LoadDir
LoadDirContent
.chain(
RenderPage {
tera,
styles: self.styles.clone(),
ctx: self.ctx.clone(),
}
.chain(SaveOutput {
out_dir: self.ctx.dirs.output_dir.to_owned(),
extension: "html",
})
.map(map_path_to_output(out_dir))
.chain(SaveFile)
.parallel(),
)
.parallel()
@@ -169,3 +69,14 @@ impl ContentRenderer {
Ok(())
}
}
fn map_path_to_output(out_dir: PathBuf) -> impl Fn((PathBuf, String)) -> SaveFileParams {
move |(path, contents)| {
let path = out_dir.join(path).with_extension("html");
SaveFileParams {
path,
contents: contents.into_bytes(),
}
}
}
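map_path_to_output is the glue between RenderPage and the generic SaveFile step: it joins the page's relative path onto the output directory, swaps the extension to .html, and turns the rendered HTML into bytes. A small sketch of the conversion, assuming it sits next to map_path_to_output in the content module (the paths are hypothetical):

use std::path::PathBuf;

fn example() {
    let to_params = map_path_to_output(PathBuf::from("out"));
    // RenderPage yields (relative path, rendered html); SaveFile expects SaveFileParams.
    let params = to_params((PathBuf::from("blog/intro.md"), String::from("<html/>")));
    assert_eq!(params.path, PathBuf::from("out/blog/intro.html"));
    assert_eq!(params.contents, b"<html/>".to_vec());
}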

@@ -0,0 +1,60 @@
use std::{path::PathBuf, sync::Arc};
use async_trait::async_trait;
use miette::{IntoDiagnostic, Result};
use tera::{Context as TeraContext, Tera};
use tokio::sync::Mutex;
use crate::{context::Context, data::load_page, pipeline::ProcessingStep};
use super::style::Stylesheets;
pub struct RenderPage {
pub tera: Tera,
pub styles: Arc<Mutex<Stylesheets>>,
pub ctx: Arc<Context>,
}
#[async_trait]
impl ProcessingStep for RenderPage {
type Input = (PathBuf, String);
type Output = (PathBuf, String);
#[tracing::instrument(name = "render page", level = "trace", skip_all)]
async fn process(&self, (page_path, default_template): Self::Input) -> Result<Self::Output> {
let page = load_page(&page_path).await?;
let mut context = TeraContext::new();
let mut template_name = default_template;
let mut style_name = template_name.to_owned();
match page {
crate::data::Page::Data(data) => {
if let Some(tmpl) = data.metadata.template {
template_name = tmpl.to_owned();
style_name = tmpl;
}
context.insert("data", &data.data);
}
crate::data::Page::Content(content) => context.insert("content", &content),
}
{
let mut styles = self.styles.lock().await;
let style_embed = styles
.get_style_embed(&style_name, &self.ctx.dirs.output_dir)
.await?;
context.insert("style", &style_embed);
};
tracing::debug!("context = {context:?}");
let html = self
.tera
.render(&format!("{template_name}.html"), &context)
.into_diagnostic()?;
let rel_path = page_path
.strip_prefix(&self.ctx.dirs.content_dir)
.into_diagnostic()?;
Ok((rel_path.to_owned(), html))
}
}

@@ -12,6 +12,7 @@ use tokio::fs;
const DEFAULT_SHEET_NAME: &str = "style";
const EMBED_THRESHOLD: usize = 512;
#[derive(Default)]
pub struct Stylesheets {
page_styles: HashMap<String, PathBuf>,
processed_styles: HashMap<String, String>,
@@ -19,6 +20,9 @@ pub struct Stylesheets {
#[tracing::instrument(level = "trace")]
pub async fn load_stylesheets(base_dir: &PathBuf) -> Result<Stylesheets> {
if !base_dir.exists() {
return Ok(Stylesheets::default());
}
let mut entries = WalkDir::new(base_dir);
let mut page_styles = HashMap::new();
let empty_path = PathBuf::new();
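The #[derive(Default)] exists for the new guard in load_stylesheets: when the stylesheet directory is missing, the function now returns an empty Stylesheets instead of trying to walk a directory that does not exist. A sketch of the effect (the directory name is hypothetical):

use std::path::PathBuf;

async fn example() -> miette::Result<()> {
    // A project without a stylesheet directory no longer fails here;
    // the returned Stylesheets simply contains no page styles.
    let _styles = load_stylesheets(&PathBuf::from("theme/styles")).await?;
    Ok(())
}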
