Compare commits
5 Commits
main
...
feature/pr
| Author | SHA1 | Date |
|---|---|---|
trivernis | 6178a045e7 | 1 year ago |
trivernis | a2eeb4fe73 | 1 year ago |
trivernis | 3655ad2c5f | 1 year ago |
trivernis | b8021c6faf | 1 year ago |
trivernis | 143868b4ae | 1 year ago |
@ -0,0 +1,2 @@
|
|||||||
|
mod save_file;
|
||||||
|
pub use save_file::*;
|
@ -0,0 +1,36 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use miette::{IntoDiagnostic, Result};
|
||||||
|
use tokio::fs;
|
||||||
|
|
||||||
|
use crate::pipeline::ProcessingStep;
|
||||||
|
|
||||||
|
/// A processing step that persists raw file contents to disk.
pub struct SaveFile;
|
||||||
|
|
||||||
|
/// Parameters consumed by [`SaveFile`]: a destination path and the bytes to write there.
pub struct SaveFileParams {
    /// Destination path of the file to create or overwrite.
    pub path: PathBuf,
    /// Raw bytes written to `path`.
    pub contents: Vec<u8>,
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl ProcessingStep for SaveFile {
|
||||||
|
type Input = SaveFileParams;
|
||||||
|
type Output = ();
|
||||||
|
|
||||||
|
#[tracing::instrument(name = "save file", level = "trace", skip_all)]
|
||||||
|
async fn process(
|
||||||
|
&self,
|
||||||
|
SaveFileParams { path, contents }: Self::Input,
|
||||||
|
) -> Result<Self::Output> {
|
||||||
|
if let Some(parent) = path.parent() {
|
||||||
|
if !parent.exists() {
|
||||||
|
fs::create_dir_all(parent).await.into_diagnostic()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fs::write(path, contents).await.into_diagnostic()?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,141 @@
|
|||||||
|
use async_trait::async_trait;
|
||||||
|
use futures::future;
|
||||||
|
use miette::Result;
|
||||||
|
|
||||||
|
/// The result of combining two processing steps:
/// the output of `S1` is fed into `S2`.
pub struct Chained<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>>(S1, S2);
|
||||||
|
|
||||||
|
/// An adapter to execute a step with multiple inputs in parallel.
pub struct Parallel<S: ProcessingStep>(S);
|
||||||
|
|
||||||
|
/// An adapter to map the result of the pipeline with a stored closure.
pub struct Map<S: ProcessingStep, T: Send + Sync>(S, Box<dyn Fn(S::Output) -> T + Send + Sync>);
|
||||||
|
|
||||||
|
/// An adapter to dynamically construct the next step depending on the previous
/// one: the closure maps the first step's output to an input plus the step
/// that should consume it.
pub struct Construct<S1: ProcessingStep, S2: ProcessingStep<Input = T>, T>(
    S1,
    Box<dyn Fn(S1::Output) -> (T, S2) + Send + Sync>,
);
|
||||||
|
|
||||||
|
/// A generic wrapper for processing pipelines: type-erases any step from
/// input `I` to output `O` behind a box.
pub struct ProcessingPipeline<I: Send + Sync, O: Send + Sync>(
    Box<dyn ProcessingStep<Input = I, Output = O>>,
);
|
||||||
|
|
||||||
|
/// A single asynchronous transformation in a processing pipeline.
#[async_trait]
pub trait ProcessingStep: Send + Sync {
    /// The value consumed by this step.
    type Input: Send + Sync;
    /// The value produced by this step.
    type Output: Send + Sync;

    /// Transforms `input` into this step's output, returning a diagnostic on failure.
    async fn process(&self, input: Self::Input) -> Result<Self::Output>;
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<S1: ProcessingStep, S2: ProcessingStep<Input = S1::Output>> ProcessingStep
|
||||||
|
for Chained<S1, S2>
|
||||||
|
{
|
||||||
|
type Input = S1::Input;
|
||||||
|
type Output = S2::Output;
|
||||||
|
|
||||||
|
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
|
||||||
|
let first = self.0.process(input).await?;
|
||||||
|
self.1.process(first).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<S: ProcessingStep> ProcessingStep for Parallel<S> {
|
||||||
|
type Input = Vec<S::Input>;
|
||||||
|
type Output = Vec<S::Output>;
|
||||||
|
|
||||||
|
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
|
||||||
|
future::try_join_all(input.into_iter().map(|i| self.0.process(i))).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extension trait for chaining two processing steps sequentially.
pub trait ProcessingChain: Sized + ProcessingStep {
    /// Combines `self` with `other` so that `other` consumes `self`'s output.
    fn chain<S: ProcessingStep<Input = Self::Output>>(self, other: S) -> Chained<Self, S> {
        Chained(self, other)
    }
}
|
||||||
|
|
||||||
|
// Blanket impl: every processing step can be chained.
impl<S: ProcessingStep> ProcessingChain for S {}
|
||||||
|
|
||||||
|
/// Extension trait for running a step over many inputs in parallel.
pub trait ProcessingParallel: Sized + ProcessingStep {
    /// Wraps `self` so it accepts a `Vec` of inputs processed concurrently.
    fn parallel(self) -> Parallel<Self> {
        Parallel(self)
    }
}
|
||||||
|
|
||||||
|
// Blanket impl: every processing step can be parallelized.
impl<S: ProcessingStep> ProcessingParallel for S {}
|
||||||
|
|
||||||
|
/// Extension trait for type-erasing a step into a [`ProcessingPipeline`].
pub trait IntoPipeline: Sized + ProcessingStep + 'static {
    /// Boxes `self` behind the generic pipeline wrapper.
    fn into_pipeline(self) -> ProcessingPipeline<Self::Input, Self::Output> {
        ProcessingPipeline(Box::new(self))
    }
}
|
||||||
|
|
||||||
|
/// Extension trait for mapping a step's output with a closure.
pub trait ProcessingMap: ProcessingStep + Sized {
    /// Wraps `self` so `map_fn` is applied to each successful output.
    fn map<F: Fn(Self::Output) -> T + Send + Sync + 'static, T: Send + Sync>(
        self,
        map_fn: F,
    ) -> Map<Self, T> {
        Map(self, Box::new(map_fn))
    }
}
|
||||||
|
|
||||||
|
// Blanket impl: every processing step's output can be mapped.
impl<S: ProcessingStep> ProcessingMap for S {}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<S: ProcessingStep, T: Send + Sync> ProcessingStep for Map<S, T> {
|
||||||
|
type Input = S::Input;
|
||||||
|
type Output = T;
|
||||||
|
|
||||||
|
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
|
||||||
|
let inner_result = self.0.process(input).await?;
|
||||||
|
|
||||||
|
Ok(self.1(inner_result))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extension trait for deriving the next step from the previous step's output.
pub trait ProcessingConstruct: ProcessingStep + Sized {
    /// Wraps `self` so `construct_fn` builds the follow-up input and step
    /// from each successful output.
    fn construct<
        F: Fn(Self::Output) -> (T, S) + Send + Sync + 'static,
        S: ProcessingStep<Input = T>,
        T: Send + Sync,
    >(
        self,
        construct_fn: F,
    ) -> Construct<Self, S, T> {
        Construct(self, Box::new(construct_fn))
    }
}
|
||||||
|
|
||||||
|
// Blanket impl: every processing step can construct its successor dynamically.
impl<S: ProcessingStep> ProcessingConstruct for S {}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<S1: ProcessingStep, S2: ProcessingStep<Input = T>, T: Send + Sync> ProcessingStep
|
||||||
|
for Construct<S1, S2, T>
|
||||||
|
{
|
||||||
|
type Input = S1::Input;
|
||||||
|
type Output = S2::Output;
|
||||||
|
|
||||||
|
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
|
||||||
|
let inner_output = self.0.process(input).await?;
|
||||||
|
let (new_input, step) = self.1(inner_output);
|
||||||
|
|
||||||
|
step.process(new_input).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl<I: Send + Sync, O: Send + Sync> ProcessingStep for ProcessingPipeline<I, O> {
|
||||||
|
type Input = I;
|
||||||
|
type Output = O;
|
||||||
|
|
||||||
|
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
|
||||||
|
self.0.process(input).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Blanket impl: every 'static processing step can be type-erased into a pipeline.
impl<S: ProcessingStep + 'static> IntoPipeline for S {}
|
@ -0,0 +1,32 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use miette::Result;
|
||||||
|
|
||||||
|
use crate::{data::FolderData, pipeline::ProcessingStep};
|
||||||
|
|
||||||
|
/// A processing step that lists a folder's pages and resolves its default template name.
pub struct LoadDirContent;
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
impl ProcessingStep for LoadDirContent {
|
||||||
|
type Input = FolderData;
|
||||||
|
type Output = (Vec<PathBuf>, String);
|
||||||
|
|
||||||
|
#[tracing::instrument(name = "load dir", level = "trace", skip_all)]
|
||||||
|
async fn process(&self, input: Self::Input) -> Result<Self::Output> {
|
||||||
|
let dir_name = input
|
||||||
|
.path
|
||||||
|
.components()
|
||||||
|
.last()
|
||||||
|
.unwrap()
|
||||||
|
.as_os_str()
|
||||||
|
.to_string_lossy();
|
||||||
|
let default_template = input
|
||||||
|
.index
|
||||||
|
.default_template
|
||||||
|
.to_owned()
|
||||||
|
.unwrap_or(dir_name.into());
|
||||||
|
|
||||||
|
Ok((input.pages, default_template))
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,58 @@
|
|||||||
|
use std::{path::PathBuf, sync::Arc};
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
use miette::{IntoDiagnostic, Result};
|
||||||
|
use tera::{Context as TeraContext, Tera};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
|
use crate::{context::Context, data::load_page, pipeline::ProcessingStep};
|
||||||
|
|
||||||
|
use super::style::Stylesheets;
|
||||||
|
|
||||||
|
/// A processing step that renders a single content page to an HTML string.
pub struct RenderPage {
    /// Template engine used to render the page.
    pub tera: Tera,
    /// Shared stylesheet registry used to embed styles per template.
    pub styles: Arc<Mutex<Stylesheets>>,
    /// Shared application context; `ctx.dirs` supplies the content and output directories.
    pub ctx: Arc<Context>,
    /// Template name used when a page does not specify its own.
    pub default_template: String,
}
|
||||||
|
|
||||||
|
#[async_trait]
impl ProcessingStep for RenderPage {
    type Input = PathBuf;
    type Output = (PathBuf, String);

    /// Loads the page at `page_path`, renders it with Tera, and returns the
    /// path relative to the content directory along with the rendered HTML.
    #[tracing::instrument(name = "render page", level = "trace", skip_all)]
    async fn process(&self, page_path: Self::Input) -> Result<Self::Output> {
        let page = load_page(&page_path).await?;
        let mut context = TeraContext::new();
        let mut template_name = None;

        // Data pages may choose their own template via metadata; content pages
        // only contribute a "content" value and fall back to the default template.
        match page {
            crate::data::Page::Data(data) => {
                template_name = data.metadata.template;
                context.insert("data", &data.data);
            }
            crate::data::Page::Content(content) => context.insert("content", &content),
        }
        let template_name = template_name.as_ref().unwrap_or(&self.default_template);
        {
            // Inner scope so the styles mutex is released before rendering.
            let mut styles = self.styles.lock().await;
            let style_embed = styles
                .get_style_embed(template_name, &self.ctx.dirs.output_dir)
                .await?;
            context.insert("style", &style_embed);
        };

        tracing::debug!("context = {context:?}");

        // Templates are looked up as "<template_name>.html" in the Tera registry.
        let html = self
            .tera
            .render(&format!("{template_name}.html"), &context)
            .into_diagnostic()?;
        let rel_path = page_path
            .strip_prefix(&self.ctx.dirs.content_dir)
            .into_diagnostic()?;

        Ok((rel_path.to_owned(), html))
    }
}
|
Loading…
Reference in New Issue