Compare commits


2 Commits

Cargo.lock (generated)

@@ -2011,6 +2011,7 @@ dependencies = [
 "comrak",
 "futures",
 "globset",
+"lazy_static",
 "miette",
 "rsass",
 "serde",

@@ -3,6 +3,10 @@ name = "viki"
 version = "0.1.0"
 edition = "2021"
+
+[[bin]]
+name = "viki"
+path = "src/main.rs"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
@@ -11,6 +15,7 @@ clap = { version = "4.3.8", features = ["derive"] }
 comrak = { version = "0.18.0", features = ["emojis"] }
 futures = "0.3.28"
 globset = { version = "0.4.10", features = ["serde", "serde1"] }
+lazy_static = "1.4.0"
 miette = { version = "5.9.0", features = ["serde", "fancy"] }
 rsass = "0.27.0"
 serde = { version = "1.0.164", features = ["derive"] }

@@ -1,14 +1,55 @@
 use std::path::{Path, PathBuf};
 
+use lazy_static::lazy_static;
 use miette::{IntoDiagnostic, Result};
 use serde::Deserialize;
+use std::sync::{Arc, RwLock};
 use tokio::fs;
 
+lazy_static! {
+    static ref CONFIG: Arc<RwLock<Option<Config>>> = Arc::new(RwLock::new(None));
+}
+
 #[derive(Clone, Debug, Deserialize)]
 pub struct Config {
     pub folders: Folders,
 }
 
+impl Config {
+    pub fn get() -> Self {
+        CONFIG
+            .read()
+            .unwrap()
+            .clone()
+            .expect("Config hasn't been read yet")
+    }
+}
+
+/// Accessor struct to load the config into the global config variable
+/// and later reload it when required
+pub struct ConfigLoader {
+    dir: PathBuf,
+}
+
+impl ConfigLoader {
+    pub async fn load(dir: PathBuf) -> Result<Self> {
+        Self::load_config(&dir).await?;
+        Ok(Self { dir })
+    }
+
+    pub async fn reload(&self) -> Result<()> {
+        Self::load_config(&self.dir).await
+    }
+
+    async fn load_config(dir: &Path) -> Result<()> {
+        let config = read_config(dir).await?;
+        *CONFIG.write().unwrap() = Some(config);
+        Ok(())
+    }
+}
+
 #[derive(Clone, Debug, Deserialize)]
 pub struct Folders {
     pub content: Option<PathBuf>,
@@ -17,7 +58,7 @@ pub struct Folders {
     pub output: Option<PathBuf>,
 }
 
-pub async fn read_config(dir: &Path) -> Result<Config> {
+async fn read_config(dir: &Path) -> Result<Config> {
     let cfg_string = fs::read_to_string(dir.join("viki.toml"))
         .await
         .into_diagnostic()?;

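Note on the hunk above: `Config::get()` reads a process-global `RwLock` and panics if `ConfigLoader::load` has not run first. A minimal usage sketch, assuming module-internal code (the `example` function and the "." directory are hypothetical):

use std::path::PathBuf;

use miette::Result;

async fn example() -> Result<()> {
    // Parses `<dir>/viki.toml` and stores the result in the global CONFIG slot.
    let loader = ConfigLoader::load(PathBuf::from(".")).await?;

    // Any later code can read the config without it being threaded through
    // call sites; calling this before `load` panics ("Config hasn't been read yet").
    let config = Config::get();
    println!("content dir: {:?}", config.folders.content);

    // Re-reads viki.toml, e.g. after a file-watcher event.
    loader.reload().await?;
    Ok(())
}
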
@@ -0,0 +1,56 @@
+use std::{collections::HashMap, path::PathBuf};
+
+use async_walkdir::{Filtering, WalkDir};
+use futures::{future, StreamExt};
+use serde::Deserialize;
+
+#[derive(Deserialize)]
+pub struct TemplateName(String);
+
+impl AsRef<str> for TemplateName {
+    fn as_ref(&self) -> &str {
+        &self.0
+    }
+}
+
+#[derive(Deserialize)]
+pub struct Page {
+    template: TemplateName,
+
+    #[serde(flatten)]
+    data: HashMap<String, toml::Value>,
+}
+
+pub struct ContentLoader {
+    path: PathBuf,
+}
+
+impl ContentLoader {
+    pub fn new(path: PathBuf) -> Self {
+        Self { path }
+    }
+
+    async fn load_pages(&self) -> Vec<Page> {
+        todo!()
+    }
+
+    async fn find_files(&self) -> Vec<PathBuf> {
+        WalkDir::new(&self.path)
+            .filter(|e| async move {
+                e.path()
+                    .extension()
+                    .map(|e| {
+                        if e == "toml" {
+                            Filtering::Continue
+                        } else {
+                            Filtering::Ignore
+                        }
+                    })
+                    .unwrap_or(Filtering::Ignore)
+            })
+            .map(|e| e.expect("failed to read dir").path())
+            .collect::<Vec<_>>()
+            .await
+    }
+}
+
+fn parse_page(path: PathBuf) {}

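Note: in the new `Page` struct only `template` is a fixed key; `#[serde(flatten)]` collects every other top-level key into `data`. A sketch of a page file deserializing, runnable from inside this module since both fields are private (the input values are made up):

let raw = r#"
    template = "article"
    title = "Hello"
    tags = ["rust", "ssg"]
"#;
let page: Page = toml::from_str(raw).unwrap();
assert_eq!(page.template.as_ref(), "article");
// `title` and `tags` land in the flattened map.
assert!(page.data.contains_key("title"));
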
@@ -1,8 +0,0 @@
-use std::path::PathBuf;
-
-pub struct Context {
-    pub content_dir: PathBuf,
-    pub template_dir: PathBuf,
-    pub stylesheet_dir: PathBuf,
-    pub output_dir: PathBuf,
-}

@@ -1,124 +0,0 @@
-use std::path::{Path, PathBuf};
-
-use async_walkdir::WalkDir;
-use futures::StreamExt;
-use globset::{Glob, GlobSetBuilder};
-use miette::{Context, IntoDiagnostic, Result};
-use tokio::fs;
-
-use super::IndexData;
-
-/// loads directory data
-pub struct DirLoader {
-    base_path: PathBuf,
-}
-
-#[derive(Clone, Debug)]
-pub struct FolderData {
-    pub path: PathBuf,
-    pub index: IndexData,
-    pub pages: Vec<PathBuf>,
-}
-
-impl DirLoader {
-    pub fn new(base_path: PathBuf) -> Self {
-        Self { base_path }
-    }
-
-    /// Asynchronously reads all the entries at the given content location
-    #[tracing::instrument(level = "trace", skip(self))]
-    pub async fn read_content(&self) -> Result<Vec<FolderData>> {
-        let mut entries = WalkDir::new(&self.base_path);
-        let mut paths = Vec::new();
-        paths.push(self.base_path.to_owned());
-
-        while let Some(res) = entries.next().await {
-            match res {
-                Ok(entry) => {
-                    let entry_path = entry.path();
-
-                    if entry_path.is_dir() {
-                        paths.push(entry_path)
-                    }
-                }
-                Err(e) => return Err(e).into_diagnostic(),
-            }
-        }
-
-        let folder_data =
-            futures::future::try_join_all(paths.into_iter().map(|p| self.read_dir(p)))
-                .await?
-                .into_iter()
-                .filter_map(|f| f)
-                .collect();
-
-        Ok(folder_data)
-    }
-
-    #[tracing::instrument(level = "trace", skip(self))]
-    async fn read_dir(&self, path: PathBuf) -> Result<Option<FolderData>> {
-        let index_path = path.join("_index.toml");
-
-        if !index_path.exists() {
-            return Ok(None);
-        }
-        let index_data = read_index_data(&index_path).await?;
-        let pages = find_pages(&path, &index_data).await?;
-
-        Ok(Some(FolderData {
-            path,
-            index: index_data,
-            pages,
-        }))
-    }
-}
-
-#[tracing::instrument(level = "trace")]
-async fn read_index_data(path: &Path) -> Result<IndexData> {
-    let index_str = fs::read_to_string(path)
-        .await
-        .into_diagnostic()
-        .context("reading index file")?;
-    toml::from_str(&index_str).into_diagnostic()
-}
-
-#[tracing::instrument(level = "trace")]
-async fn find_pages(dir: &Path, index_data: &IndexData) -> Result<Vec<PathBuf>> {
-    let include_set = build_glob_set(&index_data.include_files)
-        .build()
-        .into_diagnostic()?;
-    let excluded_set = build_glob_set(&index_data.excluded_files)
-        .build()
-        .into_diagnostic()?;
-    let mut read_dir = fs::read_dir(dir).await.into_diagnostic()?;
-    let mut pages = Vec::new();
-
-    while let Some(entry) = read_dir.next_entry().await.into_diagnostic()? {
-        let entry_path = entry.path();
-
-        if entry_path.is_file()
-            && !entry_path
-                .file_name()
-                .unwrap()
-                .to_string_lossy()
-                .starts_with("_")
-            && include_set.is_match(&entry_path)
-            && !excluded_set.is_match(&entry_path)
-        {
-            pages.push(entry_path);
-        }
-    }
-
-    Ok(pages)
-}
-
-#[tracing::instrument(level = "trace")]
-fn build_glob_set(globs: &Vec<String>) -> GlobSetBuilder {
-    let mut builder = GlobSetBuilder::new();
-    globs
-        .iter()
-        .filter_map(|pattern| Glob::new(pattern).ok())
-        .fold(&mut builder, |b, g| b.add(g));
-
-    builder
-}

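Note on the removed `DirLoader`: `build_glob_set` silently dropped invalid patterns via `filter_map(.. .ok())`, and `find_pages` combined the include set, the exclude set, and a leading-underscore convention. A hypothetical distillation of that check (the `is_page` helper is not from the codebase, and the real code additionally required `path.is_file()`):

use std::path::Path;

use globset::{Glob, GlobSet, GlobSetBuilder};

fn is_page(path: &Path, include: &GlobSet, exclude: &GlobSet) -> bool {
    let underscored = path
        .file_name()
        .map(|n| n.to_string_lossy().starts_with('_'))
        .unwrap_or(true);
    !underscored && include.is_match(path) && !exclude.is_match(path)
}

fn main() {
    let mut builder = GlobSetBuilder::new();
    builder.add(Glob::new("**/*.toml").unwrap());
    let include = builder.build().unwrap();
    // An empty set matches nothing, so nothing is excluded here.
    let exclude = GlobSetBuilder::new().build().unwrap();

    assert!(is_page(Path::new("content/post.toml"), &include, &exclude));
    assert!(!is_page(Path::new("content/_index.toml"), &include, &exclude));
}
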
@@ -1,15 +0,0 @@
-use serde::Deserialize;
-
-#[derive(Clone, Debug, Deserialize)]
-pub struct IndexData {
-    /// the default template that is used for rendering
-    pub default_template: Option<String>,
-
-    /// files that are included for rendering
-    #[serde(default)]
-    pub include_files: Vec<String>,
-
-    /// files that are explicitly excluded from rendering
-    #[serde(default)]
-    pub excluded_files: Vec<String>,
-}

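For reference, an `_index.toml` the removed loader accepted would have deserialized like this (the values are invented; both file lists default to empty when omitted):

let index: IndexData = toml::from_str(
    r#"
        default_template = "article"
        include_files = ["*.md", "*.toml"]
        excluded_files = ["draft-*"]
    "#,
)
.unwrap();
assert_eq!(index.default_template.as_deref(), Some("article"));
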
@@ -1,9 +0,0 @@
-mod dir_loader;
-mod index;
-mod page;
-mod page_loader;
-
-pub use dir_loader::*;
-pub use index::*;
-pub use page::*;
-pub use page_loader::*;

@@ -1,25 +0,0 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub enum Page {
-    Data(PageData),
-    Content(String),
-}
-
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct PageData {
-    /// Metadata for this page
-    #[serde(default)]
-    pub metadata: PageMetadata,
-
-    /// remaining data of this page
-    /// passed to the templates when rendering
-    #[serde(flatten)]
-    pub data: toml::Value,
-}
-
-#[derive(Default, Clone, Debug, Deserialize, Serialize)]
-pub struct PageMetadata {
-    /// template used to render this page
-    pub template: Option<String>,
-}

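For comparison with the new `content_loader::Page`: in this removed format the template name lived under an explicit `[metadata]` table, while `#[serde(flatten)]` routed all remaining keys into `data`. A sketch with made-up content:

let old_page: PageData =
    toml::from_str("title = \"Hello\"\n\n[metadata]\ntemplate = \"article\"\n").unwrap();
assert_eq!(old_page.metadata.template.as_deref(), Some("article"));
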
@@ -1,34 +0,0 @@
-use std::path::Path;
-
-use miette::{Context, IntoDiagnostic, Result};
-use tokio::fs;
-
-use super::Page;
-
-pub struct PageLoader;
-
-/// loads a page and parses the data depending on the extension
-#[tracing::instrument(level = "trace")]
-pub async fn load_page(path: &Path) -> Result<Page> {
-    let string_content = load_string_content(path).await?;
-
-    if let Some(extension) = path.extension() {
-        let extension_lower = extension.to_string_lossy().to_lowercase();
-
-        match extension_lower.as_str() {
-            "toml" => Ok(Page::Data(
-                toml::from_str(&string_content).into_diagnostic()?,
-            )),
-            _ => Ok(Page::Content(string_content)),
-        }
-    } else {
-        Ok(Page::Content(string_content))
-    }
-}
-
-#[tracing::instrument(level = "trace")]
-async fn load_string_content(path: &Path) -> Result<String> {
-    fs::read_to_string(path)
-        .await
-        .into_diagnostic()
-        .context("reading page content")
-}

@@ -0,0 +1,33 @@
+use std::path::PathBuf;
+
+use config::ConfigLoader;
+use miette::Result;
+
+mod config;
+mod content_loader;
+
+#[derive(Debug)]
+pub struct Paths {
+    pub config: PathBuf,
+}
+
+pub struct Viki {
+    config_loader: ConfigLoader,
+}
+
+impl Viki {
+    #[tracing::instrument(level = "trace")]
+    pub async fn load(paths: Paths) -> Result<Self> {
+        let config_loader = ConfigLoader::load(paths.config).await?;
+
+        Ok(Self { config_loader })
+    }
+
+    #[tracing::instrument(level = "trace", skip_all)]
+    pub async fn reload(&mut self) -> Result<()> {
+        self.config_loader.reload().await?;
+
+        Ok(())
+    }
+}

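Note: `Paths::config` is handed to `ConfigLoader::load`, which joins `viki.toml` onto it, so it names the directory containing the config file, not the file itself. A hypothetical caller:

use std::path::PathBuf;

use miette::Result;

async fn run() -> Result<()> {
    let mut viki = Viki::load(Paths {
        // The directory that holds viki.toml.
        config: PathBuf::from("."),
    })
    .await?;

    // E.g. from a file watcher or signal handler:
    viki.reload().await?;
    Ok(())
}
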
@@ -1,23 +1,11 @@
-use std::{path::Path, sync::Arc};
-
-use args::BuildArgs;
 use clap::Parser;
-use config::{read_config, Config};
-use context::Context;
-use data::DirLoader;
 use miette::Result;
-use rendering::ContentRenderer;
 use tracing::metadata::LevelFilter;
 use tracing_subscriber::fmt::format::FmtSpan;
 
 use crate::args::Args;
 
 mod args;
-mod config;
-mod context;
-pub mod data;
-mod processors;
-mod rendering;
 
 #[tokio::main]
 async fn main() -> Result<()> {
@@ -25,41 +13,12 @@ async fn main() -> Result<()> {
     init_tracing();
 
     match &args.command {
-        args::Command::Build(build_args) => {
-            let cfg = read_config(&args.directory).await?;
-            build(&args, &build_args, cfg).await
-        }
+        args::Command::Build(_build_args) => {}
     }
-}
-
-async fn build(args: &Args, _build_args: &BuildArgs, cfg: Config) -> Result<()> {
-    let base_path = &args.directory;
-    let ctx = Arc::new(build_context(&base_path, &cfg));
-    let dirs = DirLoader::new(ctx.content_dir.to_owned())
-        .read_content()
-        .await?;
-    ContentRenderer::new(ctx).await?.render_all(dirs).await?;
 
     Ok(())
 }
-
-fn build_context(base_path: &Path, config: &Config) -> Context {
-    let folders = config.folders.clone();
-    let content_dir = base_path.join(folders.content.unwrap_or("content".into()));
-    let template_dir = base_path.join(folders.templates.unwrap_or("templates".into()));
-    let output_dir = base_path.join(folders.output.unwrap_or("dist".into()));
-    let stylesheet_dir = base_path.join(folders.stylesheets.unwrap_or("styles".into()));
-
-    Context {
-        content_dir,
-        template_dir,
-        stylesheet_dir,
-        output_dir,
-    }
-}
 
 fn init_tracing() {
     tracing_subscriber::fmt::SubscriberBuilder::default()
         .with_max_level(LevelFilter::TRACE)

@@ -1,17 +0,0 @@
-use comrak::ComrakOptions;
-use tera::{try_get_value, Filter};
-
-pub struct Markdown;
-
-impl Filter for Markdown {
-    fn filter(
-        &self,
-        value: &tera::Value,
-        _args: &std::collections::HashMap<String, tera::Value>,
-    ) -> tera::Result<tera::Value> {
-        let string_content = try_get_value!("markdown", "value", String, value);
-        let html = comrak::markdown_to_html(&string_content, &ComrakOptions::default());
-
-        Ok(tera::Value::String(html))
-    }
-}

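For context on the removal: a Tera filter like this is registered once and then invoked from templates. A rough sketch of both sides (the template name and body are hypothetical):

use tera::Tera;

fn setup() -> tera::Result<Tera> {
    let mut tera = Tera::default();
    tera.register_filter("markdown", Markdown);
    // `content` is rendered to HTML, then marked safe so Tera doesn't escape it.
    tera.add_raw_template("page.html", "{{ content | markdown | safe }}")?;
    Ok(tera)
}
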
@@ -1,7 +0,0 @@
-use tera::Tera;
-
-mod markdown;
-
-pub fn register_all(tera: &mut Tera) {
-    tera_text_filters::register_all(tera);
-    tera.register_filter("markdown", markdown::Markdown);
-}

@@ -1,7 +0,0 @@
-use tera::Tera;
-
-mod filters;
-
-pub fn register_all(tera: &mut Tera) {
-    filters::register_all(tera);
-}

@@ -1,126 +0,0 @@
-use std::{path::PathBuf, sync::Arc};
-
-use futures::future;
-use miette::{IntoDiagnostic, Result};
-use tera::{Context as TeraContext, Tera};
-use tokio::{fs, sync::Mutex};
-
-use crate::{
-    context::Context,
-    data::{load_page, FolderData},
-};
-
-use self::style::{load_stylesheets, Stylesheets};
-
-mod style;
-
-// renders content using the given template folder
-pub struct ContentRenderer {
-    template_glob: String,
-    ctx: Arc<Context>,
-    styles: Arc<Mutex<Stylesheets>>,
-}
-
-impl ContentRenderer {
-    pub async fn new(ctx: Arc<Context>) -> Result<Self> {
-        let template_glob = format!("{}/**/*", ctx.template_dir.to_string_lossy());
-        let styles = load_stylesheets(&ctx.stylesheet_dir).await?;
-
-        Ok(Self {
-            template_glob,
-            ctx,
-            styles: Arc::new(Mutex::new(styles)),
-        })
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    pub async fn render_all(&self, dirs: Vec<FolderData>) -> Result<()> {
-        if self.ctx.output_dir.exists() {
-            fs::remove_dir_all(&self.ctx.output_dir)
-                .await
-                .into_diagnostic()?;
-        }
-        let mut tera = Tera::new(&self.template_glob).into_diagnostic()?;
-        super::processors::register_all(&mut tera);
-        future::try_join_all(dirs.into_iter().map(|data| self.render_folder(&tera, data))).await?;
-
-        Ok(())
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn render_folder(&self, tera: &Tera, data: FolderData) -> Result<()> {
-        let dir_name = data
-            .path
-            .components()
-            .last()
-            .unwrap()
-            .as_os_str()
-            .to_string_lossy();
-        let default_template = data
-            .index
-            .default_template
-            .to_owned()
-            .unwrap_or(dir_name.into());
-        future::try_join_all(
-            data.pages
-                .into_iter()
-                .map(|page| self.render_page(tera, default_template.clone(), page)),
-        )
-        .await?;
-
-        Ok(())
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn render_page(
-        &self,
-        tera: &Tera,
-        default_template: String,
-        page_path: PathBuf,
-    ) -> Result<()> {
-        tracing::debug!("Rendering {page_path:?}");
-        let page = load_page(&page_path).await?;
-        let mut context = TeraContext::new();
-        let mut template_name = default_template;
-        let mut style_name = template_name.to_owned();
-
-        match page {
-            crate::data::Page::Data(data) => {
-                if let Some(tmpl) = data.metadata.template {
-                    template_name = tmpl.to_owned();
-                    style_name = tmpl;
-                }
-                context.insert("data", &data.data);
-            }
-            crate::data::Page::Content(content) => context.insert("content", &content),
-        }
-        {
-            let mut styles = self.styles.lock().await;
-            let style_embed = styles
-                .get_style_embed(&style_name, &self.ctx.output_dir)
-                .await?;
-            context.insert("style", &style_embed);
-        };
-        tracing::debug!("context = {context:?}");
-
-        let html = tera
-            .render(&format!("{template_name}.html"), &context)
-            .into_diagnostic()?;
-        let rel_path = page_path
-            .strip_prefix(&self.ctx.content_dir)
-            .into_diagnostic()?;
-        let mut out_path = self.ctx.output_dir.join(rel_path);
-        out_path.set_extension("html");
-        let parent = out_path.parent().unwrap();
-
-        if !parent.exists() {
-            fs::create_dir_all(parent).await.into_diagnostic()?;
-        }
-        fs::write(out_path, html).await.into_diagnostic()?;
-
-        Ok(())
-    }
-}

@@ -1,103 +0,0 @@
-use std::{
-    collections::HashMap,
-    path::{Path, PathBuf},
-};
-
-use async_walkdir::WalkDir;
-use futures::StreamExt;
-use miette::{IntoDiagnostic, Result};
-use rsass::output::Format;
-use tokio::fs;
-
-const DEFAULT_SHEET_NAME: &str = "style";
-const EMBED_THRESHOLD: usize = 512;
-
-pub struct Stylesheets {
-    page_styles: HashMap<String, PathBuf>,
-    processed_styles: HashMap<String, String>,
-}
-
-#[tracing::instrument(level = "trace")]
-pub async fn load_stylesheets(base_dir: &PathBuf) -> Result<Stylesheets> {
-    let mut entries = WalkDir::new(base_dir);
-    let mut page_styles = HashMap::new();
-    let empty_path = PathBuf::new();
-
-    while let Some(res) = entries.next().await {
-        match res {
-            Ok(entry) => {
-                let entry_path = entry.path();
-
-                if entry_path.is_file() {
-                    let rel_path = entry_path.strip_prefix(base_dir).into_diagnostic()?;
-
-                    if let Some(file_name) = entry_path.file_stem() {
-                        let file_name = rel_path.parent().unwrap_or(&empty_path).join(file_name);
-                        let file_name = file_name.to_string_lossy().into_owned();
-                        page_styles.insert(file_name, entry_path.to_owned());
-                    }
-                }
-            }
-            Err(e) => return Err(e).into_diagnostic(),
-        }
-    }
-    tracing::debug!("Styles {page_styles:?}");
-
-    Ok(Stylesheets {
-        page_styles,
-        processed_styles: HashMap::new(),
-    })
-}
-
-impl Stylesheets {
-    #[tracing::instrument(level = "trace", skip(self, out_dir))]
-    pub async fn get_style_embed(&mut self, name: &str, out_dir: &Path) -> Result<String> {
-        let mut styles: Vec<String> = Vec::with_capacity(2);
-
-        if let Some(default_style) = self
-            .get_processed_style(DEFAULT_SHEET_NAME, out_dir)
-            .await?
-        {
-            styles.push(default_style);
-        }
-        if let Some(style) = self.get_processed_style(name, out_dir).await? {
-            styles.push(style);
-        }
-
-        Ok(styles.join(""))
-    }
-
-    #[tracing::instrument(level = "trace", skip(self, out_dir))]
-    async fn get_processed_style(&mut self, name: &str, out_dir: &Path) -> Result<Option<String>> {
-        if let Some(processed) = self.processed_styles.get(name) {
-            Ok(Some(processed.to_owned()))
-        } else if let Some(source) = self.page_styles.get(name) {
-            let format = Format {
-                style: rsass::output::Style::Compressed,
-                ..Default::default()
-            };
-            let style_contents = rsass::compile_scss_path(source, format).into_diagnostic()?;
-
-            let style_html = if style_contents.len() < EMBED_THRESHOLD {
-                let utf_contents = String::from_utf8(style_contents).into_diagnostic()?;
-                format!(r#"<style type="text/css">{utf_contents}</style>"#)
-            } else {
-                let output_path = out_dir.join(name).with_extension("css");
-                let parent = output_path.parent().unwrap();
-
-                if !parent.exists() {
-                    fs::create_dir_all(parent).await.into_diagnostic()?;
-                }
-                fs::write(output_path, style_contents)
-                    .await
-                    .into_diagnostic()?;
-
-                format!(r#"<link rel="stylesheet" href="/{name}.css">"#)
-            };
-            self.processed_styles
-                .insert(name.to_owned(), style_html.to_owned());
-
-            Ok(Some(style_html))
-        } else {
-            Ok(None)
-        }
-    }
-}
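
Note for anyone reintroducing the removed stylesheet handling: compiled CSS shorter than `EMBED_THRESHOLD` (512 bytes) was inlined into the page, anything larger was written to `<out_dir>/<name>.css` and linked. The choice reduces to roughly this hypothetical helper (the real code also cached results and wrote the file to disk):

fn style_html(name: &str, compiled_css: &[u8]) -> String {
    if compiled_css.len() < 512 {
        format!(
            r#"<style type="text/css">{}</style>"#,
            String::from_utf8_lossy(compiled_css)
        )
    } else {
        format!(r#"<link rel="stylesheet" href="/{name}.css">"#)
    }
}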