Compare commits

...

7 commits

Author SHA1 Message Date
KitsuneCafe d77e4d4b81 finally 2024-02-14 20:17:32 -05:00
KitsuneCafe 26f74906c1 finally fix context :neofox_googly_shocked: 2024-02-13 09:34:07 -05:00
KitsuneCafe 09ffd82157 Add new filter; fixing context (as always xwx) 2024-02-11 06:45:28 -05:00
KitsuneCafe 90ac6e07c9 remove unnecessary library 2024-02-11 00:29:14 -05:00
KitsuneCafe 0fc188dbb8 improve context storage 2024-02-10 23:38:29 -05:00
KitsuneCafe a5889e0368 oops 2024-02-05 05:40:23 -05:00
KitsuneCafe 6f8a28575d borrow checker doesnt let me have fun 2024-02-05 05:35:12 -05:00
10 changed files with 425 additions and 82 deletions

42
Cargo.lock generated
View file

@@ -48,9 +48,9 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.5"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2faccea4cc4ab4a667ce676a30e8ec13922a692c99bb8f5b11f1502c72e04220"
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
[[package]]
name = "anstyle-parse"
@@ -598,9 +598,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.7.6"
version = "2.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f200d8d83c44a45b21764d1916299752ca035d15ecd46faca3e9a2a2bf6ad06"
checksum = "219c0dcc30b6a27553f9cc242972b67f75b60eb0db71f0b5462f38b058c41546"
dependencies = [
"memchr",
"thiserror",
@@ -609,9 +609,9 @@ dependencies = [
[[package]]
name = "pest_derive"
version = "2.7.6"
version = "2.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcd6ab1236bbdb3a49027e920e693192ebfe8913f6d60e294de57463a493cfde"
checksum = "22e1288dbd7786462961e69bfd4df7848c1e37e8b74303dbdab82c3a9cdd2809"
dependencies = [
"pest",
"pest_generator",
@@ -619,9 +619,9 @@ dependencies = [
[[package]]
name = "pest_generator"
version = "2.7.6"
version = "2.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a31940305ffc96863a735bef7c7994a00b325a7138fdbc5bda0f1a0476d3275"
checksum = "1381c29a877c6d34b8c176e734f35d7f7f5b3adaefe940cb4d1bb7af94678e2e"
dependencies = [
"pest",
"pest_meta",
@@ -632,9 +632,9 @@ dependencies = [
[[package]]
name = "pest_meta"
version = "2.7.6"
version = "2.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7ff62f5259e53b78d1af898941cdcdccfae7385cf7d793a6e55de5d05bb4b7d"
checksum = "d0934d6907f148c22a3acbda520c7eed243ad7487a30f51f6ce52b58b7077a8a"
dependencies = [
"once_cell",
"pest",
@@ -827,6 +827,7 @@ dependencies = [
"roxy_syntect",
"roxy_tera_parser",
"serde",
"slugify",
"syntect",
"tera",
"toml",
@@ -835,7 +836,7 @@ dependencies = [
[[package]]
name = "roxy_core"
version = "0.1.0"
source = "git+https://fem.mint.lgbt/kitsunecafe/roxy-core.git#3059a3b77492be751cba36d012557bd9622c62a7"
source = "git+https://fem.mint.lgbt/kitsunecafe/roxy-core.git#7839db8b062698adfe81a86d2a0cf041a6711456"
[[package]]
name = "roxy_markdown_parser"
@@ -859,7 +860,7 @@ dependencies = [
[[package]]
name = "roxy_syntect"
version = "0.1.0"
source = "git+https://fem.mint.lgbt/kitsunecafe/roxy-syntect.git#39c3a9e4b2f294936df5141b717efcbbe22deea7"
source = "git+https://fem.mint.lgbt/kitsunecafe/roxy-syntect.git#48601fc5e6e0ee0e753c892f0eb42a9b0b48be99"
dependencies = [
"once_cell",
"regex",
@@ -870,7 +871,7 @@ dependencies = [
[[package]]
name = "roxy_tera_parser"
version = "0.1.0"
source = "git+https://fem.mint.lgbt/kitsunecafe/roxy-tera-parser.git#d7a364b1af1c2ec400d951fcd41560f929feb12c"
source = "git+https://fem.mint.lgbt/kitsunecafe/roxy-tera-parser.git#13868747c22d09c6cd61bebb4dbb69e299be6bdb"
dependencies = [
"once_cell",
"regex",
@@ -966,6 +967,15 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "slugify"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6b8cf203d2088b831d7558f8e5151bfa420c57a34240b28cee29d0ae5f2ac8b"
dependencies = [
"unidecode",
]
[[package]]
name = "strsim"
version = "0.10.0"
@@ -1194,6 +1204,12 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
[[package]]
name = "unidecode"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "402bb19d8e03f1d1a7450e2bd613980869438e0666331be3e073089124aa1adc"
[[package]]
name = "utf8parse"
version = "0.2.1"

View file

@@ -15,4 +15,5 @@ clap = { version = "4.4.17", features = ["derive"] }
toml = "0.8.8"
tera = "1.19.1"
serde = "1.0.195"
slugify = "0.1.0"

View file

@@ -15,6 +15,8 @@ Roxy will read each file from `INPUT` for valid files. It will look for `toml` f
Currently, Roxy only has two configuration keys
```toml
# config.toml
[syntect]
theme = "base16-ocean.dark" # the name of the theme for syntax highlighting
themes = ["./themes/base16-ocean.dark.tmTheme"]
```

View file

@@ -1,27 +1,88 @@
use serde::Deserialize;
use std::str::FromStr;
use serde::Deserialize;
use crate::DEFAULT_THEME;
#[derive(Debug, Default, Deserialize)]
pub(crate) struct Config {
pub theme: Option<String>,
pub themes: Vec<String>,
pub(crate) trait Merge {
fn merge(self, other: Self) -> Self;
}
impl Config {
pub fn merge(self, other: Config) -> Self {
fn merge_opts<M: Merge>(a: Option<M>, b: Option<M>) -> Option<M> {
match (a, b) {
(Some(a), Some(b)) => Some(a.merge(b)),
(None, Some(b)) => Some(b),
(Some(a), None) => Some(a),
_ => None,
}
}
#[derive(Debug)]
pub(crate) struct Config {
pub roxy: RoxyConfig,
pub syntect: SyntectConfig
}
impl From<ConfigDeserializer> for Config {
fn from(value: ConfigDeserializer) -> Self {
Self {
theme: self.theme.or(other.theme),
themes: if self.themes.is_empty() {
other.themes
} else {
self.themes
},
roxy: value.roxy.map_or_else(Default::default, From::from),
syntect: value.syntect.map_or_else(Default::default, From::from)
}
}
}
impl FromStr for Config {
#[derive(Debug)]
pub(crate) struct RoxyConfig {
pub slug_word_limit: usize
}
impl From<RoxyConfigDeserializer> for RoxyConfig {
fn from(value: RoxyConfigDeserializer) -> Self {
Self {
slug_word_limit: value.slug_word_limit.unwrap_or(24usize)
}
}
}
impl Default for RoxyConfig {
fn default() -> Self {
Self {
slug_word_limit: 24usize
}
}
}
#[derive(Debug)]
pub(crate) struct SyntectConfig {
pub theme: String,
pub theme_dir: Option<String>
}
impl From<SyntectConfigDeserializer> for SyntectConfig {
fn from(value: SyntectConfigDeserializer) -> Self {
Self {
theme: value.theme.unwrap_or(DEFAULT_THEME.into()),
theme_dir: value.theme_dir
}
}
}
impl Default for SyntectConfig {
fn default() -> Self {
Self {
theme: DEFAULT_THEME.into(),
theme_dir: None
}
}
}
#[derive(Debug, Default, Deserialize)]
pub(crate) struct ConfigDeserializer {
pub roxy: Option<RoxyConfigDeserializer>,
pub syntect: Option<SyntectConfigDeserializer>,
}
impl FromStr for ConfigDeserializer {
type Err = toml::de::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
@@ -29,3 +90,46 @@ impl FromStr for Config {
}
}
impl Merge for ConfigDeserializer {
fn merge(self, other: Self) -> Self {
Self {
roxy: merge_opts(self.roxy, other.roxy),
syntect: merge_opts(self.syntect, other.syntect),
}
}
}
#[derive(Debug, Deserialize)]
pub(crate) struct RoxyConfigDeserializer {
pub slug_word_limit: Option<usize>,
}
impl Default for RoxyConfigDeserializer {
fn default() -> Self {
Self {
slug_word_limit: Some(8usize),
}
}
}
impl Merge for RoxyConfigDeserializer {
fn merge(self, other: Self) -> Self {
Self {
slug_word_limit: self.slug_word_limit.or(other.slug_word_limit),
}
}
}
#[derive(Debug, Default, Deserialize)]
pub(crate) struct SyntectConfigDeserializer {
pub theme: Option<String>,
pub theme_dir: Option<String>,
}
impl Merge for SyntectConfigDeserializer {
fn merge(self, other: SyntectConfigDeserializer) -> Self {
Self {
theme: self.theme.or(other.theme),
theme_dir: self.theme_dir.or(other.theme_dir),
}
}
}
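
The config changes above split parsing from defaults: the `*Deserializer` structs with all-`Option` fields are what `toml` actually parses, `Merge` fills any holes from a second instance, and `From` converts the result into a fully-populated `Config`. A minimal self-contained sketch of the same pattern (the type names here are illustrative, not the crate's API; the 24-word default mirrors the diff):

```rust
use serde::Deserialize;

// All-optional mirror of the config; this is what the TOML file deserializes into.
#[derive(Debug, Default, Deserialize)]
struct PartialConfig {
    slug_word_limit: Option<usize>,
}

// Fully-populated config used by the rest of the program.
#[derive(Debug)]
struct AppConfig {
    slug_word_limit: usize,
}

impl PartialConfig {
    // Prefer values from `self`, fall back to `other` -- the same shape as the `Merge` trait.
    fn merge(self, other: Self) -> Self {
        Self {
            slug_word_limit: self.slug_word_limit.or(other.slug_word_limit),
        }
    }
}

impl From<PartialConfig> for AppConfig {
    fn from(p: PartialConfig) -> Self {
        Self {
            slug_word_limit: p.slug_word_limit.unwrap_or(24),
        }
    }
}

fn main() {
    // An empty config file parses fine because every field is optional.
    let parsed: PartialConfig = toml::from_str("").unwrap();
    let config: AppConfig = parsed.merge(PartialConfig::default()).into();
    assert_eq!(config.slug_word_limit, 24);
}
```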

View file

@@ -1,39 +1,97 @@
use std::path::{Path, MAIN_SEPARATOR_STR};
use std::{path::Path, borrow::BorrowMut};
#[derive(Debug)]
use tera::{Map, Value};
use crate::iter_ext::Head;
fn merge(a: &mut Value, b: Value) {
match (a, b) {
(&mut Value::Object(ref mut a), Value::Object(b)) => {
b.into_iter().for_each(|(k, v)| {
merge(a.entry(k).or_insert(Value::Null), v);
});
}
(a, b) => {
*a = b;
}
}
}
#[derive(Clone, Debug)]
pub(crate) struct Context {
inner: tera::Context,
inner: tera::Value,
}
impl Context {
pub fn new() -> Self {
Self {
inner: tera::Context::new(),
inner: tera::Value::Null
}
}
pub fn to_inner(&self) -> &tera::Context {
&self.inner
pub fn merge(&mut self, other: tera::Context) {
merge(
self.inner.borrow_mut(),
other.into_json()
)
}
pub fn normalize_path<P: AsRef<Path>>(path: &P) -> String {
path.as_ref()
fn path_to_string<P: AsRef<Path>>(path: &P) -> String {
path.as_ref().to_string_lossy().to_string()
}
fn create_path<P: AsRef<Path>>(path: &P, value: &Value) -> Option<Value> {
let path = path.as_ref();
let (head, tail) = path.components().head()?;
let mut map = Map::new();
if tail.clone().count() > 0 {
let child = Self::create_path(&tail, value)?;
map.insert(Self::path_to_string(&head), child);
} else {
let key = Self::path_to_string(&path.with_extension("").file_name()?);
map.insert(key, value.to_owned());
}
Some(map.into())
}
pub fn insert<P: AsRef<Path>>(&mut self, path: &P, value: &Value) {
let path = path.as_ref();
let path = if path
.with_extension("")
.to_string_lossy()
.trim()
.split(MAIN_SEPARATOR_STR)
.fold(String::new(), |a, b| format!("{a}.{b}"))
.trim_matches('.')
.to_string()
.file_name()
.map_or(false, |f| f.to_os_string() == "index")
{
path.parent()
} else {
Some(path)
};
if let Some(v) = Self::create_path(&path.unwrap(), value) {
if let Ok(ctx) = tera::Context::from_value(v) {
self.merge(ctx);
}
}
}
pub fn insert<P: AsRef<Path>>(&mut self, path: &P, value: &tera::Value) {
self.inner
.insert(Self::normalize_path(path).trim_start_matches('.'), &value);
}
pub fn get<P: AsRef<Path>>(&self, path: &P) -> Option<&tera::Value> {
self.inner.get(&Self::normalize_path(path))
pub fn get<P: AsRef<Path>>(&self, _path: &P) -> Option<&Value> {
None
}
}
impl Into<tera::Value> for Context {
fn into(self) -> tera::Value {
self.inner
}
}
impl TryInto<tera::Context> for Context {
type Error = tera::Error;
fn try_into(self) -> Result<tera::Context, Self::Error> {
tera::Context::from_value(self.inner)
}
}
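
The `Context` rewrite keeps everything in a single `tera::Value` tree and deep-merges objects key by key, so metadata from nested files ends up nested under matching keys. A self-contained sketch of that recursive merge, written against `serde_json::Value` (which is what `tera::Value` re-exports); the sample keys are made up:

```rust
use serde_json::{json, Value};

// Recursively merge `b` into `a`: objects merge key by key, everything else is overwritten.
fn merge(a: &mut Value, b: Value) {
    match (a, b) {
        (Value::Object(a), Value::Object(b)) => {
            for (k, v) in b {
                merge(a.entry(k).or_insert(Value::Null), v);
            }
        }
        (a, b) => *a = b,
    }
}

fn main() {
    let mut ctx = json!({ "posts": { "hello": { "title": "Hello" } } });
    merge(
        &mut ctx,
        json!({ "posts": { "hello": { "path": "/posts/hello" } } }),
    );
    // Both keys survive under the same nested object.
    assert_eq!(ctx["posts"]["hello"]["title"], "Hello");
    assert_eq!(ctx["posts"]["hello"]["path"], "/posts/hello");
}
```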

View file

@@ -1,11 +1,16 @@
use std::path::{PathBuf, Path, StripPrefixError};
use roxy_core::error::Error;
use slugify::slugify;
use std::{
ffi::OsStr,
path::{Path, PathBuf, StripPrefixError},
};
#[derive(Debug, Clone)]
pub(crate) struct FilePath<'a, P: AsRef<Path>> {
pub input: PathBuf,
pub root_dir: PathBuf,
pub output: &'a P,
pub slug_word_limit: usize,
}
impl<'a, P: AsRef<Path> + 'a> FilePath<'a, P> {
@@ -14,6 +19,7 @@ impl<'a, P: AsRef<Path> + 'a> FilePath<'a, P> {
input: Self::make_recursive(input),
root_dir: Self::strip_wildcards(input),
output,
slug_word_limit: Default::default(),
}
}
@@ -34,10 +40,37 @@ impl<'a, P: AsRef<Path> + 'a> FilePath<'a, P> {
.map_or_else(|| PathBuf::new(), PathBuf::from)
}
pub fn as_slug<P2: AsRef<Path> + ?Sized>(&self, path: &P2) -> Option<PathBuf> {
let path = path.as_ref();
let ext = path.extension();
let file_name: Option<PathBuf> = path
.with_extension("")
.file_name()
.or_else(|| Some(OsStr::new("")))
.and_then(OsStr::to_str)
.map(|name| slugify!(name, separator = "-"))
.map(|f| {
f.split_terminator('-')
.take(self.slug_word_limit)
.collect::<Vec<&str>>()
.join("-")
})
.map(PathBuf::from)
.map(|f| f.with_extension(ext.unwrap_or_default()));
match (path.parent(), file_name) {
(Some(parent), Some(name)) => Some(parent.join(name)),
(None, Some(name)) => Some(PathBuf::from(name)),
(Some(parent), None) => Some(parent.to_path_buf()),
_ => None,
}
}
pub fn to_output<P2: AsRef<Path>>(&self, value: &'a P2) -> Result<PathBuf, Error> {
value
.as_ref()
.strip_prefix(&self.root_dir)
.map(|p| self.as_slug(p).expect("could not slugify path"))
.map(|path| self.output.as_ref().join(path))
.map_err(Error::from)
}
@@ -46,6 +79,3 @@ impl<'a, P: AsRef<Path> + 'a> FilePath<'a, P> {
value.as_ref().strip_prefix(&self.root_dir)
}
}
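
`as_slug` slugifies just the file stem, keeps at most `slug_word_limit` hyphen-separated words, then reattaches the original extension and parent directory. A standalone sketch of the truncation step (the stem is assumed to already be slugified; the paths and the limit of 3 are made up):

```rust
use std::path::{Path, PathBuf};

// Keep at most `limit` hyphen-separated words of an already-slugified file stem,
// then restore the original extension and parent directory.
fn truncate_slug(path: &Path, limit: usize) -> PathBuf {
    let ext = path.extension();
    let stem_path = path.with_extension("");
    let stem = stem_path.file_name().and_then(|s| s.to_str()).unwrap_or("");
    let short: Vec<&str> = stem.split_terminator('-').take(limit).collect();
    let name = PathBuf::from(short.join("-")).with_extension(ext.unwrap_or_default());
    match path.parent() {
        Some(parent) => parent.join(name),
        None => name,
    }
}

fn main() {
    let path = Path::new("posts/my-very-long-post-title.md");
    assert_eq!(truncate_slug(path, 3), PathBuf::from("posts/my-very-long.md"));
}
```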

15
src/functions.rs Normal file
View file

@@ -0,0 +1,15 @@
use std::collections::HashMap;
use tera::{to_value, try_get_value, Map, Result, Tera, Value};
pub fn values(value: &Value, _args: &HashMap<String, Value>) -> Result<Value> {
let arr = try_get_value!("values", "value", Map<String, Value>, value)
.into_iter()
.map(|(_, x)| x)
.collect();
Ok(to_value::<Value>(arr)?)
}
pub fn register_functions(tera: &mut Tera) {
tera.register_filter("values", values);
}
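
The `values` filter drops a map's keys and returns just its values as an array, which lets a template iterate over pages gathered under a directory key. A sketch of how it might be used once registered (the template and data are hypothetical, and `serde_json::json!` is used only for brevity):

```rust
use std::collections::HashMap;
use tera::{to_value, try_get_value, Context, Map, Result, Tera, Value};

// Same filter as in src/functions.rs: discard the keys, keep the values.
fn values(value: &Value, _args: &HashMap<String, Value>) -> Result<Value> {
    let arr: Vec<Value> = try_get_value!("values", "value", Map<String, Value>, value)
        .into_iter()
        .map(|(_, v)| v)
        .collect();
    Ok(to_value(arr)?)
}

fn main() -> Result<()> {
    let mut tera = Tera::default();
    tera.register_filter("values", values);

    let mut ctx = Context::new();
    ctx.insert(
        "posts",
        &serde_json::json!({
            "a": { "title": "First" },
            "b": { "title": "Second" }
        }),
    );

    let out = tera.render_str(
        "{% for post in posts | values %}{{ post.title }} {% endfor %}",
        &ctx,
    )?;
    assert_eq!(out, "First Second ");
    Ok(())
}
```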

56
src/iter_ext.rs Normal file
View file

@@ -0,0 +1,56 @@
pub(crate) trait Head: Iterator {
fn head(self) -> Option<(Self::Item, Self)>
where
Self: Sized;
}
impl<I: Iterator> Head for I {
fn head(mut self) -> Option<(Self::Item, Self)> {
match self.next() {
Some(x) => Some((x, self)),
None => None,
}
}
}
#[derive(Debug)]
pub(crate) struct MapFold<I, F, V> {
iter: I,
f: F,
accum: V,
}
impl<I, F, V> MapFold<I, F, V> {
pub fn new(iter: I, init: V, f: F) -> MapFold<I, F, V> {
Self {
iter,
f,
accum: init,
}
}
}
impl<I: Iterator, F: FnMut(&V, &I::Item) -> V, V: Clone> Iterator for MapFold<I, F, V> {
type Item = V;
fn next(&mut self) -> Option<Self::Item> {
self.accum = (self.f)(&self.accum, &self.iter.next()?);
Some(self.accum.clone())
}
}
pub(crate) trait MapFoldExt {
type Item;
fn map_fold<B, F>(self, init: B, f: F) -> MapFold<Self, F, B>
where
Self: Sized,
F: FnMut(&B, &Self::Item) -> B,
{
MapFold::new(self, init, f)
}
}
impl<I: Iterator> MapFoldExt for I {
type Item = I::Item;
}
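
`Head` splits an iterator into its first item plus the rest, and `MapFold` yields every intermediate accumulator of a fold. Assuming the two `pub(crate)` extension traits above are in scope, a quick sketch of what they do:

```rust
// Illustrative only; `Head` and `MapFoldExt` come from src/iter_ext.rs above.
fn main() {
    // Head: the first item plus an iterator over the remainder.
    let (first, rest) = "a/b/c".split('/').head().unwrap();
    assert_eq!(first, "a");
    assert_eq!(rest.collect::<Vec<_>>(), vec!["b", "c"]);

    // MapFold: every intermediate fold state (here, running totals).
    let sums: Vec<i32> = [1, 2, 3, 4].iter().map_fold(0, |acc, x| acc + *x).collect();
    assert_eq!(sums, vec![1, 3, 6, 10]);
}
```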

View file

@@ -1,10 +1,14 @@
pub mod config;
pub mod context;
mod file_path;
pub mod functions;
mod iter_ext;
mod result_ext;
use config::Config;
use config::{Config, ConfigDeserializer, Merge};
use context::Context;
use file_path::FilePath;
use functions::register_functions;
use roxy_core::error::Error;
use roxy_markdown_parser::MarkdownParser;
@@ -19,11 +23,17 @@ use std::{
io::{BufReader, Read},
path::{Path, PathBuf},
};
use tera::to_value;
use syntect::{highlighting::ThemeSet, parsing::SyntaxSet};
use toml::Table;
use glob::glob;
use roxy_core::roxy::{Parser, Roxy};
const DEFAULT_THEME: &'static str = "base16-ocean.dark";
const CONTENT_EXT: [&'static str; 4] = ["md", "tera", "html", "htm"];
fn handle_err<E: std::error::Error + 'static>(err: E) -> Error {
Error::new(err.to_string(), err)
}
@@ -53,15 +63,40 @@ fn get_files<P: AsRef<Path> + std::fmt::Debug>(path: &P) -> Result<Vec<PathBuf>,
Ok(files)
}
fn try_find_file(path: &Path) -> Option<PathBuf> {
if let Some(file_name) = path.file_name() {
let mut path = path;
let mut result = None;
while let Some(parent) = path.parent() {
let file = parent.with_file_name(file_name);
if file.is_file() {
result = Some(file);
break;
}
path = parent;
}
result
} else {
None
}
}
fn load_config(path: &Path) -> Config {
fs::read_to_string(path).map_or_else(
|_| Config::default(),
|f| {
toml::from_str::<Config>(f.as_str())
.unwrap()
.merge(Config::default())
},
)
try_find_file(path)
.and_then(|p| fs::read_to_string(p).ok())
.map_or_else(
|| ConfigDeserializer::default(),
|f| {
toml::from_str::<ConfigDeserializer>(f.as_str())
.unwrap()
.merge(ConfigDeserializer::default())
},
)
.into()
}
fn context_from_meta_files<'a, T: AsRef<Path>>(
@@ -78,16 +113,26 @@ fn context_from_meta_files<'a, T: AsRef<Path>>(
let mut str = String::from_utf8(buf).map_err(handle_err)?;
let toml: Table = toml::from_str(&mut str).map_err(handle_err)?;
context.insert(
&file_path.strip_root(path)?,
&tera::to_value(toml).map_err(handle_err)?,
);
let path = file_path.strip_root(path)?;
context.insert(&path, &tera::to_value(toml).map_err(handle_err)?);
let path = path.with_file_name("");
if let Some(slug) = file_path.as_slug(&path) {
let slug = PathBuf::from("/").join(slug);
if let Ok(slug) = to_value(slug) {
context.insert(&path.join("path"), &slug);
}
}
}
Ok(context)
}
fn copy_static<T: AsRef<Path>>(files: &Vec<&PathBuf>, file_path: &FilePath<T>) -> Result<(), Error> {
fn copy_static<T: AsRef<Path>>(
files: &Vec<&PathBuf>,
file_path: &FilePath<T>,
) -> Result<(), Error> {
for file in files {
let output = file_path.to_output(file)?;
fs::create_dir_all(output.parent().unwrap())?;
@@ -97,31 +142,37 @@ fn copy_static<T: AsRef<Path>>(files: &Vec<&PathBuf>, file_path: &FilePath<T>) -
Ok(())
}
const DEFAULT_THEME: &'static str = "base16-ocean.dark";
const CONTENT_EXT: [&'static str; 4] = ["md", "tera", "html", "htm"];
fn main() -> Result<(), Box<dyn std::error::Error>> {
let opts = Options::parse();
let file_path = FilePath::new(&opts.input, &opts.output);
let mut file_path = FilePath::new(&opts.input, &opts.output);
let config_path = file_path.input.with_file_name("config.toml");
let config = load_config(&config_path);
file_path.slug_word_limit = config.roxy.slug_word_limit;
let file_path = file_path;
let files = get_files(&file_path.input)?;
let (meta, files): (Vec<&PathBuf>, Vec<&PathBuf>) =
files.iter().partition(|f| f.extension().unwrap() == "toml");
let (content, files): (Vec<&PathBuf>, Vec<&PathBuf>) = files
.iter()
.partition(|f| {
let ext = f.extension().and_then(ffi::OsStr::to_str).unwrap();
CONTENT_EXT.contains(&ext)
});
let (content, files): (Vec<&PathBuf>, Vec<&PathBuf>) = files.iter().partition(|f| {
let ext = f.extension().and_then(ffi::OsStr::to_str).unwrap();
CONTENT_EXT.contains(&ext)
});
let mut context = context_from_meta_files(&meta, &file_path)?;
let mut context: Context = context_from_meta_files(&meta, &file_path)?;
let theme = config.theme.unwrap_or(DEFAULT_THEME.to_string());
let theme = config.syntect.theme;
let syntax_set = SyntaxSet::load_defaults_newlines();
let theme_set = if let Some(dir) = config.syntect.theme_dir {
ThemeSet::load_from_folder(dir)?
} else {
ThemeSet::load_defaults()
};
for file in content {
let file_name = file.with_extension("html");
@@ -131,7 +182,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut preformatter = MarkdownTeraPreformatter::new();
parser.push(&mut preformatter);
let mut syntect = SyntectParser::new(theme.as_str());
let mut syntect = SyntectParser::new(&syntax_set, &theme_set, theme.as_str());
parser.push(&mut syntect);
let mut md = MarkdownParser::new();
@@ -140,8 +191,6 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut rewriter = MarkdownTeraRewriter::new();
parser.push(&mut rewriter);
let file_name = file.with_extension("html");
if let Ok(path) = &file_path.strip_root(&file_name) {
if let Some(current_context) = context.get(path) {
context.insert(&"this", &current_context.clone());
@ -149,10 +198,13 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
}
let mut tera = tera::Tera::default();
register_functions(&mut tera);
let mut html = TeraParser::new(&mut tera, TeraParserOptions::default());
html.add_context(context.to_inner());
let ctx: tera::Context = context.clone().try_into().unwrap();
html.add_context(&ctx);
parser.push(&mut html);
//println!("{output_path:?}");
Roxy::process_file(&file, &output_path, &mut parser).unwrap();
}
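
`try_find_file` relies on `Path::with_file_name`, which swaps out the last component of its receiver, so the `parent()` loop effectively probes each ancestor directory for a file with the same name. A short demonstration of that path arithmetic with a made-up glob path:

```rust
use std::path::Path;

fn main() {
    let path = Path::new("site/posts/**/config.toml");
    let file_name = path.file_name().unwrap();

    // First iteration: the parent is "site/posts/**", and with_file_name
    // swaps its last component ("**") for "config.toml".
    let parent = path.parent().unwrap();
    assert_eq!(parent.with_file_name(file_name), Path::new("site/posts/config.toml"));

    // Next iteration walks one level further up.
    let parent = parent.parent().unwrap();
    assert_eq!(parent.with_file_name(file_name), Path::new("site/config.toml"));
}
```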

9
src/result_ext.rs Normal file
View file

@@ -0,0 +1,9 @@
pub(crate) trait ResultExt<T, E>: Sized {
fn then_err_into<U, E2: From<E>>(self, op: impl FnOnce(T) -> Result<U, E2>) -> Result<U, E2>;
}
impl<T, E> ResultExt<T, E> for Result<T, E> {
fn then_err_into<U, E2: From<E>>(self, op: impl FnOnce(T) -> Result<U, E2>) -> Result<U, E2> {
op(self?)
}
}
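
`then_err_into` chains a fallible step onto a `Result` while converting the original error through `From`, which avoids an explicit `map_err` before `?`. A usage sketch, assuming the trait is in scope and using a made-up error type:

```rust
use std::num::ParseIntError;

#[derive(Debug)]
enum AppError {
    Parse(ParseIntError),
    OutOfRange(u32),
}

impl From<ParseIntError> for AppError {
    fn from(e: ParseIntError) -> Self {
        AppError::Parse(e)
    }
}

// Parse, then validate, with both failure modes collapsed into `AppError`.
fn small_number(s: &str) -> Result<u32, AppError> {
    s.parse::<u32>()
        .then_err_into(|n| if n < 100 { Ok(n) } else { Err(AppError::OutOfRange(n)) })
}

fn main() {
    assert!(matches!(small_number("42"), Ok(42)));
    assert!(matches!(small_number("x"), Err(AppError::Parse(_))));
    assert!(matches!(small_number("500"), Err(AppError::OutOfRange(500))));
}
```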