Tholp's bespoke website generator

Named sections

+1 -1
src/args.rs
···
-
use clap::{Args, Parser, Subcommand};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
···
+
use clap::{Parser, Subcommand};
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
-78
src/closures.rs
···
-
// Closures are essentially blocked macros that change behavior on symbol instead of name
-
// Instances of most types of closures can be named as sections ...
-
// ... to work with !insert() to pick certain parts out of a file
-
-
use boa_engine::Context;
-
-
use crate::{
-
project::Project,
-
types::{SkidContext, Token},
-
};
-
-
type ClosureFunction = fn(&[Token], &mut Project, &mut SkidContext) -> Vec<Token>;
-
-
pub struct Closure {
-
pub opener: &'static str,
-
pub opener2: &'static str,
-
pub closer: &'static str,
-
pub function: ClosureFunction,
-
}
-
-
// (opener) name (opener2) ... (closer)
-
-
// << js ! .. >>
-
// <!-- comment -->
-
// [ name {{ .. }}]
-
-
// <!-- ... --> comment
-
// ?name<< >> named js
-
// ?name[[ ]] named section
-
// ?<< >> js
-
// ?[[ ]] section
-
// ?name^[[]] named ephemeral section
-
// ?name-[[]] named inverted section
-
-
pub static CLOSURE_LIST: &'static [Closure] = &[
-
Closure {
-
opener: "?",
-
opener2: "<<",
-
closer: ">>",
-
function: closure_js,
-
},
-
Closure {
-
opener: "<!--",
-
opener2: "", // blank means it doesnt accept a name
-
closer: "-->",
-
function: closure_comment,
-
},
-
Closure {
-
opener: "?",
-
opener2: "{{",
-
closer: "}}",
-
function: closure_section,
-
},
-
];
-
-
fn closure_comment(
-
_tokens: &[Token],
-
_project_context: &mut Project,
-
_skid_context: &mut SkidContext,
-
) -> Vec<Token> {
-
Vec::new()
-
}
-
-
fn closure_section(
-
tokens: &[Token],
-
_project_context: &mut Project,
-
_skid_context: &mut SkidContext,
-
) -> Vec<Token> {
-
tokens.to_vec()
-
}
-
-
fn closure_js(
-
tokens: &[Token],
-
project_context: &mut Project,
-
skid_context: &mut SkidContext,
-
) -> Vec<Token> {
-
Vec::new()
-
}
···
+4
src/console.rs
···
msg
);
}
···
msg
);
}
+
+
pub fn info_generic(msg: &String) {
+
println!("{} {}", "[INFO]".purple(), msg);
+
}
+35 -7
src/macros/insert.rs
···
use crate::{
console::error_skid,
-
macros::template::SkidTemplate,
project::{Indexing, Project},
stringtools::split_to_tokens,
types::{SkidContext, Token},
···
pub fn macro_insert(
origin_index: usize,
origin_line: usize,
-
context: &mut Project,
-
_skid_context: &mut SkidContext,
args: &Vec<String>,
_scope: &[Token],
) -> Vec<Token> {
-
let origin_file = context
.file_for_index_canonical(origin_index)
.expect("Macro 'Insert' was given a bad origin index")
.clone();
let mut arg = args[0].clone();
let mut search_from_root = arg.starts_with("//");
let mut ok = false;
···
}
if search_from_root {
-
let mut include_path = context.input_folder.clone();
include_path.push(&arg);
if include_path.exists() && include_path.is_file() {
···
}
if !ok {
-
error_skid(context, origin_index, origin_line, &format!("Insert was unable to find the file \"{}\" relative to its origin or in project root.", arg));
}
let mut output = fs::read_to_string(&include_file).expect("File unreadable or missing");
···
output.pop();
} //remove trailing newlines
-
return split_to_tokens(output, context.index_of_file(&PathBuf::from(&include_file)));
}
···
use crate::{
console::error_skid,
+
process_skid,
project::{Indexing, Project},
stringtools::split_to_tokens,
types::{SkidContext, Token},
···
pub fn macro_insert(
origin_index: usize,
origin_line: usize,
+
proj_context: &mut Project,
+
skid_context: &mut SkidContext,
args: &Vec<String>,
_scope: &[Token],
) -> Vec<Token> {
+
let origin_file = proj_context
.file_for_index_canonical(origin_index)
.expect("Macro 'Insert' was given a bad origin index")
.clone();
+
let mut sections_ids_to_keep = Vec::new();
+
+
if args.len() > 1 {
+
for a in &args[1..] {
+
let id = proj_context.index_of_section_name(a);
+
sections_ids_to_keep.push(id);
+
}
+
}
+
let mut arg = args[0].clone();
let mut search_from_root = arg.starts_with("//");
let mut ok = false;
···
}
if search_from_root {
+
let mut include_path = proj_context.input_folder.clone();
include_path.push(&arg);
if include_path.exists() && include_path.is_file() {
···
}
if !ok {
+
error_skid(proj_context, origin_index, origin_line, &format!("Insert was unable to find the file \"{}\" relative to its origin or in project root.", arg));
}
let mut output = fs::read_to_string(&include_file).expect("File unreadable or missing");
···
output.pop();
} //remove trailing newlines
+
if sections_ids_to_keep.len() > 0 {
+
let mut processed = process_skid(
+
&split_to_tokens(
+
output,
+
proj_context.index_of_file(&PathBuf::from(&include_file)),
+
),
+
proj_context,
+
skid_context,
+
);
+
processed.retain(|t| sections_ids_to_keep.contains(&t.section_index));
+
for t in &mut processed {
+
t.pre_proccessed = true;
+
}
+
return processed;
+
} else {
+
return split_to_tokens(
+
output,
+
proj_context.index_of_file(&PathBuf::from(&include_file)),
+
);
+
}
}
+1 -1
src/macros/mod.rs
···
expansion: macro_insert,
takes_block: false,
min_args: 1,
-
max_args: 1,
},
Macro {
symbol: "time",
···
expansion: macro_insert,
takes_block: false,
min_args: 1,
+
max_args: usize::max_value(),
},
Macro {
symbol: "time",
+15 -7
src/macros/simple_blocks.rs
···
// This file is for implementations of short blocks; I'm qualifying that as less than 30ish lines
use crate::{
-
console::*,
-
project::Project,
-
stringtools::TokenTools,
types::{SkidContext, Token},
};
···
pub fn macro_section(
_origin_index: usize,
_origin_line: usize,
-
_context: &mut Project,
_skid_context: &mut SkidContext,
-
_args: &Vec<String>,
scope: &[Token],
) -> Vec<Token> {
let mut tokens = Vec::new();
-
for tok in scope {
-
tokens.push(tok.clone());
}
return tokens;
}
···
// This file is for implementations of short blocks; I'm qualifying that as less than 30ish lines
use crate::{
+
project::{Indexing, Project},
types::{SkidContext, Token},
};
···
pub fn macro_section(
_origin_index: usize,
_origin_line: usize,
+
proj_context: &mut Project,
_skid_context: &mut SkidContext,
+
args: &Vec<String>,
scope: &[Token],
) -> Vec<Token> {
let mut tokens = Vec::new();
+
if args.len() == 1 {
+
let section_index = proj_context.index_of_section_name(&args[0]);
+
for tok in scope {
+
let mut new = tok.clone();
+
new.section_index = section_index;
+
tokens.push(new);
+
}
+
} else {
+
for tok in scope {
+
tokens.push(tok.clone());
+
}
}
+
return tokens;
}
-1
src/macros/simple_macros.rs
···
use chrono::Local;
use crate::{
-
args,
console::{error_skid, reminder_skid, warn_skid},
project::{Indexing, Project},
stringtools::split_to_tokens,
···
use chrono::Local;
use crate::{
console::{error_skid, reminder_skid, warn_skid},
project::{Indexing, Project},
stringtools::split_to_tokens,
+9 -5
src/main.rs
···
mod args;
-
mod closures;
mod console;
mod macros;
mod project;
···
path::PathBuf,
};
use stringtools::{collect_arguments, collect_block, split_to_tokens};
-
use types::{InputFile, Token};
// really need to change this whole thing to work with characters rather than
// strings split on kind of arbitrary chars..
···
project_path = project_folder.clone();
project_path.push("skidmark.toml");
}
-
println!("Operatting on {:?}", &project_path.as_os_str());
assert!(env::set_current_dir(&project_folder).is_ok());
let mut project = parse_project(&project_path);
···
num = num + group.files.len();
}
-
println!("Proccesing {} files.", num);
// for group in &mut project.filegroups {
// for infile in &mut group.files {
// process_skid(infile, group.convert_html, &mut project.context);
···
None
}
-
fn process_skid(
tokens_in: &[Token],
proj_context: &mut Project,
skid_context: &mut SkidContext,
···
let mut working_index = 0;
while working_index < tokens.len() {
if tokens[working_index] == '\\' && !escaped {
tokens[working_index].contents = '\0'; // skip over this later when outputting to avoid shifting memory rn
escaped = true;
···
mod args;
mod console;
mod macros;
mod project;
···
path::PathBuf,
};
use stringtools::{collect_arguments, collect_block, split_to_tokens};
+
use types::Token;
// really need to change this whole thing to work with characters rather than
// strings split on kind of arbitrary chars..
···
project_path = project_folder.clone();
project_path.push("skidmark.toml");
}
+
info_generic(&format!("Operatting on {:?}", &project_path.as_os_str()));
assert!(env::set_current_dir(&project_folder).is_ok());
let mut project = parse_project(&project_path);
···
num = num + group.files.len();
}
+
info_generic(&format!("Proccesing {} files.", num));
// for group in &mut project.filegroups {
// for infile in &mut group.files {
// process_skid(infile, group.convert_html, &mut project.context);
···
None
}
+
pub fn process_skid(
tokens_in: &[Token],
proj_context: &mut Project,
skid_context: &mut SkidContext,
···
let mut working_index = 0;
while working_index < tokens.len() {
+
if tokens[working_index].pre_proccessed {
+
working_index += 1;
+
continue;
+
}
+
if tokens[working_index] == '\\' && !escaped {
tokens[working_index].contents = '\0'; // skip over this later when outputting to avoid shifting memory rn
escaped = true;
+24 -4
src/project.rs
···
pub global_post_insert: PathBuf,
pub filemap: Vec<PathBuf>, // mapped to index
}
pub struct FileGroup {
···
let mut project: Project = Project {
filegroups: Vec::new(),
-
//context: ProjectContext {
input_folder: PathBuf::new(),
output_folder: PathBuf::new(),
global_pre_insert: PathBuf::new(),
global_post_insert: PathBuf::new(),
filemap: Vec::new(),
-
//},
};
let config = tomlfile
.parse::<Table>()
···
fn file_for_index(&self, i: usize) -> Option<PathBuf>;
fn file_for_index_canonical(&self, i: usize) -> Option<&PathBuf>;
-
// fn index_of_section_name(&mut self, name: String) -> usize;
-
// fn section_name_for_index(&self, index: usize) -> String;
}
impl Indexing for Project {
···
return None;
}
return Some(&self.filemap[i]);
}
}
···
pub global_post_insert: PathBuf,
pub filemap: Vec<PathBuf>, // mapped to index
+
pub section_name_map: Vec<String>,
}
pub struct FileGroup {
···
let mut project: Project = Project {
filegroups: Vec::new(),
input_folder: PathBuf::new(),
output_folder: PathBuf::new(),
global_pre_insert: PathBuf::new(),
global_post_insert: PathBuf::new(),
filemap: Vec::new(),
+
section_name_map: Vec::new(),
};
let config = tomlfile
.parse::<Table>()
···
fn file_for_index(&self, i: usize) -> Option<PathBuf>;
fn file_for_index_canonical(&self, i: usize) -> Option<&PathBuf>;
+
fn index_of_section_name(&mut self, name: &String) -> usize;
+
fn section_name_for_index(&self, index: usize) -> Option<&String>;
}
impl Indexing for Project {
···
return None;
}
return Some(&self.filemap[i]);
+
}
+
+
// Some oddly placed +1 / -1 offsets here because 0 is the reserved default section index
+
fn index_of_section_name(&mut self, name: &String) -> usize {
+
let mut index = 0;
+
while index < self.section_name_map.len() {
+
if *name == self.section_name_map[index] {
+
return index + 1;
+
}
+
index += 1;
+
}
+
self.section_name_map.push(name.clone());
+
return self.section_name_map.len();
+
}
+
+
fn section_name_for_index(&self, index: usize) -> Option<&String> {
+
if (index - 1) >= self.section_name_map.len() {
+
return None;
+
}
+
return Some(&self.section_name_map[index - 1]);
}
}
+3
src/stringtools.rs
···
use super::DELIMITERS;
use crate::types::Token;
pub fn collect_arguments(tokens: &[Token]) -> Option<(Vec<String>, usize)> {
// Returns arguments vec and number of tokens to be consumed
//let mut output = Vec::new();
···
use super::DELIMITERS;
use crate::types::Token;
+
//TODO: There's a couple of functions that are still written as if tokens were strings, not chars; they work fine
+
// for now but they may need to be changed later
+
pub fn collect_arguments(tokens: &[Token]) -> Option<(Vec<String>, usize)> {
// Returns arguments vec and number of tokens to be consumed
//let mut output = Vec::new();
+7 -9
src/types.rs
···
pub origin_index: usize,
pub template_origin: usize,
pub origin_line: usize,
-
pub section_name_index: usize,
}
impl PartialEq<char> for Token {
···
impl Token {
pub fn new(contents: char, origin_file: usize, line_number: usize) -> Token {
Token {
-
contents: contents,
origin_index: origin_file,
template_origin: origin_file,
origin_line: line_number,
-
section_name_index: 0,
}
}
}
-
// impl ToString for Token {
-
// fn to_string(&self) -> String {
-
// return self.contents.clone();
-
// }
-
// }
-
impl Clone for Token {
fn clone(&self) -> Self {
let mut t = Token::new(
···
self.origin_index.clone(),
self.origin_line,
);
t.template_origin = self.template_origin;
return t;
}
}
···
pub origin_index: usize,
pub template_origin: usize,
pub origin_line: usize,
+
pub section_index: usize,
+
pub pre_proccessed: bool,
}
impl PartialEq<char> for Token {
···
impl Token {
pub fn new(contents: char, origin_file: usize, line_number: usize) -> Token {
Token {
+
contents,
origin_index: origin_file,
template_origin: origin_file,
origin_line: line_number,
+
section_index: 0,
+
pre_proccessed: false,
}
}
}
impl Clone for Token {
fn clone(&self) -> Self {
let mut t = Token::new(
···
self.origin_index.clone(),
self.origin_line,
);
+
t.section_index = self.section_index;
t.template_origin = self.template_origin;
+
t.pre_proccessed = self.pre_proccessed;
return t;
}
}