Tholp's bespoke website generator

Allow dangerous HTML in conversion

Tholp1 86bf06e7 aa51ac10

Changed files
+44 -11
src
+17 -4
src/main.rs
···
     include::{self, macro_include},
     MACRO_LIST,
 };
+use markdown::{to_html_with_options, CompileOptions, Options};
 use std::{
     env,
     fs::{self, File},
     io::Write,
     process::{exit, Output},
 };
-use stringtools::{collect_arguments, split_keep_delimiters, strings_to_tokens};
+use stringtools::{collect_arguments, split_keep_delimiters, split_to_tokens, strings_to_tokens};
 use types::{InputFile, Macro, Token};
-use markdown::{to_html_with_options, CompileOptions, Options};
 static DELIMITERS: [char; 7] = [' ', '\n', '\t', '(', ')', '{', '}'];
···
     let contents = fs::read_to_string(&file.filename_input).expect("File unreadable or missing");
     //println!("{}\n {}", f.filename_out, contents);
-    file.tokens = strings_to_tokens(split_keep_delimiters(contents), file.filename_input.clone());
+    //file.tokens = strings_to_tokens(split_keep_delimiters(contents), file.filename_input.clone());
+    file.tokens = split_to_tokens(contents, file.filename_input.clone());
     let mut index = 0;
···
     }
     fs::write(&file.filename_skidout, &skid_output).expect("Couldn't write skid to file");
-    let html_output = markdown::to_html_with_options(&skid_output, &Options::gfm()).unwrap();
+    //let html_output = markdown::to_html(&skid_output);
+    let html_output = markdown::to_html_with_options(
+        &skid_output,
+        &Options {
+            compile: CompileOptions {
+                allow_dangerous_html: true,
+                allow_dangerous_protocol: true,
+                ..CompileOptions::gfm()
+            },
+            ..Options::gfm()
+        },
+    )
+    .unwrap();
     fs::write(&file.filename_htmlout, &html_output).expect("Couldn't write html to file");
 }
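For context, a minimal standalone sketch (not part of the commit) of what the new conversion call changes, using the markdown crate's Options/CompileOptions API as it appears above; the source string is invented for illustration. With plain Options::gfm() the crate escapes raw HTML in the input, while allow_dangerous_html and allow_dangerous_protocol let it pass through verbatim, which is what a generator whose macros emit raw HTML needs.

use markdown::{to_html_with_options, CompileOptions, Options};

fn main() {
    // Invented input: raw HTML wrapping markdown emphasis.
    let source = "<div class=\"note\">*hello*</div>";

    // Default GFM compile options escape the raw HTML (&lt;div&gt;...).
    let escaped = to_html_with_options(source, &Options::gfm()).unwrap();

    // The commit's options let the raw HTML through unchanged.
    let raw = to_html_with_options(
        source,
        &Options {
            compile: CompileOptions {
                allow_dangerous_html: true,
                allow_dangerous_protocol: true,
                ..CompileOptions::gfm()
            },
            ..Options::gfm()
        },
    )
    .unwrap();

    println!("escaped: {escaped}\nraw: {raw}");
}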
src/stringtools.rs (+27 -7)
···
 use core::fmt;
-use std::{fmt::Arguments, ops::Index, process::exit};
+use std::{fmt::Arguments, ops::Index, process::exit, thread::sleep};
 use super::DELIMITERS;
 use crate::types::Token;
···
     return output;
 }
-pub fn strings_to_tokens(instrings: Vec<String>, origin_file: String) -> Vec<Token> {
+pub fn strings_to_tokens(in_strings: Vec<String>, origin_file: String) -> Vec<Token> {
     let mut tokens = Vec::new();
-    let mut linecount: u32 = 1;
-    for str in instrings {
-        let currentline = linecount;
+    let mut line_count: u32 = 1;
+    for str in in_strings {
+        let current_line = line_count;
         for char in str.chars() {
             if char == '\n' {
-                linecount += 1;
+                line_count += 1;
             }
         }
-        let token: Token = Token::new(str, origin_file.clone(), currentline);
+        let token: Token = Token::new(str, origin_file.clone(), current_line);
         tokens.push(token);
     }
     return tokens;
 }
+
+// Need to do some special-case handling so you can use macros without spaces between them
+pub fn split_to_tokens(instr: String, origin_file: String) -> Vec<Token> {
+    let split = split_keep_delimiters(instr);
+    let mut new_split: Vec<String> = Vec::new();
+    for s in split {
+        let prefix_offset = s.find(&['!', '&']).unwrap_or(s.len() + 1);
+        if prefix_offset != 0 && prefix_offset != s.len() + 1 {
+            let (first, second) = s.split_at(prefix_offset);
+            println!("\"{}\", \"{}\"", first, second);
+            new_split.push(first.to_string());
+            new_split.push(second.to_string());
+        } else {
+            new_split.push(s);
+        }
+        //sleep(std::time::Duration::from_millis(10));
+    }
+    return strings_to_tokens(new_split, origin_file);
+}
+
 pub fn next_nonwhitespace_token(tokens: &Vec<Token>, index: usize) -> (bool, usize) {
     while index < tokens.len() {
         if tokens[index].contents.starts_with([' ', '\t', '\n']) {
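A self-contained illustration (hypothetical, not repo code) of the prefix split that split_to_tokens adds: any fragment with a '!' or '&' past its first character is cut in two at the first occurrence, so a macro glued to preceding text still begins its own token. Plain strings stand in here for the repo's Token type and split_keep_delimiters output.

// Hypothetical sketch of the '!'/'&' prefix split in split_to_tokens,
// operating on plain strings instead of the repo's Token type.
fn split_macro_prefixes(fragments: Vec<String>) -> Vec<String> {
    let mut out = Vec::new();
    for s in fragments {
        // Sentinel s.len() + 1 means "no '!' or '&' found", mirroring the diff.
        let prefix_offset = s.find(&['!', '&']).unwrap_or(s.len() + 1);
        if prefix_offset != 0 && prefix_offset != s.len() + 1 {
            // Split once, at the first '!' or '&'.
            let (first, second) = s.split_at(prefix_offset);
            out.push(first.to_string());
            out.push(second.to_string());
        } else {
            out.push(s);
        }
    }
    out
}

fn main() {
    let input = vec![
        "text!include".to_string(), // macro glued to preceding text
        "!include".to_string(),     // already starts with a prefix: untouched
        "plain".to_string(),        // no prefix at all: untouched
    ];
    assert_eq!(
        split_macro_prefixes(input),
        vec!["text", "!include", "plain"]
    );
}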