// Tholp's bespoke website generator
1use core::fmt;
2use std::{ascii::escape_default, error, fmt::Arguments, ops::Index, process::exit, thread::sleep};
3
4use super::DELIMITERS;
5use crate::types::Token;
6
7pub fn collect_arguments(tokens: &[Token]) -> (Vec<String>, usize) {
8 // Arguments vec and number of tokens consumed
9 //let mut output = Vec::new();
10 let mut split_tokens = Vec::new();
11 for tok in tokens {
12 for s in split_keep_delimiters(tok.contents.clone()) {
13 split_tokens.push(s);
14 }
15 }
16
17 let mut quoted: bool = false;
18 let mut entered: bool = false;
19 let mut arg = "".to_string();
20 let mut args: Vec<String> = Vec::new();
21
22 let mut in_token_count = 0;
23
24 for tok in split_tokens {
25 in_token_count += 1; // This could be a problem if it something got split above..
26 if tok.starts_with([' ', '\t']) && !quoted {
27 continue;
28 }
29
30 if !entered && tok.starts_with('(') {
31 entered = true;
32 continue;
33 }
34
35 if !entered {
36 continue;
37 }
38
39 if !quoted && tok.starts_with(')') {
40 break;
41 }
42
43 let mut i = 0;
44 while i < tok.len() {
45 let c = tok.chars().nth(i).unwrap();
46 i += 1;
47
48 if c == '\"' {
49 quoted = !quoted;
50 continue;
51 }
52
53 arg.push(c);
54 }
55
56 if !quoted {
57 args.push(arg.clone());
58 arg.clear();
59 }
60 }
61
62 return (args, in_token_count);
63}
64
65pub fn collect_block(tokens: &[Token]) -> (Vec<Token>, usize) {
66 let mut entered = false;
67 let mut tokens_consumed: usize = 0;
68 let mut entering_bracket_count = 0;
69 let mut exiting_bracket_count = 0;
70 let mut scope_count = 0; //incremented by '{{{', decremented by '}}}'
71 let mut escaped = false;
72
73 let mut block: Vec<Token> = Vec::new();
74
75 // We dont really care about doing anything that in the block right now
76 // maybe have the Token struct contain scope level later?
77 for tok in tokens {
78 tokens_consumed += 1;
79 if !entered {
80 if tok.contents.is_only_whitespace() {
81 continue;
82 }
83 if tok.contents != "{"
84 // Expected block start, got garbage
85 {
86 // println!("Expected block start, got {}",tok.contents);
87 // for t in &block
88 // {
89 // print!("{} ", t.contents);
90 // }
91 // exit(1);
92 return (Vec::new(), 0);
93 }
94 }
95
96 if escaped {
97 escaped = false;
98 entering_bracket_count = 0;
99 exiting_bracket_count = 0;
100 block.push(tok.clone());
101 continue;
102 }
103
104 // Scope Start
105 if tok.contents == "{" {
106 entering_bracket_count += 1;
107 if entering_bracket_count == 3 {
108 scope_count += 1;
109 entering_bracket_count = 0;
110 if !entered {
111 entered = true;
112 }
113 }
114 } else {
115 entering_bracket_count = 0;
116 }
117 // Scope End
118 if tok.contents == "}" {
119 exiting_bracket_count += 1;
120 if exiting_bracket_count == 3 {
121 scope_count -= 1;
122 entering_bracket_count = 0;
123 }
124 if scope_count == 0 {
125 break;
126 }
127 } else {
128 exiting_bracket_count = 0;
129 }
130 if tok.contents == "\\" {
131 escaped = true;
132 } else {
133 block.push(tok.clone());
134 }
135 }
136 return (block, tokens_consumed);
137}
138
// There's no std function that makes the delimiters their own elements in the
// output vector, so clean that up a bit here.
141pub fn split_keep_delimiters(instr: String) -> Vec<String> {
142 let split: Vec<&str> = instr.split_inclusive(DELIMITERS).collect();
143 let mut output = Vec::new();
144
145 for s in split {
146 if s.ends_with(DELIMITERS) {
147 let (token, ending) = s.split_at(s.len() - 1);
148 if token.len() > 0 {
149 output.push(token.to_string());
150 }
151 output.push(ending.to_string());
152 } else {
153 output.push(s.to_string());
154 }
155 }
156 return output;
157}
158
159pub fn strings_to_tokens(in_strings: Vec<String>, origin_file: usize) -> Vec<Token> {
160 let mut tokens = Vec::new();
161 let mut line_count: u32 = 1;
162
163 for str in in_strings {
164 let current_line = line_count;
165 for char in str.chars() {
166 if char == '\n' {
167 line_count += 1;
168 }
169 }
170 let token: Token = Token::new(str, origin_file, current_line);
171 tokens.push(token);
172 }
173
174 return tokens;
175}
176
// Need to do some special-case stuff so you can use macros without spaces between
// (something like "stuff!insert(..)" is split to ["stuff","!insert(..)"] so it can be acted on later)
179pub fn split_to_tokens(instr: String, origin_file: usize) -> Vec<Token> {
180 let split = split_keep_delimiters(instr);
181 let mut new_split: Vec<String> = Vec::new();
182 for s in split {
183 let prefix_offset = s.find(&['!', '&']).unwrap_or(s.len() + 1);
184 if prefix_offset != 0 && prefix_offset != s.len() + 1 {
185 let (first, second) = s.split_at(prefix_offset);
186 println!("\"{}\", \"{}\"", first, second);
187 new_split.push(first.to_string());
188 new_split.push(second.to_string());
189 } else {
190 new_split.push(s);
191 }
192 //sleep(std::time::Duration::from_millis(10));
193 }
194 return strings_to_tokens(new_split, origin_file);
195}
196
197pub fn next_nonwhitespace_token(tokens: &Vec<Token>, index: usize) -> (bool, usize) {
198 while index < tokens.len() {
199 if tokens[index].contents.is_only_whitespace() {
200 continue;
201 }
202 return (true, index);
203 }
204 return (false, 0);
205}
206
207//trim whitespace from the ends
208pub fn trim_whitespace_tokens(tokens: &[Token]) -> &[Token] {
209 let mut start: usize = 0;
210 let mut end: usize = tokens.len();
211 for tok in tokens {
212 if !tok.contents.is_only_whitespace() {
213 break;
214 }
215 start = start + 1;
216 }
217
218 for tok in tokens.iter().rev() {
219 if !tok.contents.is_only_whitespace() {
220 break;
221 }
222 end = end - 1;
223 }
224
225 return &tokens[start..end];
226}
227
/// Extension trait: test whether a string consists solely of whitespace.
pub trait OnlyWhitespace {
    /// Returns `true` if every character is whitespace.
    /// Also returns `true` for the empty string.
    fn is_only_whitespace(&self) -> bool;
}

impl OnlyWhitespace for String {
    fn is_only_whitespace(&self) -> bool {
        // `all` is vacuously true on an empty iterator, matching the original
        // loop's behavior for "".
        self.chars().all(char::is_whitespace)
    }
}