-rw-r--r--  Cargo.lock            | 206
-rw-r--r--  Cargo.toml            |  10
-rw-r--r--  src/collect_files.rs  |  84
-rw-r--r--  src/generate_html.rs  | 104
-rw-r--r--  src/main.rs           | 125
5 files changed, 413 insertions, 116 deletions
diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3,9 +3,85 @@
 version = 4
 
 [[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "bit-set"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+
+[[package]]
+name = "fancy-regex"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
+dependencies = [
+ "bit-set",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "highlight"
+version = "1.0.1"
+source = "git+git://benbridle.com/highlight?tag=v1.0.1#a7282a8de548fd14bc60519453e9a6e43b23acde"
+dependencies = [
+ "fancy-regex",
+]
+
+[[package]]
+name = "inked"
+version = "1.0.0"
+source = "git+git://benbridle.com/inked?tag=v1.0.0#2954d37b638fa2c1dd3d51ff53f08f475aea6ea3"
+dependencies = [
+ "termcolor",
+]
+
+[[package]]
+name = "log"
+version = "1.1.1"
+source = "git+git://benbridle.com/log?tag=v1.1.1#930f3d0e2b82df1243f423c092a38546ea7533c3"
+
+[[package]]
+name = "log"
+version = "2.0.0"
+source = "git+git://benbridle.com/log?tag=v2.0.0#a38d3dd487594f41151db57625410d1b786bebe4"
+dependencies = [
+ "inked",
+]
+
+[[package]]
 name = "markdown"
-version = "3.2.0"
-source = "git+git://benbridle.com/markdown?tag=v3.2.0#883a2a63023ea9b1e4b2bb51831ea1dafcb7346a"
+version = "3.3.0"
+source = "git+git://benbridle.com/markdown?tag=v3.3.0#df45ffb3affb7cb1d53b567b70fef721353ccffe"
+
+[[package]]
+name = "memchr"
+version = "2.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+
+[[package]]
+name = "paste"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
 
 [[package]]
 name = "recipe"
@@ -13,31 +89,135 @@
 version = "1.4.0"
 source = "git+git://benbridle.com/recipe?tag=v1.4.0#652aaee3130e2ee02742fdcc248ddd1bee285737"
 
 [[package]]
+name = "regex-automata"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
+
+[[package]]
+name = "switchboard"
+version = "1.0.0"
+source = "git+git://benbridle.com/switchboard?tag=v1.0.0#ea70fa89659e5cf1a9d4ca6ea31fb67f7a2cc633"
+dependencies = [
+ "log 1.1.1",
+ "paste",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
 name = "toaster"
-version = "1.7.0"
+version = "1.12.0"
 dependencies = [
+ "highlight",
+ "log 2.0.0",
  "markdown",
  "recipe",
+ "switchboard",
  "vagabond",
- "xflags",
 ]
 
 [[package]]
 name = "vagabond"
-version = "1.1.0"
-source = "git+git://benbridle.com/vagabond?tag=v1.1.0#6e759a3abb3bc3e5da42d69a6f20ec2c31eb33de"
+version = "1.1.1"
+source = "git+git://benbridle.com/vagabond?tag=v1.1.1#b190582517e6008ad1deff1859f15988e4efaa26"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
+dependencies = [
+ "windows-sys",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets",
+]
 
 [[package]]
-name = "xflags"
-version = "0.4.0-pre.2"
+name = "windows-targets"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6a40f95e4e200baabdfe8b813e3ee754b58407a677141bd2890c28ef4a89c21"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
 dependencies = [
- "xflags-macros",
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
 ]
 
 [[package]]
-name = "xflags-macros"
-version = "0.4.0-pre.2"
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a6d9b56f406f5754a3808524166b6e6bdfe219c0526e490cfc39ecc0582a4e6"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,13 +1,15 @@
 [package]
 name = "toaster"
-version = "1.7.0"
+version = "1.12.0"
 edition = "2021"
 
 [dependencies]
-vagabond = { git = "git://benbridle.com/vagabond", tag = "v1.1.0" }
-markdown = { git = "git://benbridle.com/markdown", tag = "v3.2.0" }
+vagabond = { git = "git://benbridle.com/vagabond", tag = "v1.1.1" }
+markdown = { git = "git://benbridle.com/markdown", tag = "v3.3.0" }
 recipe = { git = "git://benbridle.com/recipe", tag = "v1.4.0" }
-xflags = "0.4.0-pre.1"
+log = { git = "git://benbridle.com/log", tag = "v2.0.0" }
+switchboard = { git = "git://benbridle.com/switchboard", tag = "v1.0.0" }
+highlight = { git = "git://benbridle.com/highlight", tag = "v1.0.1" }
 
 [profile.release]
 lto=true
diff --git a/src/collect_files.rs b/src/collect_files.rs
index 4eee1dc..ccdfc49 100644
--- a/src/collect_files.rs
+++ b/src/collect_files.rs
@@ -1,13 +1,16 @@
 use crate::*;
+use highlight::*;
 use vagabond::*;
 
 use std::collections::HashMap;
+use std::fmt::Debug;
 
 pub struct Website {
     pub name: String,
     pub config: HashMap<String, String>,
+    pub highlighters: Highlighters,
     pub pages: Vec<Page>,
     pub redirects: Vec<Redirect>,
     pub static_files: Vec<StaticItem>, // Redirects, !-prefixed-dir contents
@@ -47,7 +50,7 @@ pub struct Redirect {
     pub last_modified: Option<SystemTime>, // last-modified time of source file
 }
 
-pub trait LinkFrom {
+pub trait LinkFrom: Debug {
     fn name(&self) -> &str;
     fn parent_url(&self) -> &str;
     fn parents(&self) -> &[String];
@@ -58,8 +61,18 @@ pub trait LinkFrom {
         }
         return root;
     }
+    fn qualified_name(&self) -> String {
+        match self.parents().last() {
+            Some(parent) => format!("{parent}/{}", self.name()),
+            None => format!("/{}", self.name()),
+        }
+    }
 }
 
+pub struct Highlighters {
+    pub languages: HashMap<String, usize>,
+    pub highlighters: Vec<Highlighter>,
+}
 
 impl Page {
     pub fn root(&self) -> String {
@@ -71,6 +84,18 @@ impl Page {
     }
 }
 
+impl Debug for Page {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+        write!(f, "\"{}\"", self.qualified_name())
+    }
+}
+
+impl Debug for Redirect {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+        write!(f, "\"{}\"", self.qualified_name())
+    }
+}
+
 impl LinkFrom for Page {
     fn name(&self) -> &str { &self.name }
     fn parent_url(&self) -> &str { &self.parent_url }
@@ -93,11 +118,16 @@ impl Website {
             static_dirs: Vec::new(),
             name: match Entry::from_path(path) {
                 Ok(entry) => entry.name,
-                Err(err) => error!("Couldn't open {:?}: {:?}", &path, err),
+                Err(err) => fatal!("Couldn't open {:?}: {:?}", &path, err),
             },
             config: HashMap::new(),
+            highlighters: Highlighters {
+                languages: HashMap::new(),
+                highlighters: Vec::new(),
+            },
         };
         new.collect_entry(path, path);
+        new.parse_highlighters();
         return new;
     }
 
@@ -118,7 +148,7 @@ impl Website {
         // Generate parent URL, used only for files.
         let source_path = entry.original_path.clone();
         let relative_path = source_path.strip_prefix(prefix).unwrap_or_else(
-            |_| error!("Path doesn't start with {prefix:?}: {source_path:?}"));
+            |_| fatal!("Path doesn't start with {prefix:?}: {source_path:?}"));
         let mut parents: Vec<_> = relative_path.components()
             .map(|c| c.as_os_str().to_string_lossy().to_string()).collect();
         parents.pop(); // Remove file segment.
@@ -130,7 +160,7 @@ impl Website {
                 for child in traverse_directory(&entry).unwrap() {
                     let source_path = child.original_path;
                     let relative_path = source_path.strip_prefix(&entry.original_path).unwrap_or_else(
-                        |_| error!("Path doesn't start with {prefix:?}: {source_path:?}"))
+                        |_| fatal!("Path doesn't start with {prefix:?}: {source_path:?}"))
                         .as_os_str().to_string_lossy().to_string();
                     let full_url = format!("{stripped}/{relative_path}");
                     self.static_files.push(StaticItem { full_url, source_path, last_modified })
@@ -143,13 +173,13 @@ impl Website {
                     }
                 }
             } else if parents.is_empty() && entry.name.to_lowercase() == "toaster.conf" {
-                verbose!("Reading configuration file at {path:?}");
+                info!("Reading configuration file at {path:?}");
                 // Parse the config file.
                 let config = std::fs::read_to_string(&source_path).unwrap();
                 let mut key = None;
                 let mut value = String::new();
                 for line in config.lines() {
-                    if line.starts_with(" ") {
+                    if line.starts_with(" ") || line.trim().is_empty() {
                         value.push_str(line.trim());
                         value.push('\n');
                     } else {
@@ -163,6 +193,10 @@ impl Website {
                     self.config.insert(key, std::mem::take(&mut value));
                 }
             } else {
+                let full_name = match parents.last() {
+                    Some(parent) => format!("{parent}/{name}"),
+                    None => name.to_string(),
+                };
                 match extension.as_str() {
                     "md" => {
                         let markdown = std::fs::read_to_string(&source_path).unwrap();
@@ -182,7 +216,7 @@ impl Website {
                             None
                         }).collect();
                         for url in duplicates {
-                            warn!("Page {name:?} contains multiple headings with ID \"#{url}\"");
+                            warn!("Page {full_name:?} contains multiple headings with ID \"#{url}\"");
                         }
                         if name_url == "+index" {
                             if parents.is_empty() {
@@ -268,6 +302,40 @@ impl Website {
         }
     }
 
+    pub fn parse_highlighters(&mut self) {
+        let mut languages = Vec::new();
+        let mut source = String::new();
+        for line in self.get_config("highlighters").lines() {
+            if let Some(line) = line.trim().strip_prefix('[') {
+                if let Some(line) = line.strip_suffix(']') {
+                    // Bank the current source.
+                    if !languages.is_empty() {
+                        let i = self.highlighters.highlighters.len();
+                        for language in languages {
+                            self.highlighters.languages.insert(language, i);
+                        }
+                        let highlighter = Highlighter::from_str(&source);
+                        self.highlighters.highlighters.push(highlighter);
+                    }
+                    languages = line.split('/').map(|s| s.trim().to_string()).collect();
+                    source.clear();
+                    continue;
+                }
+            }
+            source.push_str(line);
+            source.push('\n');
+        }
+        // Bank the current source.
+        if !languages.is_empty() {
+            let i = self.highlighters.highlighters.len();
+            for language in languages {
+                self.highlighters.languages.insert(language, i);
+            }
+            let highlighter = Highlighter::from_str(&source);
+            self.highlighters.highlighters.push(highlighter);
+        }
+    }
+
     // Ext is extension without a dot.
     // Checks if a relative link to an internal page name can be reached from
     // the current page, and returns a resolved absolute link to the page with extension.
@@ -294,7 +362,7 @@ impl Website {
         if let Some(heading) = heading {
             let heading = make_url_safe(strip_appendix(heading));
             if !page.headings.iter().any(|h| h.url == heading) {
-                warn!("Page {:?} contains link to nonexistent heading {heading:?} on page {path:?}", from.name());
+                warn!("Page {from:?} contains link to nonexistent heading {heading:?} on page {path:?}");
             }
             return Some(format!("{root}{path}.{ext}#{heading}"));
         } else {
diff --git a/src/generate_html.rs b/src/generate_html.rs
index dca68f7..af48d2e 100644
--- a/src/generate_html.rs
+++ b/src/generate_html.rs
@@ -63,15 +63,31 @@ pub fn generate_html_redirect(path: &str) -> String {
 
 
 pub fn get_html_head(page: &Page, website: &Website) -> String {
+    let mut include_default_head = true;
+    let mut html_head = String::new();
+    for block in &page.document.blocks {
+        if let markdown::Block::Fragment { language, content } = block {
+            if language == "override-html-head" {
+                html_head.push_str(content);
+                include_default_head = false;
+            }
+            if language == "embed-html-head" {
+                html_head.push_str(content);
+            }
+        }
+    }
+    if include_default_head {
+        html_head.insert_str(0, &website.get_config("html.head"));
+    }
     let root = page.root();
-    website.get_config("html.head")
+    html_head
         .replace("href='/", &format!("href='{root}"))
         .replace("src='/", &format!("src='{root}"))
 }
 
 
 pub fn get_table_of_contents(page: &Page) -> String {
-    if page.headings.iter().filter(|h| h.level != Level::Heading3).count() < 3 {
+    if page.headings.len() < 3 {
         return String::new();
     }
     let mut toc = String::from("<details><summary></summary><ul>\n");
@@ -94,7 +110,7 @@ pub fn get_table_of_contents(page: &Page) -> String {
 
 
 pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Website) -> String {
-    let from = &page.name;
+    let from = &page;
     let mut html = String::new();
 
     macro_rules! line_to_html {
@@ -112,7 +128,7 @@ pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Webs
     for block in &document.blocks {
         match block {
             Block::Heading { level, line } => {
-                let id = make_url_safe(strip_appendix(&line_to_html!(line)));
+                let id = make_url_safe(strip_appendix(&line.to_string()));
                 match level {
                     Level::Heading1 => tag!("h1", line, format!("id='{id}'")),
                     Level::Heading2 => tag!("h2", line, format!("id='{id}'")),
@@ -176,6 +192,7 @@ pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Webs
                     }
                 }
                 let label = sanitize_text(label, true);
+                let path = sanitize_text(&path, false);
                 match extension.to_lowercase().as_str() {
                     "jpg"|"jpeg"|"png"|"webp"|"gif"|"tiff" => html!(
                         "<figure><a href='{path}'><img src='{path}' alt='{label}' title='{label}' /></a></figure>"),
@@ -192,7 +209,15 @@ pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Webs
                     "embed-html" => html!("{content}"),
                     "embed-css" => wrap!("style", html!("{content}")),
                     "embed-javascript"|"embed-js" => wrap!("script", html!("{content}")),
-                    "hidden"|"todo"|"embed-html-head" => (),
+                    "embed-html-head"|"override-html-head" => (),
+                    "hidden"|"todo" => (),
+                    "poem" => wrap!("div", "class='poem'", for line in content.lines() {
+                        let line = line.trim_end();
+                        match line.is_empty() {
+                            true => html!("<br>"),
+                            false => html!("<p>{}</p>", sanitize_text(line, true)),
+                        }
+                    }),
                     "recipe" => {
                         let recipe = Recipe::parse(content);
                         html!("<div class='recipe'><ul>");
@@ -207,9 +232,9 @@ pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Webs
                             warn!("Gallery on page {from:?} references nonexistent image {file:?}");
                             continue;
                         }
-                        let large = format!("{root}images/large/{file}");
-                        // let small = format!("{root}images/small/{file}");
-                        let thumb = format!("{root}images/thumb/{file}");
+                        let large = sanitize_text(&format!("{root}images/large/{file}"), false);
+                        // let small = sanitize_text(&format!("{root}images/small/{file}"), false);
+                        let thumb = sanitize_text(&format!("{root}images/thumb/{file}"), false);
                         html!("<a href='{large}'><img src='{thumb}' /></a>");
                     }),
                     "gallery-nav" => wrap!("div", "class='gallery-nav'", for line in content.lines() {
@@ -219,7 +244,7 @@ pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Webs
                        let image = image.trim();
                        let ParsedLink { path, class, label } = parse_internal_link(name, page, website);
                        if website.has_image(image) {
-                           let thumb = format!("{root}images/thumb/{image}");
+                           let thumb = sanitize_text(&format!("{root}images/thumb/{image}"), false);
                            html!("<a href='{path}' class='{class}'><img src='{thumb}'/><p>{label}</p></a>")
                        } else {
                            warn!("Gallery-nav on page {from:?} references nonexistent image {image:?}");
@@ -228,11 +253,24 @@ pub fn document_to_html(document: &MarkdownDocument, page: &Page, website: &Webs
                            warn!("Gallery-nav on page {from:?} has line without a '::' separator");
                        }
                     }),
-                    _ => {
-                        html!("<pre class='{language}'>");
-                        html!("{}", sanitize_text(content, false));
-                        html!("</pre>");
-                    },
+                    _ => wrap!("pre", format!("class='{language}'"), {
+                        if let Some(i) = website.highlighters.languages.get(language) {
+                            let mut source = String::new();
+                            let highlighter = &website.highlighters.highlighters[*i];
+                            for span in highlighter.highlight(content) {
+                                if span.tag.is_empty() {
+                                    source.push_str(&sanitize_text(&span.text, false));
+                                } else {
+                                    source.push_str(&format!("<span class='{}'>", span.tag.to_lowercase()));
+                                    source.push_str(&sanitize_text(&span.text, false));
+                                    source.push_str("</span>");
+                                }
+                            }
+                            html!("{source}");
+                        } else {
+                            html!("{}", sanitize_text(content, false))
+                        }
+                    })
                 }
             }
             Block::Break => html!("<hr>"),
@@ -294,8 +332,11 @@ fn line_to_html(line: &Line, page: &Page, website: &Website) -> String {
             let text = &sanitize_text(text, false);
             html.push_str(&format!("<code>{text}</code>")) }
         Token::Math(text) => {
             let text = &sanitize_text(text, false);
             html.push_str(&format!("<span class='math'>{text}</span>")) }
-        Token::InternalLink(name) => {
-            let ParsedLink { path, class, label } = parse_internal_link(name, page, website);
+        Token::InternalLink{ label: link_label, path } => {
+            let ParsedLink { path, class, mut label } = parse_internal_link(path, page, website);
+            if !link_label.is_empty() {
+                label = link_label.to_string();
+            }
             html.push_str(&format!("<a href='{path}' class='{class}'>{label}</a>")) }
         Token::ExternalLink { label, path } => {
@@ -316,7 +357,7 @@ struct ParsedLink {
 }
 
 fn parse_internal_link(name: &str, page: &Page, website: &Website) -> ParsedLink {
-    let from = &page.name;
+    let from = &page;
     let (class, label, path) = match name.split_once('#') {
         Some(("", heading)) => ("heading", heading, format!("#{}", strip_appendix(heading))),
         Some((page, heading)) => ("page", heading, format!("{page}.html#{}", strip_appendix(heading))),
@@ -341,11 +382,12 @@ fn parse_internal_link(name: &str, page: &Page, website: &Website) -> ParsedLink
             warn!("Page {from:?} contains link to nonexistent internal heading {heading:?}");
         }
     }
+    let path = url_encode(&path);
    ParsedLink { path, class, label }
 }
 
 fn parse_external_link(label: &str, path: &str, page: &Page, website: &Website) -> ParsedLink {
-    let from = &page.name;
+    let from = &page;
     let mut path = path.to_owned();
     let mut label = label.to_string();
     let mut is_internal = true;
@@ -372,6 +414,7 @@ fn parse_external_link(label: &str, path: &str, page: &Page, website: &Website)
             };
         }
     }
+    let path = url_encode(&path);
     let label = sanitize_text(&label, true);
     ParsedLink { path, class: "external", label }
 }
@@ -398,17 +441,26 @@ fn sanitize_text(text: &str, fancy: bool) -> String {
             },
             '<' => output.push_str("&lt;"),
             '>' => output.push_str("&gt;"),
-            '"' if fancy => match prev.is_whitespace() {
-                true => output.push('“'),
-                false => output.push('”'),
+            '"' => match fancy {
+                true => match prev.is_whitespace() {
+                    true => output.push('“'),
+                    false => output.push('”'),
+                }
+                false => output.push_str("&quot;"),
             },
-            '\'' if fancy => match prev.is_whitespace() {
-                true => output.push('‘'),
-                false => output.push('’'),
+            '\'' => match fancy {
+                true => match prev.is_whitespace() {
+                    true => output.push('‘'),
+                    false => output.push('’'),
+                }
+                false => output.push_str("&#39;"),
             },
             '-' if fancy => match prev.is_whitespace() && next.is_whitespace() {
-                true => output.push('—'),
-                false => output.push('-'),
+                true => match i > 0 {
+                    true => output.push('—'), // em-dash, for mid-sentence
+                    false => output.push('–'), // en-dash, for start of line
+                }
+                false => output.push('-'), // regular dash, for mid-word
             }
             _ => output.push(*c),
         }
diff --git a/src/main.rs b/src/main.rs
index 25d1528..1ea25d2 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -11,55 +11,60 @@ use vagabond::*;
 use std::collections::HashSet;
 use std::time::SystemTime;
+use log::{info, warn, error, fatal};
+use switchboard::{Switchboard, SwitchQuery};
 
-const NORMAL: &str = "\x1b[0m";
-const BOLD: &str = "\x1b[1m";
-const WHITE: &str = "\x1b[37m";
-const RED: &str = "\x1b[31m";
-const YELLOW: &str = "\x1b[33m";
-const BLUE: &str = "\x1b[34m";
-static mut VERBOSE: bool = false;
-#[macro_export] macro_rules! verbose {
-    ($($tokens:tt)*) => { if unsafe { VERBOSE } {
-        eprint!("{BOLD}{BLUE}[INFO]{NORMAL}: "); eprint!($($tokens)*);
-        eprintln!("{NORMAL}");
-    } };
-}
-#[macro_export] macro_rules! warn {
-    ($($tokens:tt)*) => {{
-        eprint!("{BOLD}{YELLOW}[WARNING]{NORMAL}{WHITE}: "); eprint!($($tokens)*);
-        eprintln!("{NORMAL}");
-    }};
+fn print_help() -> ! {
+    eprintln!("\
+Usage: toaster <source> <destination>
+
+Generate a website from a structured directory of markdown files.
+
+Arguments:
+  source          Source directory with markdown files
+  destination     Path to output directory
+
+Switches:
+  --delete        Delete the destination directory first if it exists
+  --html          Generate HTML output
+  --verbose, -v   Print information as each file is parsed
+  --version       Print the program version and exit
+  --help, -h      Print help
+");
+    std::process::exit(0);
 }
-#[macro_export] macro_rules! error {
-    ($($tokens:tt)*) => {{
-        eprint!("{BOLD}{RED}[ERROR]{WHITE}: "); eprint!($($tokens)*);
-        eprintln!("{NORMAL}"); std::process::exit(1);
-    }};
+
+fn print_version() -> ! {
+    let version = env!("CARGO_PKG_VERSION");
+    eprintln!("toaster, version {version}");
+    eprintln!("written by ben bridle");
+    std::process::exit(0);
 }
+
 fn main() {
-    let args = Arguments::from_env_or_exit();
-    if args.version {
-        let version = env!("CARGO_PKG_VERSION");
-        eprintln!("toaster, version {version}");
-        std::process::exit(0);
+    let mut args = Switchboard::from_env();
+    if args.named("help").short('h').as_bool() {
+        print_help();
     }
-    if args.verbose {
-        unsafe { VERBOSE = true; }
+    if args.named("version").as_bool() {
+        print_version();
     }
-    if args.source.is_none() || args.destination.is_none() {
-        error!("Provide a source directory and a destination directory.")
+    if args.named("verbose").short('v').as_bool() {
+        log::set_log_level(log::LogLevel::Info);
     }
-    let source_directory = match args.source.as_ref().unwrap().canonicalize() {
-        Ok(source_directory) => source_directory,
-        Err(err) => error!("{:?}: {err}", args.source.unwrap()),
-    };
-    let destination_directory = args.destination.unwrap();
+    let source = args.positional("source").as_path();
+    let destination = args.positional("destination").as_path();
+    let delete_existing = args.named("delete").as_bool();
+    let export_html = args.named("html").as_bool();
+    let source = match source.canonicalize() {
+        Ok(source) => source,
+        Err(err) => fatal!("{source:?}: {err}"),
+    };
 
-    let website = Website::from_path(&source_directory);
+    let website = Website::from_path(&source);
 
     // Check for duplicate output paths for pages.
     let mut destinations: HashSet<&str> = HashSet::new();
@@ -80,11 +85,11 @@ fn main() {
         }
     }
 
-    let mut destination = destination_directory.clone();
+    let mut destination = destination;
     destination.push(make_url_safe(&website.name));
-    if args.delete && Entry::from_path(&destination).is_ok() {
-        verbose!("Deleting existing destination directory {destination:?}");
+    if delete_existing && Entry::from_path(&destination).is_ok() {
+        info!("Deleting existing destination directory {destination:?}");
         remove(&destination).unwrap_or_else(|_|
             error!("Failed to delete existing destination directory {destination:?}"));
     }
 
@@ -94,13 +99,13 @@ fn main() {
         let mut destination = destination.clone();
         destination.push(&page.full_url);
         // Convert document to different formats.
-        if args.html {
+        if export_html {
             let html = generate_html(&page.document, page, &website);
             write_file(&html, &destination, "html", page.last_modified);
         }
         // Copy original markdown file.
         destination.add_extension("md");
-        verbose!("Copying original markdown file to {destination:?}");
+        info!("Copying original markdown file to {destination:?}");
         copy(&page.source_path, &destination).unwrap_or_else(|_|
             error!("Failed to copy original markdown file {:?} to {:?}",
                 page.source_path, destination));
 
@@ -109,7 +114,7 @@ fn main() {
     for static_file in &website.static_files {
         let mut destination = destination.clone();
         destination.push(&static_file.full_url);
-        verbose!("Copying static file to {destination:?}");
+        info!("Copying static file to {destination:?}");
         make_parent_directory(&destination).unwrap();
         copy(&static_file.source_path, &destination).unwrap_or_else(|_|
             error!("Failed to copy static file {:?} to {:?}",
@@ -122,7 +127,7 @@ fn main() {
         let mut destination = destination.clone();
         destination.push(&redirect.full_url);
         let path = &redirect.redirect;
-        if args.html {
+        if export_html {
             if !path.contains("://") {
                 if let Some(path) = website.has_page(redirect, &path, "html") {
                     write_file(&generate_html_redirect(&path), &destination, "html", redirect.last_modified);
@@ -141,7 +146,7 @@ pub fn write_file(text: &str, destination: &PathBuf, ext: &str, last_modified: Option<SystemTime>) {
     let mut destination = destination.clone();
     destination.add_extension(ext);
-    verbose!("Generating {destination:?}");
+    info!("Generating {destination:?}");
     make_parent_directory(&destination).unwrap_or_else(|_|
         error!("Failed to create parent directories for {destination:?}"));
     write_to_file(&destination, text).unwrap_or_else(|_|
@@ -162,24 +167,14 @@ pub fn make_url_safe(text: &str) -> String {
         .collect()
 }
 
-
-
-xflags::xflags! {
-    /// Generate a website from a structured directory of markdown files.
-    cmd arguments {
-        /// Source directory with markdown files
-        optional source: PathBuf
-        /// Path to output directory
-        optional destination: PathBuf
-        /// Delete the destination directory first if it exists
-        optional --delete
-        /// Generate HTML output
-        optional --html
-        /// Generate Gemtext output
-        optional --gmi
-        /// Print information as each file is parsed
-        optional -v, --verbose
-        /// Print the program version and exit
-        optional --version
+pub fn url_encode(text: &str) -> String {
+    let mut output = String::new();
+    for c in text.chars() {
+        match c {
+            '"' => output.push_str("%22"),
+            '\'' => output.push_str("%27"),
+            _ => output.push(c),
+        }
     }
+    return output;
 }
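
Note on the new highlighter configuration read by parse_highlighters above: the value of a "highlighters" key in toaster.conf is scanned line by line, a bracketed line such as [rust/rs] starts a new highlighter and names the languages it serves (the bracket contents are split on '/'), and the lines that follow are collected and handed to Highlighter::from_str from the highlight crate. The sketch below is a hypothetical example only: the unindented-key/indented-value layout and the bracket syntax come from the parsing code in this diff, while the rule lines are placeholders, since the highlight crate's rule syntax is not part of this change.

    highlighters
        [rust/rs]
        (rule lines in the highlight crate's syntax)
        [python/py]
        (rule lines in the highlight crate's syntax)

Code blocks whose language does not appear in any bracketed group fall back to the plain sanitize_text path in document_to_html.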