#![feature(path_add_extension)]
mod collect_files;
pub use collect_files::*;
mod generate_html;
pub use generate_html::*;
use markdown::*;
use std::collections::HashSet;
use std::path::Path;
use vagabond::*;
// ANSI terminal escape sequences used by the logging macros below.
const NORMAL: &str = "\x1b[0m";
const BOLD: &str = "\x1b[1m";
const WHITE: &str = "\x1b[37m";
const RED: &str = "\x1b[31m";
const YELLOW: &str = "\x1b[33m";
const BLUE: &str = "\x1b[34m";
// Global verbosity flag, written once in main() when --verbose is given
// and read by the verbose! macro. NOTE(review): `static mut` access is
// unsynchronized — fine while the program stays single-threaded, but an
// AtomicBool would be the safe replacement; confirm no threads are spawned
// elsewhere in the crate.
static mut VERBOSE: bool = false;
// Print an "[INFO]" line to stderr, but only when the global VERBOSE flag
// was enabled via --verbose. Takes format arguments like eprintln!.
// NOTE(review): the color constants and VERBOSE appear to resolve at the
// expansion site, so despite #[macro_export] this macro is presumably only
// usable where those items are in scope — confirm if used cross-crate.
#[macro_export] macro_rules! verbose {
($($tokens:tt)*) => { if unsafe { VERBOSE } {
eprint!("{BOLD}{BLUE}[INFO]{NORMAL}: "); eprint!($($tokens)*);
eprintln!("{NORMAL}");
} };
}
// Print a "[WARNING]" line to stderr and continue. The tag is bold yellow;
// {NORMAL}{WHITE} resets bold before the message text, and the trailing
// {NORMAL} restores the terminal's default attributes.
#[macro_export] macro_rules! warn {
($($tokens:tt)*) => {{
eprint!("{BOLD}{YELLOW}[WARNING]{NORMAL}{WHITE}: "); eprint!($($tokens)*);
eprintln!("{NORMAL}");
}};
}
/// Print an "[ERROR]" line to stderr and terminate the process with exit
/// code 1. Takes format arguments like `eprintln!`.
#[macro_export] macro_rules! error {
    ($($tokens:tt)*) => {{
        // Reset BOLD after the tag ({NORMAL}{WHITE}) so the message body is
        // not printed in bold — matches the formatting of the warn! macro,
        // which the original version of this macro diverged from.
        eprint!("{BOLD}{RED}[ERROR]{NORMAL}{WHITE}: "); eprint!($($tokens)*);
        eprintln!("{NORMAL}"); std::process::exit(1);
    }};
}
fn main() {
let args = Arguments::from_env_or_exit();
if args.version {
let version = env!("CARGO_PKG_VERSION");
eprintln!("toaster, version {version}");
std::process::exit(0);
}
if args.verbose {
unsafe { VERBOSE = true; }
}
if args.source.is_none() || args.destination.is_none() {
error!("Provide a source directory and a destination directory.")
}
let source_directory = args.source.unwrap().canonicalize().unwrap();
let destination_directory = args.destination.unwrap();
let website = Website::from_path(&source_directory);
// Check for duplicate output paths for pages.
let mut destinations: HashSet<&str> = HashSet::new();
let mut duplicates: HashSet<&str> = HashSet::new();
for page in &website.pages {
if !destinations.insert(&page.full_url) {
duplicates.insert(&page.full_url);
};
}
if !duplicates.is_empty() {
for destination in duplicates {
warn!("Multiple pages have the output path {destination:?}");
for page in &website.pages {
if page.full_url == destination {
eprintln!(":: {:?}", page.source_path);
}
}
}
}
let mut destination = destination_directory.clone();
destination.push(make_url_safe(&website.name));
if args.delete && Entry::from_path(&destination).is_ok() {
verbose!("Deleting existing destination directory {destination:?}");
remove(&destination).unwrap_or_else(|_|
error!("Failed to delete existing destination directory {destination:?}"));
}
for page in &website.pages {
let mut destination = destination.clone();
destination.push(&page.full_url);
// Convert document to different formats.
if args.html {
let html = generate_html(&page.document, page, &website);
write_file(&html, &destination, "html");
}
// Copy original markdown file.
destination.add_extension("md");
verbose!("Copying original markdown file to {destination:?}");
copy(&page.source_path, &destination).unwrap_or_else(|_|
error!("Failed to copy original markdown file {:?} to {:?}",
page.source_path, destination));
}
for static_file in &website.static_files {
let mut destination = destination.clone();
destination.push(&static_file.full_url);
verbose!("Copying static file to {destination:?}");
make_parent_directory(&destination).unwrap();
copy(&static_file.source_path, &destination).unwrap_or_else(|_|
error!("Failed to copy static file {:?} to {:?}",
static_file.source_path, destination));
}
for static_dir in &website.static_dirs {
let mut destination = destination.clone();
destination.push(&static_dir.full_url);
verbose!("Copying static directory to {destination:?}");
make_parent_directory(&destination).unwrap();
copy(&static_dir.source_path, &destination).unwrap_or_else(|_|
error!("Failed to copy static directory {:?} to {:?}",
static_dir.source_path, destination));
}
for redirect in &website.redirects {
let mut destination = destination.clone();
destination.push(&redirect.full_url);
let path = &redirect.redirect;
if args.html {
if !path.contains("://") {
if let Some(path) = website.has_page(redirect, &path, "html") {
write_file(&generate_html_redirect(&path), &destination, "html");
} else {
warn!("Redirect {:?} links to nonexistent page {path:?}", redirect.name);
}
} else {
write_file(&generate_html_redirect(&path), &destination, "html");
}
}
}
}
pub fn write_file(text: &str, destination: &PathBuf, ext: &str) {
let mut destination = destination.clone();
destination.add_extension(ext);
verbose!("Generating {destination:?}");
make_parent_directory(&destination).unwrap_or_else(|_|
error!("Failed to create parent directories for {destination:?}"));
write_to_file(&destination, text).unwrap_or_else(|_|
error!("Failed to write generated {ext} file to {destination:?}"));
}
/// Convert `text` into a URL-safe form: ASCII letters are lowercased,
/// spaces become hyphens, alphanumerics and the characters `-_~.+/#` are
/// kept, and everything else is dropped.
pub fn make_url_safe(text: &str) -> String {
    const KEEP: &str = "-_~.+/#";
    let lowered = text.to_ascii_lowercase();
    let mut result = String::with_capacity(lowered.len());
    for character in lowered.chars() {
        if character == ' ' {
            result.push('-');
        } else if character.is_alphanumeric() || KEEP.contains(character) {
            result.push(character);
        }
        // Any other character is silently discarded.
    }
    result
}
// Command-line interface definition. xflags generates the `Arguments`
// struct and its `from_env_or_exit` parser from this declaration. The
// `///` doc comments become the --help text, so they are part of the
// program's observable output and must not be reworded casually; plain
// `//` comments are invisible to the macro.
xflags::xflags! {
/// Generate a website from a structured directory of markdown files.
cmd arguments {
/// Source directory with markdown files
optional source: PathBuf
/// Path to output directory
optional destination: PathBuf
/// Delete the destination directory first if it exists
optional --delete
/// Generate HTML output
optional --html
/// Generate Gemtext output
// NOTE(review): --gmi is parsed but never read in main() as visible in
// this file — confirm whether Gemtext generation is implemented elsewhere.
optional --gmi
/// Print information as each file is parsed
optional -v, --verbose
/// Print the program version and exit
optional --version
}
}