summaryrefslogtreecommitdiff
path: root/src/main.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/main.rs')
-rw-r--r--src/main.rs186
1 files changed, 112 insertions, 74 deletions
diff --git a/src/main.rs b/src/main.rs
index 3ee0bc6..dad8377 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,9 +1,16 @@
#![feature(path_add_extension)]
mod collect_files;
-pub use collect_files::*;
+mod config;
mod generate_html;
+mod generate_rss;
+mod string_utils;
+
+pub use collect_files::*;
+pub use config::*;
pub use generate_html::*;
+pub use generate_rss::*;
+pub use string_utils::*;
use markdown::*;
use vagabond::*;
@@ -12,81 +19,97 @@ use std::collections::HashSet;
use std::time::SystemTime;
use log::{info, warn, error, fatal};
-use switchboard::{Switchboard, SwitchQuery};
-
-
-fn print_help() -> ! {
- eprintln!("\
-Usage: toaster <source> <destination>
-
-Generate a website from a structured directory of markdown files.
-
-Arguments:
- source Source directory with markdown files
- destination Path to output directory
-
-Switches:
- --delete Delete the destination directory first if it exists
- --html Generate HTML output
- --version, -v Print information as each file is parsed
- --version Print the program version and exit
- --help, -h Print help
-");
- std::process::exit(0);
-}
-
-fn print_version() -> ! {
- let version = env!("CARGO_PKG_VERSION");
- eprintln!("toaster, version {version}");
- eprintln!("written by ben bridle");
- std::process::exit(0);
-}
+use switchboard::*;
fn main() {
let mut args = Switchboard::from_env();
- if args.named("help").short('h').as_bool() {
+
+ // Informational switches.
+ args.named("help").short('h');
+ args.named("version");
+ args.named("verbose").short('v');
+ if args.get("help").as_bool() {
print_help();
+ std::process::exit(0);
}
- if args.named("version").as_bool() {
- print_version();
+ if args.get("version").as_bool() {
+ let version = env!("CARGO_PKG_VERSION");
+ eprintln!("toaster, version {version}");
+ eprintln!("written by ben bridle");
+ std::process::exit(0);
}
- if args.named("verbose").short('v').as_bool() {
+ if args.get("verbose").as_bool() {
log::set_log_level(log::LogLevel::Info);
}
- let source = args.positional("source").as_path();
- let destination = args.positional("destination").as_path();
- let delete_existing = args.named("delete").as_bool();
- let export_html = args.named("html").as_bool();
+ // Functional switches.
+ args.positional("source");
+ args.positional("destination");
+ args.named("delete");
+ args.named("html");
+ args.named("use-symlinks");
+ args.raise_errors();
+ let source = args.get("source").as_path();
+ let destination = args.get("destination").as_path();
+ let delete_existing = args.get("delete").as_bool();
+ let export_html = args.get("html").as_bool();
+ let use_symlinks = args.get("use-symlinks").as_bool();
+ #[cfg(not(target_family = "unix"))]
+ if use_symlinks {
+ fatal!("Symbolic links are only supported on Unix");
+ }
+
+ // Parse entire website directory.
let source = match source.canonicalize() {
Ok(source) => source,
Err(err) => fatal!("{source:?}: {err}"),
};
-
let website = Website::from_path(&source);
+ // ------------------------------------------------------------
+
// Check for duplicate output paths for pages.
- let mut destinations: HashSet<&str> = HashSet::new();
+ let mut urls: HashSet<&str> = HashSet::new();
let mut duplicates: HashSet<&str> = HashSet::new();
for page in &website.pages {
- if !destinations.insert(&page.full_url) {
- duplicates.insert(&page.full_url);
+ if !urls.insert(&page.url) {
+ duplicates.insert(&page.url);
+ };
+ }
+ for static_file in &website.static_files {
+ if !urls.insert(&static_file.url) {
+ duplicates.insert(&static_file.url);
+ };
+ }
+ for redirect in &website.redirects {
+ if !urls.insert(&redirect.url) {
+ duplicates.insert(&redirect.url);
};
}
if !duplicates.is_empty() {
for destination in duplicates {
- warn!("Multiple pages have the output path {destination:?}");
+ warn!("Multiple files, pages, or redirects have the output path {destination:?}");
for page in &website.pages {
- if page.full_url == destination {
+ if page.url == destination {
eprintln!(":: {:?}", page.source_path);
}
}
+ for static_file in &website.static_files {
+ if static_file.url == destination {
+ eprintln!(":: {:?}", static_file.source_path);
+ }
+ }
+ for redirect in &website.redirects {
+ if redirect.url == destination {
+ eprintln!(":: {:?}", redirect.source_path);
+ }
+ }
}
}
let mut destination = destination;
- destination.push(make_url_safe(&website.name));
+ destination.push(&website.name.slug());
if delete_existing && Entry::from_path(&destination).is_ok() {
info!("Deleting existing destination directory {destination:?}");
@@ -97,7 +120,7 @@ fn main() {
for page in &website.pages {
let mut destination = destination.clone();
- destination.push(&page.full_url);
+ destination.push(&page.url);
// Convert document to different formats.
if export_html {
let html = generate_html(page, &website);
@@ -113,20 +136,32 @@ fn main() {
for static_file in &website.static_files {
let mut destination = destination.clone();
- destination.push(&static_file.full_url);
- info!("Copying static file to {destination:?}");
- make_parent_directory(&destination).unwrap();
- copy(&static_file.source_path, &destination).unwrap_or_else(|_|
- error!("Failed to copy static file {:?} to {:?}",
- static_file.source_path, destination));
+ destination.push(&static_file.url);
+ if use_symlinks {
+ #[cfg(target_family = "unix")]
+ {
+ info!("Linking static file to {destination:?}");
+ make_parent_directory(&destination).unwrap();
+ let _ = remove(&destination);
+ std::os::unix::fs::symlink(&static_file.source_path, &destination).unwrap_or_else(|_|
+ error!("Failed to link static file {:?} to {:?}",
+ static_file.source_path, destination));
+ }
+ } else {
+ info!("Copying static file to {destination:?}");
+ copy(&static_file.source_path, &destination).unwrap_or_else(|_|
+ error!("Failed to copy static file {:?} to {:?}",
+ static_file.source_path, destination));
+ }
+
}
// NOTE: Static dir contents are copied as part of all static files.
for redirect in &website.redirects {
let mut destination = destination.clone();
- destination.push(&redirect.full_url);
- let path = &redirect.redirect;
+ destination.push(&redirect.url);
+ let path = &redirect.target;
if export_html {
if !path.contains("://") {
if let Some(path) = website.has_page(redirect, &path, "html") {
@@ -139,9 +174,32 @@ fn main() {
}
}
}
+
+ for feed in &website.feeds {
+ let mut destination = destination.clone();
+ destination.push(&feed.url);
+ write_file(&generate_rss(feed, &website), &destination, "rss", feed.last_modified);
+ }
}
+fn print_help() {
+ eprintln!("\
+Usage: toaster <source> <destination>
+Generate a website from a structured directory of markdown files.
+
+Arguments:
+ source Source directory with markdown files
+ destination Path to output directory
+
+Switches:
+ --delete Delete the destination directory first if it exists
+ --html Generate HTML output
+ --use-symlinks Link to static files instead of copying them (Unix only)
+ --verbose, -v Print information as each file is parsed
+ --version Print the program version and exit
+ --help, -h Print help
+");
+}
pub fn write_file(text: &str, destination: &PathBuf, ext: &str, last_modified: Option<SystemTime>) {
let mut destination = destination.clone();
@@ -158,23 +216,3 @@ pub fn write_file(text: &str, destination: &PathBuf, ext: &str, last_modified: O
}
}
}
-
-pub fn make_url_safe(text: &str) -> String {
- text.to_ascii_lowercase().chars().filter_map(|c|
- if c.is_alphanumeric() || "-_~.+/#".contains(c) { Some(c) }
- else if c == ' ' { Some('-') }
- else { None } )
- .collect()
-}
-
-pub fn url_encode(text: &str) -> String {
- let mut output = String::new();
- for c in text.chars() {
- match c {
- '"' => output.push_str("%22"),
- '\'' => output.push_str("%27"),
- _ => output.push(c),
- }
- }
- return output;
-}