summaryrefslogtreecommitdiff
path: root/src/main.rs
diff options
context:
space:
mode:
authorBen Bridle <ben@derelict.engineering>2026-02-22 10:15:06 +1300
committerBen Bridle <ben@derelict.engineering>2026-02-22 10:15:33 +1300
commit700c0ddd79fc6ca01d52250b69b02c1a13d4ddef (patch)
tree9c3c31e8d9cde40dbcc689c0abd876e57a10f028 /src/main.rs
parent8c2ac6d92f6a4579591f748eebcbca2b9913d92d (diff)
downloadtoaster-700c0ddd79fc6ca01d52250b69b02c1a13d4ddef.zip
Big rewrite
A quick list of everything that's changed:
- links to a duplicate heading beneath the same level 1 heading now work
- rss feed generation using a .feed file
- customisation of the html template using the html.template key
- option to use symlinks instead of copying static files
- fixed incorrect resolution of internal links
- simplified different name forms with the Name type
- allow linking to a redirect
Diffstat (limited to 'src/main.rs')
-rw-r--r--src/main.rs79
1 file changed, 64 insertions(+), 15 deletions(-)
diff --git a/src/main.rs b/src/main.rs
index 32b0ab9..dad8377 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,10 +1,15 @@
#![feature(path_add_extension)]
mod collect_files;
+mod config;
mod generate_html;
+mod generate_rss;
mod string_utils;
+
pub use collect_files::*;
+pub use config::*;
pub use generate_html::*;
+pub use generate_rss::*;
pub use string_utils::*;
use markdown::*;
@@ -43,11 +48,17 @@ fn main() {
args.positional("destination");
args.named("delete");
args.named("html");
+ args.named("use-symlinks");
args.raise_errors();
let source = args.get("source").as_path();
let destination = args.get("destination").as_path();
let delete_existing = args.get("delete").as_bool();
let export_html = args.get("html").as_bool();
+ let use_symlinks = args.get("use-symlinks").as_bool();
+ #[cfg(not(target_family = "unix"))]
+ if use_symlinks {
+ fatal!("Symbolic links are only supported on Linux");
+ }
// Parse entire website directory.
let source = match source.canonicalize() {
@@ -59,26 +70,46 @@ fn main() {
// ------------------------------------------------------------
// Check for duplicate output paths for pages.
- let mut destinations: HashSet<&str> = HashSet::new();
+ let mut urls: HashSet<&str> = HashSet::new();
let mut duplicates: HashSet<&str> = HashSet::new();
for page in &website.pages {
- if !destinations.insert(&page.full_url) {
- duplicates.insert(&page.full_url);
+ if !urls.insert(&page.url) {
+ duplicates.insert(&page.url);
+ };
+ }
+ for static_file in &website.static_files {
+ if !urls.insert(&static_file.url) {
+ duplicates.insert(&static_file.url);
+ };
+ }
+ for redirect in &website.redirects {
+ if !urls.insert(&redirect.url) {
+ duplicates.insert(&redirect.url);
};
}
if !duplicates.is_empty() {
for destination in duplicates {
- warn!("Multiple pages have the output path {destination:?}");
+ warn!("Multiple files, pages, or redirects have the output path {destination:?}");
for page in &website.pages {
- if page.full_url == destination {
+ if page.url == destination {
eprintln!(":: {:?}", page.source_path);
}
}
+ for static_file in &website.static_files {
+ if static_file.url == destination {
+ eprintln!(":: {:?}", static_file.source_path);
+ }
+ }
+ for redirect in &website.redirects {
+ if redirect.url == destination {
+ eprintln!(":: {:?}", redirect.source_path);
+ }
+ }
}
}
let mut destination = destination;
- destination.push(make_url_safe(&website.name));
+ destination.push(&website.name.slug());
if delete_existing && Entry::from_path(&destination).is_ok() {
info!("Deleting existing destination directory {destination:?}");
@@ -89,7 +120,7 @@ fn main() {
for page in &website.pages {
let mut destination = destination.clone();
- destination.push(&page.full_url);
+ destination.push(&page.url);
// Convert document to different formats.
if export_html {
let html = generate_html(page, &website);
@@ -105,20 +136,32 @@ fn main() {
for static_file in &website.static_files {
let mut destination = destination.clone();
- destination.push(&static_file.full_url);
- info!("Copying static file to {destination:?}");
- make_parent_directory(&destination).unwrap();
- copy(&static_file.source_path, &destination).unwrap_or_else(|_|
- error!("Failed to copy static file {:?} to {:?}",
- static_file.source_path, destination));
+ destination.push(&static_file.url);
+ if use_symlinks {
+ #[cfg(target_family = "unix")]
+ {
+ info!("Linking static file to {destination:?}");
+ make_parent_directory(&destination).unwrap();
+ let _ = remove(&destination);
+ std::os::unix::fs::symlink(&static_file.source_path, &destination).unwrap_or_else(|_|
+ error!("Failed to link static file {:?} to {:?}",
+ static_file.source_path, destination));
+ }
+ } else {
+ info!("Copying static file to {destination:?}");
+ copy(&static_file.source_path, &destination).unwrap_or_else(|_|
+ error!("Failed to copy static file {:?} to {:?}",
+ static_file.source_path, destination));
+ }
+
}
// NOTE: Static dir contents are copied as part of all static files.
for redirect in &website.redirects {
let mut destination = destination.clone();
- destination.push(&redirect.full_url);
- let path = &redirect.redirect;
+ destination.push(&redirect.url);
+ let path = &redirect.target;
if export_html {
if !path.contains("://") {
if let Some(path) = website.has_page(redirect, &path, "html") {
@@ -131,6 +174,12 @@ fn main() {
}
}
}
+
+ for feed in &website.feeds {
+ let mut destination = destination.clone();
+ destination.push(&feed.url);
+ write_file(&generate_rss(feed, &website), &destination, "rss", feed.last_modified);
+ }
}
fn print_help() {