summaryrefslogtreecommitdiff
path: root/src/main.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/main.rs')
-rw-r--r--src/main.rs114
1 files changed, 94 insertions, 20 deletions
diff --git a/src/main.rs b/src/main.rs
index d8c9274..a1b38f6 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -11,6 +11,7 @@ const NORMAL: &str = "\x1b[0m";
const BOLD: &str = "\x1b[1m";
const WHITE: &str = "\x1b[37m";
const RED: &str = "\x1b[31m";
+const YELLOW: &str = "\x1b[33m";
const BLUE: &str = "\x1b[34m";
static mut VERBOSE: bool = false;
@@ -20,6 +21,12 @@ static mut VERBOSE: bool = false;
eprintln!("{NORMAL}");
} };
}
+#[macro_export] macro_rules! warn {
+ ($($tokens:tt)*) => {{
+ eprint!("{BOLD}{YELLOW}[WARNING]{NORMAL}{WHITE}: "); eprint!($($tokens)*);
+ eprintln!("{NORMAL}");
+ }};
+}
#[macro_export] macro_rules! error {
($($tokens:tt)*) => {{
eprint!("{BOLD}{RED}[ERROR]{WHITE}: "); eprint!($($tokens)*);
@@ -31,26 +38,36 @@ fn main() {
let args = Arguments::from_env_or_exit();
if args.version {
let version = env!("CARGO_PKG_VERSION");
- eprintln!("Markdown website generator, version {version}");
+ eprintln!("toaster, version {version}");
std::process::exit(0);
}
if args.verbose {
unsafe { VERBOSE = true; }
}
+ if args.source.is_none() || args.destination.is_none() {
+ error!("Provide a source directory and a destination directory.")
+ }
+ let source_directory = args.source.unwrap().canonicalize().unwrap_or_else(|err| error!("Couldn't open source directory: {:?}", err));
+ let destination_directory = args.destination.unwrap();
let mut website = Website {
source_files: Vec::new(),
static_files: Vec::new(),
- name: match Entry::from_path(&args.source) {
+ name: match Entry::from_path(&source_directory) {
Ok(entry) => entry.name,
- Err(err) => error!("Couldn't open {:?}: {:?}", args.source, err),
+ Err(err) => error!("Couldn't open {:?}: {:?}", &source_directory, err),
},
error: false,
};
+
// Collect all website files.
- match traverse_directory(&args.source) {
+ match traverse_directory(&source_directory) {
Ok(entries) => for entry in entries {
+ // Ignore dot files.
+ if entry.name.starts_with(".") {
+ continue;
+ }
// Generate name, stripping any leading digit sequence.
let (mut name, extension) = entry.split_name();
if let Some((prefix, suffix)) = name.split_once(' ') {
@@ -60,9 +77,9 @@ fn main() {
}
// Generate full URL with stripped name, no extension.
let source_path = entry.original_path;
- let relative_path = source_path.strip_prefix(&args.source).unwrap_or_else(
+ let relative_path = source_path.strip_prefix(&source_directory).unwrap_or_else(
// Probably unreachable.
- |_| error!("Path doesn't start with {:?}: {:?}", args.source, source_path));
+ |_| error!("Path doesn't start with {:?}: {:?}", source_directory, source_path));
let mut full_url = String::new();
let mut components: Vec<_> = relative_path.components().collect();
components.pop(); // Remove file segment, use the stripped name instead.
@@ -70,10 +87,12 @@ fn main() {
full_url.push_str(&make_url_safe(&c.as_os_str().to_string_lossy()));
full_url.push('/')
};
+ let parent_url = full_url.clone();
full_url.push_str(&make_url_safe(&name));
if extension == "md" {
+ // Rename and relocate index files.
let mut file_url = make_url_safe(&name);
if file_url == "+index" {
let components: Vec<_> = relative_path.components().collect();
@@ -94,7 +113,15 @@ fn main() {
full_url.push_str(&file_url);
}
}
- website.source_files.push(SourceFile { name, file_url, full_url, source_path });
+ // Load and parse the markdown.
+ let markdown = std::fs::read_to_string(&source_path).unwrap_or_else(|err| error!("Couldn't read {:?}: {:?}", source_path, err));
+ let document = MarkdownDocument::from_str(&markdown);
+ let headings = document.blocks.iter().filter_map(|block| match block {
+ Block::Heading { line, .. } => Some(make_url_safe(&line.to_string())),
+ _ => None,
+ }).collect();
+ website.source_files.push(
+ SourceFile { name, parent_url, file_url, full_url, source_path, document, headings });
} else {
full_url.push('.'); full_url.push_str(&extension);
website.static_files.push(StaticFile { full_url, source_path });
@@ -103,21 +130,21 @@ fn main() {
Err(err) => error!("Could not read from source directory: {:?}", err),
}
- let mut destination = args.destination.clone();
+ let mut destination = destination_directory.clone();
destination.push(make_url_safe(&website.name));
for source_file in &website.source_files {
- let markdown = std::fs::read_to_string(&source_file.source_path).unwrap();
- let document = MarkdownDocument::from_str(&markdown);
let mut destination = destination.clone();
destination.push(&source_file.full_url);
// Convert document to different formats.
if args.html {
- let html = generate_html(&document, source_file, &website);
+ let html = generate_html(&source_file.document, source_file, &website);
write_file(&html, &destination, "html");
}
// Copy original markdown file.
- write_file(&markdown, &destination, "md");
+ destination.add_extension("md");
+ verbose!("Copying original markdown file to {destination:?}");
+ copy(&source_file.source_path, &destination).unwrap_or_else(|err| error!("Couldn't copy {:?} to {:?}: {:?}", source_file.source_path, destination, err));
}
for static_file in &website.static_files {
@@ -141,7 +168,7 @@ pub fn write_file(text: &str, destination: &PathBuf, ext: &str) {
pub fn make_url_safe(text: &str) -> String {
text.to_ascii_lowercase().chars().filter_map(|c|
- if c.is_alphanumeric() || "-_~.+/".contains(c) { Some(c) }
+ if c.is_alphanumeric() || "-_~.+/#".contains(c) { Some(c) }
else if c == ' ' { Some('-') }
else { None } )
.collect()
@@ -156,21 +183,68 @@ pub struct Website {
}
impl Website {
- pub fn has_page(&self, path: &str) -> bool {
+ // Ext is extension without a dot.
+ // Checks if a relative link to an internal page name can be reached from
+ // the current page, and returns a resolved absolute link to the page with extension.
+ pub fn has_page(&self, from: &SourceFile, path: &str, ext: &str) -> Option<String> {
+ // Remove heading fragment and file extension.
+ let (path, heading) = match path.rsplit_once('#') {
+ Some((path, heading)) => match heading.is_empty() {
+ true => (path, None),
+ false => (path, Some(heading)),
+ }
+ None => (path, None),
+ };
+ let path = path.strip_suffix(&format!(".{ext}")).unwrap_or(path);
+
+ // Attach parent of current page to given path.
+ let directory = match from.parent_url.rsplit_once('/') {
+ Some((parent, _)) => parent,
+ None => &from.parent_url,
+ };
+ let full_path = match path.starts_with("/") {
+ true => path.to_string(),
+ false => format!("{directory}/{path}"),
+ };
+
+ // Remove relative portions of path.
+ let segments: Vec<&str> = full_path.split("/")
+ .filter(|seg| !seg.is_empty() && *seg != ".")
+ .collect();
+ // Resolve each ".." against the segment before it (stack-style),
+ // so "a/b/../c" reduces to "a/c"; a ".." at the root is dropped.
+ let mut reduced_segments: Vec<&str> = Vec::new();
+ for segment in segments {
+ if segment == ".." {
+ reduced_segments.pop();
+ } else {
+ reduced_segments.push(segment);
+ }
+ }
+ let path = reduced_segments.join("/");
+
for source_file in &self.source_files {
if source_file.full_url == path {
- return true;
+ if let Some(heading) = heading {
+ if !source_file.headings.contains(&make_url_safe(heading)) {
+ warn!("Page {:?} contains link to nonexistent heading {heading:?} on page {path:?}", from.name);
+ }
+ }
+ return Some(format!("{path}.{ext}"));
}
}
- return false;
+ return None;
}
}
pub struct SourceFile {
pub name: String,
- pub file_url: String, // URL file segment, no extension
- pub full_url: String, // URL full path, no extension
+ pub parent_url: String, // URL base of child pages
+ pub file_url: String, // URL file segment, no extension
+ pub full_url: String, // URL full path, no extension
pub source_path: PathBuf,
+ pub document: MarkdownDocument,
+ pub headings: Vec<String>,
}
pub struct StaticFile {
@@ -182,9 +256,9 @@ xflags::xflags! {
/// Generate a website from a structured directory of markdown files.
cmd arguments {
/// Source directory with markdown files
- required source: PathBuf
+ optional source: PathBuf
/// Path to output directory
- required destination: PathBuf
+ optional destination: PathBuf
/// Generate HTML output
optional --html
/// Generate Gemtext output