#![feature(path_add_extension)]

mod collect_files;
pub use collect_files::*;
mod generate_html;
pub use generate_html::*;

use markdown::*;
use vagabond::*;

use std::collections::HashSet;
use std::path::Path;
use std::time::SystemTime;

use log::{info, warn, error, fatal};


fn main() {
    let args = Arguments::from_env_or_exit();
    if args.version {
        let version = env!("CARGO_PKG_VERSION");
        eprintln!("toaster, version {version}");
        std::process::exit(0);
    }
    if args.verbose {
        log::set_log_level(log::LogLevel::Info);
    }
    if args.source.is_none() || args.destination.is_none() {
        fatal!("Provide a source directory and a destination directory.")
    }
    let source_directory = match args.source.as_ref().unwrap().canonicalize() {
        Ok(source_directory) => source_directory,
        Err(err) => fatal!("{:?}: {err}", args.source.unwrap()),
    };
    let destination_directory = args.destination.unwrap();


    let website = Website::from_path(&source_directory);

    // Check for duplicate output paths for pages.
    let mut destinations: HashSet<&str> = HashSet::new();
    let mut duplicates: HashSet<&str> = HashSet::new();
    for page in &website.pages {
        if !destinations.insert(&page.full_url) {
            duplicates.insert(&page.full_url);
        };
    }
    if !duplicates.is_empty() {
        for destination in duplicates {
            warn!("Multiple pages have the output path {destination:?}");
            for page in &website.pages {
                if page.full_url == destination {
                    eprintln!(":: {:?}", page.source_path);
                }
            }
        }
    }

    let mut destination = destination_directory.clone();
    destination.push(make_url_safe(&website.name));

    if args.delete && Entry::from_path(&destination).is_ok() {
        info!("Deleting existing destination directory {destination:?}");
        remove(&destination).unwrap_or_else(|_|
            error!("Failed to delete existing destination directory {destination:?}"));
    }


    for page in &website.pages {
        let mut destination = destination.clone();
        destination.push(&page.full_url);
        // Convert document to different formats.
        if args.html {
            let html = generate_html(&page.document, page, &website);
            write_file(&html, &destination, "html", page.last_modified);
        }
        // Copy original markdown file.
        destination.add_extension("md");
        info!("Copying original markdown file to {destination:?}");
        copy(&page.source_path, &destination).unwrap_or_else(|_|
            error!("Failed to copy original markdown file {:?} to {:?}",
                page.source_path, destination));
    }

    for static_file in &website.static_files {
        let mut destination = destination.clone();
        destination.push(&static_file.full_url);
        info!("Copying static file to {destination:?}");
        make_parent_directory(&destination).unwrap();
        copy(&static_file.source_path, &destination).unwrap_or_else(|_|
            error!("Failed to copy static file {:?} to {:?}",
                static_file.source_path, destination));
    }

    // NOTE: Static dir contents are copied as part of all static files.

    for redirect in &website.redirects {
        let mut destination = destination.clone();
        destination.push(&redirect.full_url);
        let path = &redirect.redirect;
        if args.html {
            if !path.contains("://") {
                if let Some(path) = website.has_page(redirect, &path, "html") {
                    write_file(&generate_html_redirect(&path), &destination, "html", redirect.last_modified);
                } else {
                    warn!("Redirect {:?} links to nonexistent page {path:?}", redirect.name);
                }
            } else {
                write_file(&generate_html_redirect(&path), &destination, "html", redirect.last_modified);
            }
        }
    }
}



pub fn write_file(text: &str, destination: &PathBuf, ext: &str, last_modified: Option<SystemTime>) {
    let mut destination = destination.clone();
    destination.add_extension(ext);
    info!("Generating {destination:?}");
    make_parent_directory(&destination).unwrap_or_else(|_|
        error!("Failed to create parent directories for {destination:?}"));
    write_to_file(&destination, text).unwrap_or_else(|_|
        error!("Failed to write generated {ext} file to {destination:?}"));
    // Set the last-modified time of the new file to the time provided.
    if let Some(time) = last_modified {
        if let Ok(dest) = std::fs::File::open(&destination) {
            let _ = dest.set_modified(time);
        }
    }
}

/// Reduce arbitrary text to a URL-safe form: ASCII letters are lowercased,
/// spaces become hyphens, alphanumerics and the characters `-_~.+/#` are
/// kept, and every other character is dropped.
pub fn make_url_safe(text: &str) -> String {
    let mut safe = String::with_capacity(text.len());
    for c in text.chars() {
        // Per-char ASCII lowercasing matches `str::to_ascii_lowercase`.
        let c = c.to_ascii_lowercase();
        if c == ' ' {
            safe.push('-');
        } else if c.is_alphanumeric() || "-_~.+/#".contains(c) {
            safe.push(c);
        }
    }
    safe
}



// Command-line interface, generated by the `xflags` macro (produces the
// `Arguments` struct used in main). The `///` doc comments below are
// significant: xflags turns them into the --help text, so they must not be
// edited casually. Plain `//` comments are stripped by the lexer before the
// macro sees any tokens, so they are safe to add here.
xflags::xflags! {
    /// Generate a website from a structured directory of markdown files.
    cmd arguments {
        /// Source directory with markdown files
        optional source: PathBuf
        /// Path to output directory
        optional destination: PathBuf
        /// Delete the destination directory first if it exists
        optional --delete
        /// Generate HTML output
        optional --html
        // NOTE(review): --gmi is parsed but nothing in the visible code acts
        // on it — confirm whether Gemtext generation is implemented elsewhere.
        /// Generate Gemtext output
        optional --gmi
        /// Print information as each file is parsed
        optional -v, --verbose
        /// Print the program version and exit
        optional --version
    }
}