about summary refs log tree commit diff
path: root/web/vokobe
diff options
context:
space:
mode:
Diffstat (limited to 'web/vokobe')
-rw-r--r--web/vokobe/.gitignore7
-rw-r--r--web/vokobe/Cargo.lock270
-rw-r--r--web/vokobe/Cargo.toml10
-rw-r--r--web/vokobe/LICENSE21
-rw-r--r--web/vokobe/README.md101
-rw-r--r--web/vokobe/default.nix16
-rw-r--r--web/vokobe/flaaaaake.nix44
-rw-r--r--web/vokobe/src/main.rs922
8 files changed, 1391 insertions, 0 deletions
diff --git a/web/vokobe/.gitignore b/web/vokobe/.gitignore
new file mode 100644
index 0000000..b774d54
--- /dev/null
+++ b/web/vokobe/.gitignore
@@ -0,0 +1,7 @@
+# the cargo build artefacts
+debug/
+target/
+
+# the nix result symlink
+result
+
diff --git a/web/vokobe/Cargo.lock b/web/vokobe/Cargo.lock
new file mode 100644
index 0000000..cdf64e1
--- /dev/null
+++ b/web/vokobe/Cargo.lock
@@ -0,0 +1,270 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "clap"
+version = "2.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "bitflags",
+ "strsim",
+ "textwrap",
+ "unicode-width",
+ "vec_map",
+]
+
+[[package]]
+name = "heck"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.119"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
+
+[[package]]
+name = "memchr"
+version = "2.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "regex"
+version = "1.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
+
+[[package]]
+name = "strsim"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+
+[[package]]
+name = "structopt"
+version = "0.3.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10"
+dependencies = [
+ "clap",
+ "lazy_static",
+ "structopt-derive",
+]
+
+[[package]]
+name = "structopt-derive"
+version = "0.4.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.86"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "vec_map"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "vokobe"
+version = "0.1.3"
+dependencies = [
+ "regex",
+ "structopt",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/web/vokobe/Cargo.toml b/web/vokobe/Cargo.toml
new file mode 100644
index 0000000..9c01d4e
--- /dev/null
+++ b/web/vokobe/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "vokobe"
+version = "0.1.3"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+regex = "1.10.3"
+structopt = "0.3"
diff --git a/web/vokobe/LICENSE b/web/vokobe/LICENSE
new file mode 100644
index 0000000..cb5d6ff
--- /dev/null
+++ b/web/vokobe/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Emile Hansmaennel
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/web/vokobe/README.md b/web/vokobe/README.md
new file mode 100644
index 0000000..c2c54a9
--- /dev/null
+++ b/web/vokobe/README.md
@@ -0,0 +1,101 @@
+# Vokobe
+
+A minimal static site generator tailored to my needs.
+
+CI: [https://hydra.emile.space/project/vokobe](https://hydra.emile.space/project/vokobe)
+
+## Build
+
+```bash
+; cargo build --release
+```
+    
+## Usage/Examples
+
+```bash
+; ./target/release/vokobe --help
+vokobe 0.1.0
+A static site generator
+
+USAGE:
+    vokobe [FLAGS] <in-path> <out-path> <site-name>
+
+FLAGS:
+    -a, --analytics    Activate sending analytics to stats.emile.space
+    -h, --help         Prints help information
+    -V, --version      Prints version information
+
+ARGS:
+    <in-path>      Input path
+    <out-path>     Output path
+    <site-name>    Site name (e.g. emile.space)
+```
+
+
+## Deployment
+
+The following subsections contain some examples of small shell scripts that might be useful for deployment.
+
+### build.sh
+
+Remove the output dir, build it from scratch and update the perms.
+
+I'm actually considering rebuilding vokobe with incremental builds in mind, as it can take a while to generate some really large projects.
+
+```bash
+rm -rf out/
+vokobe -a ./in ./out emile.space
+chmod -R +r out/
+```
+
+### sync.sh
+
+Synchronize the generated output to the remote host for hosting it.
+
+```bash
+rsync -avz --delete <out-path>/* <user>@<host>:<path>
+```
+
+### publish.sh
+
+Build and synchronize.
+
+```bash
+./build.sh
+./sync.sh
+```
+
+### host.sh
+
+Host the local version
+
+```bash
+python3 -m http.server 8081 -d <outpath>/ -b 0.0.0.0
+```
+
+### watchbuild.sh
+
+rebuild on changes
+
+```bash
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p fd entr
+
+while sleep 0.5; do
+  fd . in | entr -d ./build.sh
+done
+```
+
+### local.sh
+
+Run one script that rebuilds the output on changes and another that hosts it.
+
+```bash
+sh ./watchbuild.sh &
+sh ./host.sh
+```
+
+
+## Contributing
+
+Send patches!
diff --git a/web/vokobe/default.nix b/web/vokobe/default.nix
new file mode 100644
index 0000000..7257962
--- /dev/null
+++ b/web/vokobe/default.nix
@@ -0,0 +1,16 @@
+{ pkgs, naersk, ... }:
+
+let
+	naersk' = pkgs.callPackage naersk {};
+in naersk'.buildPackage {
+	src = ./.;
+
+	meta = with pkgs.lib; {
+		description = "A minimal static site generator tailored to my needs.";
+		homepage    = "https://git.emile.space/hanemile/vokobe";
+		license     = licenses.mit;
+		platforms   = platforms.all;
+		maintainers = with maintainers; [ hanemile ];
+	};
+}
+
diff --git a/web/vokobe/flaaaaake.nix b/web/vokobe/flaaaaake.nix
new file mode 100644
index 0000000..7cf2f03
--- /dev/null
+++ b/web/vokobe/flaaaaake.nix
@@ -0,0 +1,44 @@
+{
+  inputs = {
+    flake-utils.url = "github:numtide/flake-utils";
+    naersk.url = "github:nix-community/naersk";
+    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
+  };
+
+  outputs = { self, flake-utils, naersk, nixpkgs }:
+    let
+      pkgs = (import nixpkgs) {
+        system = "x86_64-linux";
+      };
+
+      naersk' = pkgs.callPackage naersk {};
+      
+    in rec {
+      packages."x86_64-linux".vokobe = naersk'.buildPackage {
+        src = ./.;
+
+        meta = with pkgs.lib; {
+          description = "A minimal static site generator tailored to my needs.";
+          homepage    = "https://git.emile.space/hanemile/vokobe";
+          license     = licenses.mit;
+          platforms   = platforms.all;
+          maintainers = with maintainers; [
+            hanemile
+          ];
+        };
+      };
+    
+      # For `nix build` & `nix run`:
+      defaultPackage = packages."x86_64-linux".vokobe;
+
+      # For `nix develop` (optional, can be skipped):
+      devShell = pkgs.mkShell {
+        nativeBuildInputs = with pkgs; [ rustc cargo ];
+      };
+
+      # hydraJobs."<attr>"."<system>" = derivation;
+      hydraJobs = {
+        build."x86_64-linux" = packages."x86_64-linux".vokobe;
+      };
+    };
+}
\ No newline at end of file
diff --git a/web/vokobe/src/main.rs b/web/vokobe/src/main.rs
new file mode 100644
index 0000000..ab26457
--- /dev/null
+++ b/web/vokobe/src/main.rs
@@ -0,0 +1,922 @@
+/*
+pull the std into scope and inline it so that we get documentation for it,
+even when running offline
+*/
+#[doc(inline)]
+pub use std;
+
+use std::path::{Path, PathBuf};
+use std::io::{self, Read, Write, BufRead, BufReader};
+use std::fs::{self, File};
+use std::time;
+use std::collections::HashMap;
+
+use structopt::StructOpt;
+use regex::Regex;
+
+#[derive(Debug, StructOpt)]
+#[structopt(name = "vokobe", about = "A static site generator")]
+struct Opt {
+    /// Input path 
+    #[structopt(parse(from_os_str))]
+    input_path: PathBuf,
+
+    /// Output path
+    #[structopt(parse(from_os_str))]
+    output_path: PathBuf,
+
+    /// Site name (e.g. emile.space)
+    site_name: String,
+
+    /// Activate sending analytics to stats.emile.space
+    // -a and --analytics will be generated
+    // analytics are sent to stats.emile.space
+    #[structopt(short, long)]
+    analytics: bool,
+}
+
+fn main() -> std::io::Result<()> {
+
+    let mut internal_links: HashMap<String, Vec<String>> = HashMap::new();
+
+    let opt = Opt::from_args();
+
+    let in_path = opt.input_path;
+    let output_path = opt.output_path;
+
+    // read the style
+    let style_path = Path::new(&in_path).join("style.css");
+    let mut style_file = File::open(style_path)
+        .expect("could not open style file");
+    let mut style = String::new();
+    style_file.read_to_string(&mut style)
+        .expect("could not read style file to string");
+
+    // read all dirs in the input path
+    let pathes = recursive_read_dir(&in_path, false)?;
+
+    // pass 1: store the backlinks
+
+    for path in &pathes {
+        if path.ends_with("README.md") {
+            // open the file and read it as a string
+            let mut readme_file = File::open(path)?;
+            let mut readme = String::new();
+            readme_file.read_to_string(&mut readme)?;
+
+            let internal_links_in_file
+                = parse_internal_links(readme.as_str());
+
+            for link in internal_links_in_file {
+
+                internal_links.entry(link).or_insert_with(Vec::new).push(path.to_string_lossy().into_owned())
+            }
+        }
+    }
+
+
+    // for each markdown_file in markdown_files {
+    //     let internal_links_in_file = parse_internal_links(markdown_file);
+    //     internal_links.insert(markdown_file, internal_links_in_file);
+    // }
+
+    // pass 2: create the html
+
+    println!("Got {} files", pathes.len());
+    let mut readme_counter = 0;
+
+    for path in pathes {
+        let stripped_path = path.strip_prefix(&in_path)
+            .expect(format!(
+                "could not strip the in_path prefix: {:?}", in_path).as_str());
+
+        // copy images and other files to the output folder
+        if path.is_file() {
+
+            // define the source and destination
+            let src = Path::new(&in_path).join(stripped_path);
+            let dst = Path::new(&output_path).join(stripped_path);
+
+            // define the destination folder (the dst path without the file) and create it
+            let mut dst_folder = dst.clone();
+            dst_folder.pop(); // remove the file itself from the path
+            fs::create_dir_all(dst_folder)?;
+
+            // copy the file to the destination
+            std::fs::copy(src, dst.as_path())?;
+        }
+
+        if stripped_path.ends_with("README.md") {
+            readme_counter += 1;
+
+            // define the "raw" path (no infile prefix, no file)
+            let mut ancestors = stripped_path.ancestors();
+            ancestors.next();
+
+            let raw_path = ancestors.next()
+                .expect("could not extract next ancestor");
+
+            // out + rawpath
+            let index_path = output_path.join(raw_path);
+
+            // (out + rawpath) + "index.html"
+            let index_file = index_path.join("index.html");
+
+            // - create the dir for the index.html as well as the index.html
+            // itself
+            fs::create_dir_all(index_path)?;
+            let mut file = File::create(&index_file)?;
+
+            // this is the main block calling all other smaller functions. The
+            // whole output is composed here
+            write_header(&mut file, &opt.site_name, &style)?;
+            write_body_start(&mut file, &opt.site_name)?;
+            write_nav(&mut file, in_path.as_path(), raw_path, opt.analytics)?;
+            write_same_level(&mut file, in_path.as_path(), raw_path)?;
+            write_readme_content(&mut file, in_path.as_path(), raw_path)?;
+            write_footer(&mut file, raw_path, &internal_links)?;
+
+            file.write_all("".as_bytes())?;
+        }
+    }
+
+    println!("Got {readme_counter} README.md files");
+
+    Ok(())
+}
+
+fn parse_internal_links(markdown_file: &str) -> Vec<String> {
+    // Define a regular expression to match markdown-style links
+    let link_regex = Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap();
+
+    // Initialize a vector to store internal links found in the markdown file
+    let mut internal_links = Vec::new();
+
+    // Iterate over each match of the regular expression in the markdown content
+    for capture in link_regex.captures_iter(&markdown_file) {
+        // Extract the link text and URL from the capture groups
+        // let link_text = &capture[1];
+        let mut link_url = &capture[2];
+
+        // Check if the link is an internal link (e.g., relative URL)
+        // You can customize this condition based on your site's URL structure
+        if link_url.starts_with('/') || link_url.starts_with("../") {
+            if link_url.ends_with('/') {
+                link_url = link_url.trim_end_matches('/');
+            }
+            internal_links.push(link_url.to_string());
+        }
+    }
+
+    internal_links
+}
+
+/// Write the html header including the style file
+/// TODO: Don't add the style file into each compiled html output, as the
+/// style can be included allowing the user to cache the style file in their
+/// browser.
+fn write_header(file: &mut File, site_name: &String, style: &String) -> std::io::Result<()>{
+
+    // write the header including the style file
+    file.write_all(format!(r#"<!DOCTYPE html>
+<html lang="en">
+<head>
+  <meta charset="UTF-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  <title>{}</title>
+
+  <style>
+  {}
+  </style>
+</head>
+    "#, site_name, style).as_bytes())?;
+
+    Ok(())
+}
+
+/// write the start of the html body tag and the header linking back to the
+/// site itself.
+fn write_body_start(file: &mut File, site_name: &String) -> std::io::Result<()>{
+    file.write_all(format!(r#"
+<body>
+  <header>
+    <a href="/">{}</a>
+  </header>"#, site_name).as_bytes())?;
+
+    Ok(())
+}
+
+/// Write the navigation section to the given file
+fn write_nav(file: &mut File, in_path: &Path, raw_path: &Path, analytics: bool)
+    -> std::io::Result<()> {
+
+    if analytics == true {
+        /*
+        file.write_all(format!(r#"
+  <img src="https://stats.emile.space/count?p=/{}">
+  <nav>
+    <ul>"#, raw_path.to_str().unwrap()).as_bytes())?;
+        */
+        file.write_all(format!(r#"
+  <nav>
+    <ul>"#,).as_bytes())?;
+    } else {
+        file.write_all(format!(r#"
+  <nav>
+    <ul>"#).as_bytes())?;
+    }
+
+    // get the nav bar components
+    let components = raw_path.components().collect::<Vec<_>>();
+    
+    // for each list of components (["a"], ["a", "b"], ["a", "b", "c"]), create
+    // the path for the list, view all other dirs at that path and write the
+    // result to the file
+    let mut i = 0;
+    let slice = components.as_slice();
+
+    // for each navbar component
+    for component in slice {
+
+        // get the items belonging to that navbar item
+        // (["a"], ["a", "b"], ["a", "b", "c"])
+        let subpath_components = &slice[..i+1];
+        i += 1;
+
+        let mut subpath_path = PathBuf::new();
+
+        // push the inpath, so we've got a basis from where we can read the
+        // subpath items
+        // subpath_path = inpath + ???
+        subpath_path.push(in_path);
+
+        let mut nav_breadcrumb_link = PathBuf::new();
+
+        // for each item in the subpath, push it into the subpath_path so that
+        // in the end, we've got something like this:
+        // "inpath" + "a" + "b" + "c"
+        for subpath_component in subpath_components {
+            subpath_path.push(subpath_component);
+            nav_breadcrumb_link.push(subpath_component);
+        }
+
+        // make the nav_breadcrumb_link an absolute by prefixing it with a /
+        // (this is in scope of the web-page, so this is fine) and make it a
+        // string
+        let nav_breadcrumb_link_absolute 
+            = Path::new("/")
+                .join(nav_breadcrumb_link);
+
+        let nav_breadcrumb_link
+            = nav_breadcrumb_link_absolute.to_str().unwrap();
+
+        // define the name of the breadcrumb
+        let nav_breadcrumb_name = component.as_os_str().to_str().unwrap();
+
+        ////////////////////////////////////////////////////////////////////////
+        file.write_all(format!(r#"
+        <li>
+            <a href="{}">{}</a>
+            <ul>"#, nav_breadcrumb_link, nav_breadcrumb_name).as_bytes())?;
+        ////////////////////////////////////////////////////////////////////////
+
+        // as we don't want to get the items for the individual entry, but on
+        // the same level, we push a ".."
+        // the subpath_path is now: inpath + subpath + ../
+        subpath_path.push("..");
+
+        // read all dirs in the subpath_path, add them to the dirs vector, so
+        // that we get a vector containing all the dirs we want
+        let mut dirs = Vec::new();
+        for entry in fs::read_dir(subpath_path)? {
+            let path = &entry?.path();
+            if path.is_dir() {
+                dirs.push(path.to_path_buf());
+            }
+        }
+
+        dirs.sort();
+
+        // DROPDOWN
+        // extract the link and name for each directory found
+        for dir in dirs {
+            let d = dir.canonicalize()?;
+            let abs_inpath = in_path.canonicalize()?;
+
+            let name = d.file_name().unwrap().to_str().unwrap();
+            let rel_link 
+                = d.strip_prefix(abs_inpath)
+                    .expect(format!(
+                        "could not strip the in_path prefix: {:?}",
+                        d).as_str());
+
+            let link = Path::new("/").join(rel_link);
+            let link = link.as_path().to_str().unwrap();
+
+            // don't add the current page to the dropdown, we're on it already!
+            if name == nav_breadcrumb_name {
+                continue
+            }
+
+            // don't add items starting with a dot to the dropdown, they're
+            // hidden!
+            if name.starts_with(".") {
+                continue
+            }
+
+            ////////////////////////////////////////////////////////////////////
+            file.write_all(format!(r#"
+                <li><a href="{}">{}/</a></li>"#, link, name).as_bytes())?;
+            ////////////////////////////////////////////////////////////////////
+        }
+
+        ////////////////////////////////////////////////////////////////////////
+        file.write_all(r#"
+            </ul>
+        </li>"#.as_bytes())?;
+        ////////////////////////////////////////////////////////////////////////
+    }
+
+    ////////////////////////////////////////////////////////////////////////////
+    file.write_all(format!(r#"
+    </ul>
+    <ul style="float: right">
+        <li>{:?}</li>
+        <li>
+            <a href="README.md">.md</a>
+        </li>
+    </ul>
+  </nav>"#, in_path.metadata()?.modified()?.duration_since(std::time::UNIX_EPOCH).unwrap().as_secs()).as_bytes())?;
+    ////////////////////////////////////////////////////////////////////////////
+
+    Ok(())
+}
+
+
+fn write_same_level(file: &mut File, in_path: &Path, raw_path: &Path)
+    -> std::io::Result<()> {
+
+    let search_path = Path::new(in_path).join(raw_path);
+
+    let mut dirs: Vec<PathBuf> = Vec::new();
+    let mut files: Vec<PathBuf> = Vec::new();
+
+    let mut vertical: bool = false;
+    let mut show_files: bool = false;
+
+    for entry in fs::read_dir(search_path)? {
+        let path = &entry?.path();
+
+        if path.is_dir() {
+            dirs.push(path.to_path_buf());
+        }
+        if path.is_file() {
+            files.push(path.to_path_buf());
+            if path.file_name().unwrap() == "vertical" {
+                vertical = true;
+            }
+            if path.file_name().unwrap() == "show_files" {
+                show_files = true;
+            }
+        }
+    }
+
+    dirs.sort();
+    files.sort();
+
+    let in_path = in_path.canonicalize()?;
+
+    if vertical == true {
+        file.write_all(format!(r#"
+  <ul class="vert">"#).as_bytes())?;
+    } else {
+        file.write_all(format!(r#"
+  <ul>"#).as_bytes())?;
+    }
+
+    for dir in dirs {
+        let dir = dir.canonicalize()?;
+        let dir = dir.strip_prefix(&in_path)
+            .expect("could not strip in_path prefix");
+
+        let link = Path::new("/").join(dir);
+        let link_str = link.as_path().to_str().unwrap();
+        let name = link.file_name().unwrap().to_str().unwrap();
+
+        if name.starts_with(".") {
+            continue
+        }
+
+        file.write_all(format!(r#"
+    <li><a href="{}">{}/</a></li>"#, link_str, name).as_bytes())?;
+    }
+
+    file.write_all(format!(r#"
+  </ul>"#).as_bytes())?;
+
+    if files.len() >= 1 && show_files == true {
+        file.write_all(format!(r#"<br>
+    <ul>"#).as_bytes())?;
+
+        for f in files {
+            let f = f.canonicalize()?;
+            let f = f.strip_prefix(&in_path)
+                .expect("could not strip in_path prefix");
+
+            let link = Path::new("/").join(f);
+            let link_str = link.as_path().to_str().unwrap();
+            let name = link.file_name().unwrap().to_str().unwrap();
+
+            if name == "README.md"
+                || name == "show_files"
+                || name.starts_with(".")
+                {
+                continue
+            };
+
+            file.write_all(format!(r#"
+        <li><a href="{}">{}</a></li>"#, link_str, name).as_bytes())?;
+        }
+
+        file.write_all(format!(r#"
+    </ul>"#).as_bytes())?;
+    }
+
+
+    Ok(())
+}
+
+fn write_readme_content(file: &mut File, in_path: &Path, raw_path: &Path) 
+    -> std::io::Result<()> {
+
+    // define the path of the README.md file
+    let readme_file_path 
+        = Path::new(in_path).join(raw_path).join("README.md");
+
+    // open the file and read it as a string
+    let mut readme_file = File::open(readme_file_path)?;
+    let mut readme = String::new();
+    readme_file.read_to_string(&mut readme)?;
+
+    // replace all "markdown" style links with HTML links
+    // let re = Regex::new(r"\[([^\[]+)\]\(([^\(]+)\)").unwrap();
+    let re = Regex::new(r"\[([^]]+)\]\(([^)]+)\)").unwrap();
+    let readme = re.replace_all(&readme, "<a href=\"$2\">$1</a>");
+
+    file.write_all(format!("<pre>").as_bytes())?;
+
+    // counting the occurrence of `---`
+    let mut hrule_count = 0;
+    let mut in_yaml_metadata_block= false;
+
+    let mut level_1_heading_num = 0;
+    let mut level_2_heading_num = 0;
+    let mut level_3_heading_num = 0;
+    let mut level_4_heading_num = 0;
+    let mut level_5_heading_num = 0;
+
+    // cheap markdown 2 html converter
+    for line in readme.split('\n') {
+
+        // 1 == 2, as I'm not sure how to comment out the file write 5 lines or so below
+        if in_yaml_metadata_block && 1 == 2 {
+            // if we find the end of the yaml metadata block, break this
+            if line.starts_with("---") {
+                in_yaml_metadata_block = false;
+                continue
+            } else {
+                file.write_all(format!(r##"yaml_line: {}
+"##, line).as_bytes())?;
+                continue
+            }
+        }
+
+        // if we've got a horizontal rule, it can be two things: the start and
+        // end of a yaml-metadata block or an actual horizontal rule.
+        //
+        // If it's yaml metadata, read it all, but don't print it, store it
+        // for later
+        // If it's a horizontal rule, print the horizontal rule
+        if line.starts_with("---") {
+
+            // store the yaml metadata
+            if hrule_count == 0 {
+                in_yaml_metadata_block = true;
+                continue
+            }                 
+            hrule_count += 1;
+
+            // print the horizontal rule
+            file.write_all(format!(r##"
+            <hr>"##).as_bytes())?;
+
+        } else if line.starts_with("#####") {
+            let heading = line.get(6..).unwrap();
+            let heading_sanitized = sanitize(heading.to_string());
+
+            level_5_heading_num += 1;
+
+            file.write_all(format!(r##"</pre>
+            <span id="{a}"></span>
+            <h5><a href="#{a}">{h1}.{h2}.{h3}.{h4}.{h5}. {b}</a></h3>
+            <pre>"##,
+                a = heading_sanitized,
+                b = heading,
+                h1 = level_1_heading_num,
+                h2 = level_2_heading_num,
+                h3 = level_3_heading_num,
+                h4 = level_4_heading_num,
+                h5 = level_5_heading_num,
+            ).as_bytes())?;
+
+        } else if line.starts_with("####") {
+            let heading = line.get(5..).unwrap();
+            let heading_sanitized = sanitize(heading.to_string());
+
+            level_4_heading_num += 1;
+            level_5_heading_num = 0;
+
+            file.write_all(format!(r##"</pre>
+            <span id="{a}"></span>
+            <h4><a href="#{a}">{h1}.{h2}.{h3}.{h4}. {b}</a></h3>
+            <pre>"##,
+                a = heading_sanitized,
+                b = heading,
+                h1 = level_1_heading_num,
+                h2 = level_2_heading_num,
+                h3 = level_3_heading_num,
+                h4 = level_4_heading_num,
+            ).as_bytes())?;
+
+        } else if line.starts_with("###") {
+            let heading = line.get(4..).unwrap();
+            let heading_sanitized = sanitize(heading.to_string());
+
+            level_3_heading_num += 1;
+            level_4_heading_num = 0;
+            level_5_heading_num = 0;
+
+            file.write_all(format!(r##"</pre>
+            <span id="{a}"></span>
+            <h3><a href="#{a}">{h1}.{h2}.{h3}. {b}</a></h3>
+            <pre>"##,
+                a = heading_sanitized,
+                b = heading,
+                h1 = level_1_heading_num,
+                h2 = level_2_heading_num,
+                h3 = level_3_heading_num,
+            ).as_bytes())?;
+
+        } else if line.starts_with("##") {
+            let heading = line.get(3..).unwrap();
+            let heading_sanitized = sanitize(heading.to_string());
+
+            level_2_heading_num += 1;
+            level_3_heading_num = 0;
+            level_4_heading_num = 0;
+            level_5_heading_num = 0;
+
+            file.write_all(format!(r##"</pre>
+            <span id="{a}"></span>
+            <h2><a href="#{a}">{h1}.{h2}. {b}</a></h2>
+            <pre>"##,
+                a = heading_sanitized,
+                b = heading,
+                h1 = level_1_heading_num,
+                h2 = level_2_heading_num,
+            ).as_bytes())?;
+
+        } else if line.starts_with("#") {
+            let heading = line.get(2..).unwrap();
+            let heading_sanitized = sanitize(heading.to_string());
+
+            level_1_heading_num += 1;
+            level_2_heading_num = 0;
+            level_3_heading_num = 0;
+            level_4_heading_num = 0;
+            level_5_heading_num = 0;
+
+            file.write_all(format!(r##"</pre>
+            <span id="{a}"></span>
+            <h1><a href="#{a}">{h1}. {b}</a></h1>
+            <pre>"##,
+                a = heading_sanitized,
+                b = heading,
+                h1 = level_1_heading_num
+            ).as_bytes())?;
+
+        } else if line.starts_with("> ") {
+            let line = line.replace("<", "&lt");
+            let line = line.get(2..).unwrap();
+            file.write_all(format!("</pre><pre class=\"code\">{}</pre><pre>\n", line).as_bytes())?;
+            
+        } else if line.starts_with(":::tree") {
+
+            // TODO: add some parameter controlling if the list is ascending or descending (reverse the list before writing)
+
+            // get all dirs in the current dir recursively
+            let tree_files_path = Path::new(in_path).join(raw_path);
+            let mut tree_files
+                = recursive_read_dir(&tree_files_path, true)?;
+
+            // sort them, otherwise we'll get complete chaos
+            tree_files.sort();
+
+            for path in tree_files {
+                
+                // strip the inpath prefix and raw_path prefix, as we don't need
+                // them
+                let path 
+                    = path.strip_prefix(in_path)
+                        .expect("could not strip in_file prefix")
+                        .strip_prefix(raw_path)
+                        .expect("could not strip raw_path prefix");
+
+                // convert the path to a string, check if it contains a hidden
+                // path by checking if it contains a `/.`, if so, skip this one
+                if String::from(path.to_str().unwrap()).contains("/.") {
+                    continue
+                }
+                if String::from(path.to_str().unwrap()).starts_with(".") {
+                    continue
+                }
+
+                // write the link and the entry name to the file
+                let link = Path::new(raw_path).join(path);
+                let name = path.file_name().unwrap().to_str().unwrap();
+
+                // count the amount of segments in the path and write spaces for
+                // each
+                let segments = path.iter().count();
+                for _ in 0..(segments-1) {
+                    file.write_all(r#"    "#.as_bytes())?;
+                }
+
+                file.write_all(
+                    format!("<a href=\"/{}\">{}</a>\n",
+                        link.display(), name, 
+                        ).as_bytes()
+                )?;
+            }
+
+        } else if line.starts_with(":::toc") {
+
+            // TODO: depth parameter for controlling the depth of the table of contents
+
+            let mut level_1_num = 0;
+            let mut level_2_num = 0;
+            let mut level_3_num = 0;
+            let mut level_4_num = 0;
+            let mut level_5_num = 0;
+
+            for line in readme.split('\n') {
+                if line.starts_with("#####") {
+                    let line = line.get(6..).unwrap();
+                    // trim the line to remove the trailing whitespace
+                    let line = line.trim();
+                    level_5_num += 1;
+                    file.write_all(
+                        format!(
+                            r##"           <a href="#{}">{}.{}.{}.{}.{}. {}</a>
+"##,
+                            sanitize(line.to_string()),
+                            level_1_num,
+                            level_2_num,
+                            level_3_num,
+                            level_4_num,
+                            level_5_num,
+                            line
+                        ).as_bytes()
+                    )?;
+                } else if line.starts_with("####") {
+                    let line = line.get(5..).unwrap();
+                    // trim the line to remove the trailing whitespace
+                    let line = line.trim();
+                    level_4_num += 1;
+                    level_5_num = 0;
+                    file.write_all(
+                        format!(
+                            r##"         <a href="#{}">{}.{}.{}.{}. {}</a>
+"##,
+                            sanitize(line.to_string()),
+                            level_1_num,
+                            level_2_num,
+                            level_3_num,
+                            level_4_num,
+                            line
+                        ).as_bytes()
+                    )?;
+                } else if line.starts_with("###") {
+                    let line = line.get(4..).unwrap();
+                    // trim the line to remove the trailing whitespace
+                    let line = line.trim();
+                    level_3_num += 1;
+                    level_4_num = 0;
+                    level_5_num = 0;
+                    file.write_all(
+                        format!(
+                            r##"       <a href="#{}">{}.{}.{}. {}</a>
+"##,
+                            sanitize(line.to_string()),
+                            level_1_num,
+                            level_2_num,
+                            level_3_num,
+                            line
+                        ).as_bytes()
+                    )?;
+                } else if line.starts_with("##") {
+                    let line = line.get(3..).unwrap();
+                    let line = line.trim();
+                    level_2_num += 1;
+                    level_3_num = 0;
+                    level_4_num = 0;
+                    level_5_num = 0;
+
+                    file.write_all(
+                        format!(
+                            //r##"    <a href="#{}">{}.{}. {}</a>
+                            r##"    <a href="#{}">{}.{}. {}</a>
+"##,
+                            sanitize(line.to_string()),
+                            level_1_num,
+                            level_2_num,
+                            line
+                        ).as_bytes()
+                    )?;
+                } else if line.starts_with("#") {
+                    let line = line.get(2..).unwrap();
+                    let line = line.trim();
+                    level_1_num += 1;
+                    level_2_num = 0;
+                    level_3_num = 0;
+                    level_4_num = 0;
+                    level_5_num = 0;
+
+                    file.write_all(
+                        format!(
+                            r##"<a href="#{}">{}. {}</a>
+"##,
+                            sanitize(line.to_string()),
+                            level_1_num,
+                            line
+                        ).as_bytes()
+                    )?;
+                }
+            }
+
+        } else {
+
+            // for the case that nothing of the above matches, just write the
+            // content into the html body as it is
+            file.write_all(format!("{}\n", line).as_bytes())?;
+        }
+    }
+
+    Ok(())
+}
+
+fn write_footer(file: &mut File, raw_path: &Path, internal_links: &HashMap<String, Vec<String>>) -> std::io::Result<()> {
+
+    // add some padding before the whole footer stuff
+    file.write_all(b"<br><br><br>")?;
+
+    // Backlinks
+
+    let search_path = Path::new("/").join(raw_path).into_os_string().into_string().unwrap();
+
+    match internal_links.get(&search_path) {
+        Some(values) => {
+
+            // only write "backlinks" if we've actually got some
+            file.write_all(b"backlinks:\n")?;
+
+            for link in values {
+
+                // strip the "in" prefix 
+                // strip the "README.md" suffix
+                // TODO: do all this magic by parsing it as a path and removing the unneeded parts, bonus by creating a function doing this and removing the horrible string mashing in this codebase
+                let a = link
+                    .strip_prefix("in")
+                    .expect("no prefix to strip")
+                    .strip_suffix("README.md")
+                    .expect("no README.md suffix to remove");
+
+                file.write_all(format!(r#"- <a href="{a}">{a}</a>
+"#).as_bytes())?;
+            }
+        }
+        None => (),
+    }
+
+    // The actual footer
+
+    file.write_all(format!(r#"
+    </pre>
+<a href="https://chaos.social/@hanemile.rss" target="_blank" rel="noopener" class="icon"><img class="webring" src="/rss.svg" alt="rss feed of @hanemile@chaos.social mastodon" height="32px"/></a>
+<a href="https://lieu.cblgh.org/" target="_blank" rel="noopener" class="icon"><img class="webring" src="/lieu.svg" alt="lieu webring search engine" height="32px"/></a>
+<a href="https://webring.xxiivv.com/#emile" target="_blank" rel="noopener" class="icon"><img class="webring" src="/webring.svg" alt="XXIIVV webring" height="32px"/></a>
+<a rel="me" href="https://chaos.social/@hanemile" target="_blank" class="icon"><img class="webring" src="/mastodon.svg" alt="mastodon" height="32px"/></a>
+    <pre>emile - {:?} - generated using <a href="https://github.com/hanemile/vokobe">vokobe {:?}</a><pre>
+</body>
+</html>
+"#,
+    time::SystemTime::now().duration_since(time::SystemTime::UNIX_EPOCH).unwrap(),
+    env!("CARGO_PKG_VERSION")
+    ).as_bytes())?;
+
+    Ok(())
+}
+
/// Sanitize the given string into an anchor-friendly slug: spaces become
/// hyphens, everything except ASCII letters, digits and hyphens is dropped,
/// and the result is lowercased.
fn sanitize(input: String) -> String {
    input
        .chars()
        .filter_map(|c| match c {
            ' ' | '-' => Some('-'),
            c if c.is_ascii_alphanumeric() => Some(c.to_ascii_lowercase()),
            _ => None,
        })
        .collect()
}
+
+/// Return a list of all files in the directory, recursively.
+fn recursive_read_dir(dir: &PathBuf, dir_only: bool) -> io::Result<Vec<PathBuf>> {
+
+    // return an empty vec if the given path is not a directory
+    if dir.is_dir() == false {
+        return Ok(vec![]);
+    }
+
+    if dir.starts_with(".") {
+       return Ok(vec![]); 
+    }
+
+        // get all entries in the gitignore file, if it exists
+    let gitignore_entries: Vec<PathBuf> = gitignore_entries(&dir)?;
+
+    // store the child pathes
+    let mut entries: Vec<PathBuf> = Vec::new();
+    
+    // iterate over all items in the dir, pushing the dirs pathes to the dirs
+    // vector for returning it
+    'outer: for entry in fs::read_dir(dir)? {
+        let dir_entry = &entry?;
+        let path = dir_entry.path();
+
+        // skip hidden folders
+        if path.starts_with(".") {
+            //continue 'outer;
+            break 'outer;
+        }
+        if dir.starts_with(".") {
+            //continue 'outer;
+            break 'outer;
+        }
+
+        // check if the current entry is part of the gitignore, if so, skip it
+        for gitignore_entry in &gitignore_entries {
+            if gitignore_entry.to_str() == Some("") {
+                continue;
+            }
+            if path.ends_with(gitignore_entry) {
+                continue 'outer;
+            }
+        }
+
+        if dir_only == true {
+            if path.is_dir() {
+                entries.push(path.to_path_buf());
+            }
+        } else {
+            entries.push(path.to_path_buf());
+        }
+
+        // recursively push all dirs from all children to the dirs vector
+        let subdirs = recursive_read_dir(&path, dir_only)?;
+
+        for subdir in subdirs {
+            entries.push(subdir)
+        }
+    }
+
+    // return the dirs, the ones from this folder and the ones from all child folders
+    Ok(entries)
+}
+
/// Read all entries from the `.gitignore` in `dir`, one path per line.
/// A missing or unopenable gitignore simply yields an empty list; errors
/// while reading an opened file are propagated.
fn gitignore_entries(dir: &PathBuf) -> io::Result<Vec<PathBuf>> {
    let gitignore_path = dir.join(".gitignore");

    match File::open(&gitignore_path) {
        Ok(gitignore) => BufReader::new(gitignore)
            .lines()
            .map(|line| line.map(PathBuf::from))
            .collect(),
        Err(_) => Ok(Vec::new()),
    }
}