squeeze

A static site generator that can put the toothpaste back in the tube.
git clone https://git.stjo.hn/squeeze
Log | Files | Refs | README | LICENSE

commit bd3d10834ac8498123a5c49c56b2a12c5bc05dc3
parent b97c754c0de48b3fa4910b7946fac6fd1545a89e
Author: St John Karp <contact@stjo.hn>
Date:   Tue, 31 Mar 2020 07:45:47 -0500

Refactor Bash scripts

Refactored the Bash scripts to get rid of some unnecessary code,
put things in a more sensible order, and improve the
formatting/comments.

Diffstat:
M squeeze.sh | 35 ++++++++++++++++++++++-------------
M unsqueeze.sh | 14 +++++---------
2 files changed, 27 insertions(+), 22 deletions(-)

diff --git a/squeeze.sh b/squeeze.sh
@@ -5,13 +5,12 @@ SOURCE_DIR=source
 SITE_PATH=$1
-# Create the directory structure.
-find "$SITE_PATH"/"$SOURCE_DIR" -type d |
- sed "s|^$SITE_PATH/$SOURCE_DIR|$SITE_PATH/$OUTPUT_DIR|" |
- xargs -0 -d '\n' mkdir -p --
+# Copy everything that's not Markdown or HTML.
+# This will also create the folder structure for the destination Markdown files.
+rsync --archive --delete --verbose --exclude "*.md" --exclude "*.html" --exclude "feeds" "$SITE_PATH/$SOURCE_DIR/" "$SITE_PATH/$OUTPUT_DIR/"
 # Parse and create all the HTML files.
-find "$SITE_PATH"/"$SOURCE_DIR" -type f -name "*.md" -print0 |
+find "$SITE_PATH/$SOURCE_DIR" -type f -name "*.md" -print0 |
 while IFS= read -r -d '' file; do
 echo $file
 NEW_PATH=`echo "$file" | sed "s|^$SITE_PATH/$SOURCE_DIR|$SITE_PATH/$OUTPUT_DIR|" | sed 's|.md$|.html|'`
@@ -19,6 +18,7 @@ find "$SITE_PATH"/"$SOURCE_DIR" -type f -name "*.md" -print0 |
 if [ ! -f $NEW_PATH ] || [[ $(find $file -mtime -7) ]]; then
 # Get everything after the metadata and feed it through Pandoc.
 sed "1,/^$/d" "$file" |
+ # Convert Markdown to HTML and smarten punctuation.
 pandoc --ascii --from markdown+smart --to html |
 # Recombine with the metadata and hand it to Prolog.
 (sed "/^$/q" "$file" && cat) |
@@ -27,13 +27,22 @@ find "$SITE_PATH"/"$SOURCE_DIR" -type f -name "*.md" -print0 |
 fi
 done
-# Copy anything else directly.
-rsync --archive --delete --verbose --exclude "*.md" --exclude "*.html" --exclude "feeds" "$SITE_PATH/$SOURCE_DIR/" "$SITE_PATH/$OUTPUT_DIR/"
-
 # Generate the RSS feed.
-mkdir -p "$SITE_PATH"/"$OUTPUT_DIR"/feeds
-# Grep the date of each article, sort them by date, then get a list of file names and take the most recent five.
-ARTICLES=`grep -R --include=\*.md "^Date: " "$SITE_PATH"/"$SOURCE_DIR" | sed -rn 's/^([^:]+):(.+)$/\2\t\1/p' | sort | cut -f2 | tail -5 | paste -sd ',' - | sed "s|,|','|g"`
+mkdir -p "$SITE_PATH/$OUTPUT_DIR/feeds"
+# Grep the date of each article.
+ARTICLES=`grep -R --include=\*.md "^Date: " "$SITE_PATH/$SOURCE_DIR" |
+ # Reformat the output so the date comes first, then the file name.
+ sed -rn 's/^([^:]+):(.+)$/\2\t\1/p' |
+ # Sort articles by date.
+ sort |
+ # Reformat to just the file names.
+ cut -f2 |
+ # Get the last (i.e. most recent) posts for the RSS feed.
+ tail -5 |
+ # Glue the file names together to be passed to Prolog.
+ paste -sd ',' - |
+ sed "s|,|','|g"`
 BUILD_DATE=`date +"%Y-%m-%d %T"`
+# Parse the articles and generate the RSS.
 swipl --traditional -q -l generate_rss.pl -g "consult('$SITE_PATH/site.pl'), generate_rss(\"$BUILD_DATE\", ['$ARTICLES'])." \
- > "$SITE_PATH"/"$OUTPUT_DIR"/feeds/rss.xml
-\ No newline at end of file
+ > "$SITE_PATH/$OUTPUT_DIR/feeds/rss.xml"
+\ No newline at end of file
diff --git a/unsqueeze.sh b/unsqueeze.sh
@@ -5,13 +5,13 @@ SOURCE_DIR=source
 SITE_PATH=$1
-# Create the directory structure.
-find "$SITE_PATH"/"$OUTPUT_DIR" -type d |
- sed "s|^$SITE_PATH/$OUTPUT_DIR|$SITE_PATH/$SOURCE_DIR|" |
- xargs -0 -d '\n' mkdir -p --
+# Copy everything that's not Markdown or HTML.
+# Excludes the RSS folder, which we create ourselves upon generation.
+# This will also create the folder structure for the destination Markdown files.
+rsync --archive --delete --verbose --exclude "*.html" --exclude "*.md" --exclude "feeds" "$SITE_PATH/$OUTPUT_DIR/" "$SITE_PATH/$SOURCE_DIR/"
 # Parse and create all the markdown files.
-find "$SITE_PATH"/"$OUTPUT_DIR" -type f -name "*.html" -print0 |
+find "$SITE_PATH/$OUTPUT_DIR" -type f -name "*.html" -print0 |
 while IFS= read -r -d '' file; do
 NEW_PATH=`echo "$file" | sed "s|^$SITE_PATH/$OUTPUT_DIR|$SITE_PATH/$SOURCE_DIR|" | sed 's|.html$|.md|'`
 cat "$file" |
@@ -25,6 +25,3 @@ find "$SITE_PATH"/"$OUTPUT_DIR" -type f -name "*.html" -print0 |
 > "$NEW_PATH"
 done
-# Copy anything else directly.
-# Excludes the RSS folder, which we create ourselves upon generation.
-rsync --archive --delete --verbose --exclude "*.html" --exclude "*.md" --exclude "feeds" "$SITE_PATH/$OUTPUT_DIR/" "$SITE_PATH/$SOURCE_DIR/"
-\ No newline at end of file