#!/usr/bin/env bash
# Run sfeed

# set -euo pipefail

main() {
	# Entry point. Usage: runsfeed [-n]
	#   -n : skip network fetches (exported as NOFETCH for helper scripts).
	NOFETCH=false
	# ${1-} (not $1) keeps this safe if the strict mode on L4 is re-enabled.
	[ "x${1-}" = "x-n" ] && NOFETCH=true
	export NOFETCH
	export SFEED_CONFIG="$HOME/.sfeed"
	# SFEED_CONFIG="${XDG_CONFIG_HOME:-$HOME/.config}/sfeed"
	export SFEED_DATA="$HOME/.sfeed"
	# SFEED_DATA="${XDG_DATA_HOME:-$HOME/.local/share}/sfeed"
	# Publish into the web root when it exists; otherwise keep output local.
	if [ -d /var/www/acdw.casa/planet ]; then
		export SFEED_OUTPUT=/var/www/acdw.casa/planet
	else
		export SFEED_OUTPUT="$HOME/.sfeed"
	fi
	# SFEED_OUTPUT=/var/www/sfeed
	export sfeedrc="$SFEED_CONFIG/sfeedrc"
	export sfeedpath="$SFEED_DATA/feeds"
	# mkdir -p is a no-op on existing dirs, so no `test -d` guard is needed.
	mkdir -p "$(dirname "$sfeedrc")" "$sfeedpath"
	# Success sentinel: logok deletes this when any step fails.
	touch /tmp/runsfeed.ok

	# if ! $NOFETCH; then
	# logok "Finding Invidious host" \
	# get_invidious_url "https://api.invidious.io/instances.json?sort_by=health"
	# fi
	logok "Removing unsubscribed feeds" \
		remove_unsubs "$sfeedrc"
	logok -n "Updating feeds" \
		update "$sfeedrc"
	logok "Updating urls" \
		update_urls "$sfeedrc"
	logok "Generating HTML" '{
		html "$sfeedpath"/* >/tmp/sfeed-index.html &&
			mv /tmp/sfeed-index.html "$SFEED_OUTPUT/index.html";
		html -v LIMIT=-1 "$sfeedpath"/* >/tmp/sfeed-feeds.html &&
			mv /tmp/sfeed-feeds.html "$SFEED_OUTPUT/feeds.html";
	}'
	logok "Generating RSS" '{
		atom "$sfeedpath" >/tmp/feeds.xml &&
		mv /tmp/feeds.xml "$SFEED_OUTPUT/feeds.xml";
	 atom "$sfeedpath" 7 >/tmp/feeds-short.xml &&
		 mv /tmp/feeds-short.xml "$SFEED_OUTPUT/feeds-short.xml";
			       }'
	logok "Generating OPML" '{
		opml "$sfeedrc" >/tmp/feeds.opml &&
			mv /tmp/feeds.opml "$SFEED_OUTPUT/feeds.opml";
				}'
	logok "Generating twtxt" '{
		twtxt "$sfeedpath" >/tmp/feeds.txt &&
			mv /tmp/feeds.txt "$SFEED_OUTPUT/feeds.txt";
		}'
	# logok "Archiving old feeds" \
	# archive "$sfeedpath"/*
	if [ -f /tmp/runsfeed.ok ]; then
		echo >&2 'Done.'
	else
		echo >&2 'Done (some errors).'
	fi
}

runcmd() { # runcmd NAME [ARGS...] — run NAME from PATH, else fall back to ./NAME
	# `local` so we don't clobber a global `cmd` in the caller.
	local cmd
	cmd="$(command -v "$1" || echo "./$1")"
	shift
	"$cmd" "$@"
}

remove_unsubs() {
	# Delegate to sfeed_unsubscribe.sh (found via PATH or ./, see runcmd).
	# Called from main with "$sfeedrc"; presumably removes feed files for
	# feeds no longer listed in the config — confirm against that script.
	runcmd sfeed_unsubscribe.sh "$@"
}

update_urls() {
	# Delegate to sfeed_update_urls.sh (found via PATH or ./, see runcmd).
	# Called from main with "$sfeedrc" after the feed update step.
	runcmd sfeed_update_urls.sh "$@"
}

get_invidious_url() { # get_invidious_url API_URL — pick one healthy Invidious host
	# No-op (returns 0) when fetching is disabled via the -n flag.
	"${NOFETCH:-false}" && return
	# Quote the jq program: unquoted, `[1]` is a shell glob character class
	# and could be expanded against files in the current directory.
	curl -sL "$1" |
		jq -r '.[][1].uri' |
		grep -v onion |
		head -n1 | tee /tmp/invidious.host
}

logok() { # logok [-n] MESSAGE CMD... — print MESSAGE, eval CMD, report ok/ERROR
	# -n prints a newline after "MESSAGE..." so the command's own stderr
	# output starts on a fresh line (used for the noisy feed update).
	local newline='' output
	if [ "x$1" = x-n ]; then
		newline='\n'
		shift
	fi
	printf "%s...$newline" "$1" >&2
	shift
	# pipefail inside the substitution: without it the pipeline's status is
	# tee's (always 0), so a failing command would never reach the error
	# branch and the sentinel would never be removed.
	if output="$(set -o pipefail; eval "$@" | tee /dev/stderr)"; then
		printf '%s\n' "${output:-ok}"
		return 0
	else
		# -f: a previous failing step may already have removed the sentinel.
		rm -f /tmp/runsfeed.ok
		printf 'ERROR\n'
		return 1
	fi
}

update() {
	# Fetch/refresh all feeds: delegate to sfeed_update_xargs (found via
	# PATH or ./, see runcmd). Called from main with "$sfeedrc".
	runcmd sfeed_update_xargs "$@"
}

twtxt() ( # twtxt DIRECTORY [DAYS] — emit twtxt output for feeds in DIRECTORY
	# Subshell body, like atom(): the cd below must not leak into the
	# caller. The old brace body changed the caller's working directory
	# (it saved $PWD in `curd` but never cd'd back).
	cd "$1" || return 1
	# With DAYS given, only keep entries newer than DAYS days; else keep all.
	if [ $# -eq 2 ]; then
		old="$(($(date +%s) - ($2 * 24 * 3600)))"
	else
		old=0
	fi
	# Prefix each entry title with its [filename], newest first.
	awk -v old="$old" \
		'BEGIN{FS="\t";OFS="\t";} int($1)>=old{$2="["FILENAME"] "$2;print}' \
		* |
		sort -k1,1rn |
		sfeed_twtxt
)

opml() {
	# Export the subscription list as OPML via sfeed_opml_export.
	# NOTE(review): main passes "$sfeedrc" as an argument — confirm
	# sfeed_opml_export accepts the config path this way.
	sfeed_opml_export "$@"
}

html() {
	# Render feed files to HTML via sfeed_html.awk (found via PATH or ./,
	# see runcmd). Extra awk flags pass through, e.g. `-v LIMIT=-1` in main.
	runcmd sfeed_html.awk "$@"
}

atom() ( # atom DIRECTORY [DAYS] — emit an Atom feed for feeds in DIRECTORY
	# Subshell body: the cd is contained and does not affect the caller.
	# (Removed the unused `curd="$PWD"` — nothing ever read it.)
	cd "$1" || return 1
	# With DAYS given, only keep entries newer than DAYS days; else keep all.
	if [ $# -eq 2 ]; then
		old=$(($(date +%s) - ($2 * 24 * 3600)))
	else
		old=0
	fi
	# Skip lines with an empty timestamp; prefix titles with [filename].
	awk -F $'\t' -v old="$old" \
		'BEGIN{OFS="\t"} $1 && int($1)>=old{$2="["FILENAME"] "$2;print}' \
		* |
		sort -k1,1rn |
		sfeed_atom
)

archive() ( # archive FEED ... — keep only the last 60 days of each feed
	# For every feed file: write entries newer than the cutoff to FEED.new,
	# preserve the previous file as FEED.old, then promote FEED.new to FEED.
	for f in "$@"; do
		cutoff=$(($(date +%s) - (60 * 24 * 3600)))
		awk -F '\t' -v old="$cutoff" 'int($1) > old' <"$f" >"$f.new"
		mv "$f" "$f.old"
		mv "$f.new" "$f"
	done
)

# Only run main when the script is executed directly; when sourced
# (BASH_SOURCE[0] != $0) just define the functions, e.g. for testing.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
	main "$@"
fi