about summary refs log tree commit diff stats
path: root/runsfeed
blob: 3fc6ee6de0f176cb33914e2095b6a11f5ef111c6 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
#!/usr/bin/env bash
# Run sfeed

# set -euo pipefail

main() { # main [-n]
	# Orchestrate a full sfeed run: fetch feeds, regenerate the HTML,
	# Atom and OPML outputs.  With -n, skip all network fetches.
	#
	# Globals written: NOFETCH, SFEED_CONFIG, SFEED_DATA, SFEED_OUTPUT,
	#                  sfeedrc, sfeedpath (all exported for child scripts).
	NOFETCH=false
	# ${1:-} keeps this safe if "set -u" (commented out above) is enabled
	# and the script is run without arguments.
	[ "x${1:-}" = "x-n" ] && NOFETCH=true
	export NOFETCH
	export SFEED_CONFIG="$HOME/.sfeed"
	# SFEED_CONFIG="${XDG_CONFIG_HOME:-$HOME/.config}/sfeed"
	export SFEED_DATA="$HOME/.sfeed"
	# SFEED_DATA="${XDG_DATA_HOME:-$HOME/.local/share}/sfeed"
	# Publish straight to the web root when it exists, otherwise keep
	# the generated files next to the feed data.
	if [ -d /var/www/acdw.casa/planet ]; then
		export SFEED_OUTPUT=/var/www/acdw.casa/planet
	else
		export SFEED_OUTPUT="$HOME/.sfeed"
	fi
	# SFEED_OUTPUT=/var/www/sfeed
	export sfeedrc="$SFEED_CONFIG/sfeedrc"
	export sfeedpath="$SFEED_DATA/feeds"
	test -d "$(dirname "$sfeedrc")" || mkdir -p "$(dirname "$sfeedrc")"
	test -d "$sfeedpath" || mkdir -p "$sfeedpath"

	if ! $NOFETCH; then
		log Finding Invidious host...
		get_invidious_url "https://api.invidious.io/instances.json?sort_by=health"
	fi
	log Removing unsubscribed feeds...
	remove_unsubs "$sfeedrc"
	log Updating feeds...
	update "$sfeedrc"
	log Updating urls...
	update_urls "$sfeedrc"
	log Generating HTML...
	# Build in /tmp first and mv into place so readers never see a
	# half-written index.
	html "$sfeedpath"/* >/tmp/sfeed-index.html &&
		mv /tmp/sfeed-index.html "$SFEED_OUTPUT/index.html"
	log
	# LIMIT=0 presumably tells sfeed_html.sh "no entry limit" — confirm
	# against that script.
	LIMIT=0 html "$sfeedpath"/* >/tmp/sfeed-feeds.html &&
		mv /tmp/sfeed-feeds.html "$SFEED_OUTPUT/feeds.html"
	log
	log Generating RSS...
	atom "$sfeedpath" >/tmp/feeds.xml &&
		mv /tmp/feeds.xml "$SFEED_OUTPUT/feeds.xml"
	atom "$sfeedpath" 7 >/tmp/feeds-short.xml &&
		mv /tmp/feeds-short.xml "$SFEED_OUTPUT/feeds-short.xml"
	log Generating OPML...
	opml "$sfeedrc" >/tmp/feeds.opml &&
		mv /tmp/feeds.opml "$SFEED_OUTPUT/feeds.opml"
	# log Archiving old feeds...
	# archive "$sfeedpath"/*
	log Done.
}

runcmd() { # runcmd NAME [ARG ...]
	# Run helper NAME with ARGs: prefer a copy on PATH, otherwise fall
	# back to ./NAME in the current directory.
	local cmd # local: don't leak "cmd" into the caller's scope
	cmd="$(command -v "$1" || echo "./$1")"
	shift
	"$cmd" "$@"
}

remove_unsubs() {
	# Thin wrapper: delegate to sfeed_unsubscribe.sh (found on PATH or
	# in the current directory) with all arguments passed through.
	runcmd sfeed_unsubscribe.sh "$@"
}

update_urls() {
	# Thin wrapper: delegate to sfeed_update_urls.sh (found on PATH or
	# in the current directory) with all arguments passed through.
	runcmd sfeed_update_urls.sh "$@"
}

get_invidious_url() { # get_invidious_url API_URL
	# Pick the first healthy, non-onion Invidious instance from the
	# instances API and record it in /tmp/invidious.host (presumably
	# read later by the sfeedrc feed definitions — confirm).  The host
	# is also echoed to stdout.  No-op when NOFETCH=true.
	#
	# NOFETCH holds the literal word "true" or "false", so expanding it
	# as a command gives the right exit status.
	"${NOFETCH:-false}" && return 0
	# -f: treat HTTP error responses as failures instead of feeding an
	# error page to jq.
	curl -sfL -- "$1" |
		jq -r '.[][1].uri' |
		grep -v onion |
		head -n1 | tee /tmp/invidious.host
}

log() {
	# Progress/status message; sent to stderr so stdout stays reserved
	# for generated data.
	{ printf '%s\n' "$*"; } 1>&2
}

update() {
	# Thin wrapper: fetch/refresh all feeds via sfeed_update_xargs
	# (found on PATH or in the current directory).
	runcmd sfeed_update_xargs "$@"
}

opml() {
	# Export the subscription list as OPML.  Go through runcmd like the
	# sibling wrappers so a local ./sfeed_opml_export also works.
	runcmd sfeed_opml_export "$@"
}

html() {
	# Thin wrapper: render feed files to HTML via sfeed_html.sh (found
	# on PATH or in the current directory).  Output goes to stdout.
	runcmd sfeed_html.sh "$@"
}

atom() ( # atom DIRECTORY [DAYS]
	# Merge every feed file in DIRECTORY into a single Atom feed on
	# stdout, newest entries first.  With DAYS given, only entries
	# newer than DAYS days ago are included; otherwise all entries.
	# Body runs in a subshell "(...)" so the cd does not leak out.
	cd "$1" || return 1
	if [ $# -eq 2 ]; then
		cutoff=$(($(date +%s) - ($2 * 24 * 3600)))
	else
		cutoff=0 # no DAYS argument: keep everything
	fi
	# sfeed files are TAB-separated with the UNIX timestamp in column 1
	# and the title in column 2; prefix the title with "[filename]" so
	# the source feed is visible in the merged output.
	awk -F $'\t' -v old="$cutoff" \
		'BEGIN{OFS="\t"} int($1)>=old{$2="["FILENAME"] "$2;print}' \
		* |
		sort -k1,1rn |
		sfeed_atom
)

archive() ( # archive FEED ...
	# Trim each FEED file down to entries newer than 60 days, keeping
	# the previous version as FEED.old.  Column 1 of an sfeed file is
	# the entry's UNIX timestamp.  Runs in a subshell so nothing leaks.
	cutoff=$(($(date +%s) - (60 * 24 * 3600))) # hoisted: same for every feed
	for feed; do
		# Write the filtered copy first and only swap it in if awk
		# succeeded, so a failure never replaces a feed with a
		# truncated file.
		awk -v old="$cutoff" -F '\t' 'int($1) > old' \
			<"$feed" >"$feed.new" || { rm -f -- "$feed.new"; continue; }
		mv -- "$feed" "$feed.old"
		mv -- "$feed.new" "$feed"
	done
)

# Only run main when this file is executed directly; when sourced
# (BASH_SOURCE differs from $0) just define the functions.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
	main "$@"
fi