-rwxr-xr-x  runsfeed             |  7
-rwxr-xr-x  sfeed_html.sh        | 10
-rwxr-xr-x  sfeed_update_urls.sh | 18
-rw-r--r--  style.css            |  2
4 files changed, 35 insertions(+), 2 deletions(-)
diff --git a/runsfeed b/runsfeed
index f108031..da43529 100755
--- a/runsfeed
+++ b/runsfeed
@@ -23,6 +23,8 @@ main() {
 	get_invidious_url "https://api.invidious.io/instances.json?sort_by=health"
 	log Updating feeds...
 	update "$sfeedrc"
+	log Updating urls...
+	update_urls "$sfeedrc"
 	log Generating HTML...
 	html "$sfeedpath"/* >"$SFEED_OUTPUT/index.html"
 	log
@@ -38,6 +40,11 @@ main() {
 	log Done.
 }
 
+update_urls() {
+	cmd="$(command -v sfeed_update_urls.sh || echo ./sfeed_update_urls.sh)"
+	"$cmd" "$@"
+}
+
 get_invidious_url() {
 	curl -sL "$1" |
 		jq -r .[][1].uri |
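The new update_urls wrapper prefers an installed sfeed_update_urls.sh on PATH and otherwise falls back to the copy next to runsfeed. A hypothetical check of which copy would be picked (the output shown is only an example and depends on whether the script is installed):

	$ command -v sfeed_update_urls.sh || echo ./sfeed_update_urls.sh
	./sfeed_update_urls.sh    # example output: nothing installed, so the local copy is used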
diff --git a/sfeed_html.sh b/sfeed_html.sh
index d9f1b43..a618ff9 100755
--- a/sfeed_html.sh
+++ b/sfeed_html.sh
@@ -147,7 +147,15 @@ html_feed() { # html_feed FEED(file) => HTML
 	echo "<section id=\"$filename\">"
 	# TODO: Include a link back to the website
 	printf '<header><h2><a href="#%s">#</a> %s</h2>\n' "$filename" "$filename"
-	printf '<a class="top" href="#">%s</a></header>' "[back to top]"
+	echo "<span class=\"flinks\">"
+	if [ -f "$SFEED_DATA/urls/$filename" ]; then
+		feed_url="$(sed -n '1p;1q' "$SFEED_DATA/urls/$filename")"
+		site_url="$(sed -n '2p;2q' "$SFEED_DATA/urls/$filename")"
+		[ -n "$site_url" ] && printf '<a class="site-url" href="%s">%s</a>\n//' "$site_url" site
+		[ -n "$feed_url" ] && printf '<a class="feed-url" href="%s">%s</a>\n//' "$feed_url" feed
+	fi
+	printf '\n<a class="top" href="#">%s</a>' top
+	echo "</span></header>"
 	echo "<table class=\"entries\">"
 	echo "$entries"
 	echo "</table>"
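For a feed named "example" whose urls file holds a feed URL on line 1 and a site URL on line 2 (the name and URLs are invented for illustration), the rewritten header block emits markup roughly like:

	<!-- hypothetical output; feed name and URLs are examples only -->
	<header><h2><a href="#example">#</a> example</h2>
	<span class="flinks">
	<a class="site-url" href="https://example.org">site</a>
	//<a class="feed-url" href="https://example.org/atom.xml">feed</a>
	//
	<a class="top" href="#">top</a></span></header>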
diff --git a/sfeed_update_urls.sh b/sfeed_update_urls.sh
new file mode 100755
index 0000000..a2ee2be
--- /dev/null
+++ b/sfeed_update_urls.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+. "$1"
+
+feed() {
+	printf '%s\t%s\t%s\n' "$1" "$2" "${3:-}"
+}
+
+mkdir -p "$SFEED_DATA/urls"
+
+feeds | awk -v dir="$SFEED_DATA/urls" '
+BEGIN{FS="\t";}
+{
+	gsub(/\//, "_", $1);
+	file = dir "/" $1;
+	if ($2) print $2 > file;
+	if ($3) print $3 >> file;
+}'
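The script sources the sfeedrc given as its first argument and overrides feed(), so running feeds prints one tab-separated line of name, feed URL, and optional site URL per feed; awk then writes those URLs to one file per feed under $SFEED_DATA/urls. A rough sketch of the mapping, using an invented sfeedrc excerpt (feed names and URLs are examples, not from this patch):

	# invented sfeedrc excerpt -- feed names and URLs are examples only
	feeds() {
		feed "example blog" "https://example.org/atom.xml" "https://example.org"
		feed "news/tech" "https://news.example.com/rss"
	}

	# running sfeed_update_urls.sh on that sfeedrc would then create:
	#   "$SFEED_DATA/urls/example blog" -> https://example.org/atom.xml, then https://example.org
	#   "$SFEED_DATA/urls/news_tech"    -> https://news.example.com/rss (the "/" becomes "_")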
diff --git a/style.css b/style.css
index 9b5efca..abe4799 100644
--- a/style.css
+++ b/style.css
@@ -85,7 +85,7 @@ header h2, header h1 {
 	margin: 0;
 }
 header h2 a { display: inline; }
-header .top {
+header .flinks {
 	font-size: 80%;
 	text-align: right;
 	flex-grow: 1;