-rw-r--r--  .gitignore            1
-rw-r--r--  mars-eyes.png       bin 0 -> 4907 bytes
-rwxr-xr-x  runsfeed             77
-rwxr-xr-x  scripts/get-feed.sh  59
-rwxr-xr-x  sfeed_html.sh       148
-rwxr-xr-x  sfeed_update_xargs   45
-rw-r--r--  sfeedrc             225
-rw-r--r--  style.css           176

8 files changed, 731 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..865369a
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
feeds/
\ No newline at end of file
diff --git a/mars-eyes.png b/mars-eyes.png
new file mode 100644
index 0000000..2e62c21
--- /dev/null
+++ b/mars-eyes.png
Binary files differ
diff --git a/runsfeed b/runsfeed
new file mode 100755
index 0000000..f987a92
--- /dev/null
+++ b/runsfeed
@@ -0,0 +1,77 @@
#!/usr/bin/env bash
# Run sfeed

# set -euo pipefail

main() {
	export SFEED_CONFIG="$HOME/.sfeed"
	# SFEED_CONFIG="${XDG_CONFIG_HOME:-$HOME/.config}/sfeed"
	export SFEED_DATA="$HOME/.sfeed"
	# SFEED_DATA="${XDG_DATA_HOME:-$HOME/.local/share}/sfeed"
	export SFEED_OUTPUT="$HOME/.sfeed"
	# SFEED_OUTPUT=/var/www/sfeed
	export sfeedrc="$SFEED_CONFIG/sfeedrc"
	export sfeedpath="$SFEED_DATA/feeds"
	test -d "$(dirname "$sfeedrc")" || mkdir -p "$(dirname "$sfeedrc")"
	test -d "$sfeedpath" || mkdir -p "$sfeedpath"

	log Updating feeds...
	update "$sfeedrc"
	log Generating HTML...
	html "$sfeedpath"/* >"$SFEED_OUTPUT/index.html"
	log
	LIMIT=0 html "$sfeedpath"/* >"$SFEED_OUTPUT/feeds.html"
	log
	log Generating RSS...
	atom "$sfeedpath" >"$SFEED_OUTPUT/feeds.xml"
	atom "$sfeedpath" 7 >"$SFEED_OUTPUT/feeds-short.xml"
	log Generating OPML...
	opml "$sfeedrc" >"$SFEED_OUTPUT/feeds.opml"
	log Done.
}

log() {
	printf '%s\n' "$*" >&2
}

update() {
	cmd="$(command -v sfeed_update_xargs || echo ./sfeed_update_xargs)"
	"$cmd" "$@"
}

opml() {
	sfeed_opml_export "$@"
}

html() {
	converter="$(command -v sfeed_html.sh || echo ./sfeed_html.sh)"
	"$converter" "$@"
}

atom() ( # atom DIRECTORY [DAYS]
	curd="$PWD"
	cd "$1" || return 1
	if [ $# -eq 2 ]; then
		old=$(($(date +%s) - ($2 * 24 * 3600)))
	else
		old=0
	fi
	awk -F $'\t' -v old="$old" \
		'BEGIN{OFS="\t"} int($1)>=old{$2="["FILENAME"] "$2;print}' \
		* |
		sort -k1,1rn |
		sfeed_atom
)

sfeed_archive() ( # sfeed_archive FEED ...
	for feed; do
		awk -v old="$(($(date +%s) - (15 * 24 * 3600)))" \
			-F '\t' 'int($1) > old' <"$feed" >"$feed.new"
		mv "$feed" "$feed.old"
		mv "$feed.new" "$feed"
	done
)

if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
	main "$@"
fi
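
Note: a minimal smoke test for runsfeed, assuming the sfeed tools (sfeed_update, sfeed_atom, sfeed_opml_export) are installed and sfeed_html.sh / sfeed_update_xargs are on PATH or in the working directory; the paths below are the script's own defaults, nothing new:

	./runsfeed
	# expected output files:
	ls "$HOME/.sfeed/index.html" "$HOME/.sfeed/feeds.html" \
		"$HOME/.sfeed/feeds.xml" "$HOME/.sfeed/feeds-short.xml" \
		"$HOME/.sfeed/feeds.opml"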
diff --git a/scripts/get-feed.sh b/scripts/get-feed.sh
new file mode 100755
index 0000000..f000fee
--- /dev/null
+++ b/scripts/get-feed.sh
@@ -0,0 +1,59 @@
#!/bin/sh

main() {
	# get-feed.sh URL(str) => DIRECTIVE(feed_directive)
	url="$1"
	wp="$(mktemp /tmp/get-feed.XXXXXX)"
	trap 'rm -f "$wp"' EXIT # remove the temp file on exit
	curl -sL "$url" >"$wp"
	case "$url" in
	*html) # We know it's a webpage
		type=html
		;;
	*xml) # We know it's a feed
		type=xml
		;;
	*) # Not sure
		type="$(head -n1 "$wp")"
		;;
	esac
	case "$type" in
	*xml*) # a feed
		title="$(get_title_xml <"$wp")"
		output_feed "$title" "$url"
		;;
	*html*) # a webpage
		sfeed_web <"$wp" | cut -f1 |
			while read -r u; do
				title="$(curl -sL "$u" | get_title_xml)"
				output_feed "$title" "$u"
			done
		;;
	*)
		echo >&2 "Don't know type \"$type\"."
		exit 1
		;;
	esac
}

output_feed() {
	## output_feed TITLE(str) URL(str) => FEED_DIRECTIVE(str)
	printf "feed \"%s\" '%s'\n" "$1" "$2"
}

get_title_xml() {
	## get_title_xml < FILE => TITLE(str)
	awk '
/<channel>/ { channel = 1; }
/<item>/    { channel = 0; }
channel && $0 ~ /<title>/ { title = 1; }
title {
	if (match($0, /<\/title>/)) title = 0;
	gsub(/<\/?title>/, "");
	sub(/^[ \t]*/, "");
	sub(/[ \t]*$/, "");
	print;
}
channel && $0 ~ /<\/title>/ { title = 0; }
'
}

main "$@"
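
Note: a sketch of the intended use of scripts/get-feed.sh (hypothetical URL and output; the HTML branch needs sfeed_web from sfeed installed):

	$ ./scripts/get-feed.sh https://example.com/blog/
	feed "Example Blog" 'https://example.com/blog/feed.xml'

The output lines are sfeedrc "feed" directives, ready to paste into one of the feeds_* functions in sfeedrc below.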
diff --git a/sfeed_html.sh b/sfeed_html.sh
new file mode 100755
index 0000000..58fce30
--- /dev/null
+++ b/sfeed_html.sh
@@ -0,0 +1,148 @@
#!/usr/bin/env bash

echo() { printf '%s\n' "$*"; }

html() {
	: "${LIMIT:=1}"
	aside="$(mktemp /tmp/sfeed_html_aside.XXXXXX)"
	cat <<EOF
$(html_head)
<body>
<header>
<h1>
<a href="index.html"><img src="mars-eyes.png"
 title="$(fortune)"
 width="40" height="39"
 alt="mars, but with eyes" /></a>
Planet ACDW</h1>
<p class="last-updated">last updated at <time>$(date -R)</time></p>
</header>
<nav>
<a href="feeds.html">all feeds</a>
//
<a href="feeds.xml">rss (full)</a>
//
<a href="feeds-short.xml">rss (short)</a>
//
<a href="feeds.opml">opml</a>
</nav>
<main>
$(html_main "$@")
<aside><ul>$(cat "$aside")</ul></aside>
</main>
</body>
</html>
EOF
	rm "$aside"
}

html_head() {
	cat <<EOF
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Planet ACDW</title>
<link rel="stylesheet" type="text/css" href="style.css">
<link rel="shortcut icon" type="image/png" href="mars-eyes.png">
<link rel="alternate" type="application/atom+xml" title="rss (full)" href="feeds.xml">
<link rel="alternate" type="application/atom+xml" title="rss (short)" href="feeds-short.xml">
<link rel="alternate" type="application/xml" title="opml" href="feeds.opml">
</head>
EOF
}

html_main() {
	cat <<EOF
<section id="list">
$(for file in "$@"; do html_feed "$file"; done)
</section>
EOF
}

html_feed() { # html_feed FEED(file) => HTML
	filename="$(basename "$1")"
	now="$(date +%s)"
	fresh_days=7
	fresh_secs="$((fresh_days * 24 * 60 * 60))"

	## ENTRIES
	entries="$(awk -v NOW="$now" -v FRESH_SECS="$fresh_secs" \
		-v NAME="$filename" -v ASIDE="$aside" -v limit="$LIMIT" \
		'BEGIN { FS="\t"; fresh_feed = 0; FRESH = (NOW - FRESH_SECS); }
		function unescape(t) {
			t = html_escape(t);
			gsub(/\\\t/,"\t",t);
			gsub(/\\\n/,"\n",t);
			gsub(/\\\\/,"\\",t);
			return t
		}
		function html_escape(t) {
			# escape & first so the entities added below are not double-escaped
			gsub(/&/,"\\&amp;",t);
			gsub(/</,"\\&lt;",t);
			gsub(/>/,"\\&gt;",t);
			return t
		}
		{
			timestamp=$1;
			title=html_escape($2);
			link=$3;
			content=unescape($4);
			content_type=$5;
			id=$6;
			author=$7;
			enclosure=$8;
			category=$9;

			if (limit && (timestamp < (NOW - (FRESH_SECS * 3)))) next;
			show_in_sidebar = 1;
			#print timestamp, title, link > "/dev/stderr";

			date_cmd = "date -d \"@" timestamp "\" +\"%F&nbsp;%R\""
			if (timestamp) {
				date_cmd | getline ts;
				close(date_cmd);
			}

			fresh = (timestamp >= FRESH)
			if (fresh) fresh_feed = 1;

			print "<tr class=\"entry " (fresh ? "fresh" : "") "\">"
			print "<td class=\"entry-timestamp\">" ts "</td>"
			printf "%s", "<td class=\"entry-extra\">"
			if (enclosure) {
				stamp = "@"
				printf "%s", "<a href=\"" enclosure "\" target=\"_blank\">" stamp "</a>"
			}
			if ((link != id) && (id != enclosure) && (id ~ /^https?:/)) {
				stamp = "#"
				printf "%s", "<a href=\"" id "\" target=\"_blank\">" stamp "</a>"
			}
			print "</td>"
			print "<td class=\"entry-title\"><a href=\"" link "\" target=\"_blank\">" title "</a></td>"
			print "</tr>"
		}
		END {
			if (show_in_sidebar) {
				printf "<li%s>", (fresh_feed?" class=\"fresh\"":"") >> ASIDE
				printf "<a href=\"#%s\">%s</a></li>\n", NAME, NAME >> ASIDE
			}
			printf "%s", (stamp ? stamp : ".") > "/dev/stderr"
		}' "$1")"
	if [ -z "$entries" ]; then return 1; fi
	echo "<section id=\"$filename\">"
	# TODO: Include a link back to the website
	printf '<header><h2><a href="#%s">#</a> %s</h2>\n' "$filename" "$filename"
	printf '<a class="top" href="#">%s</a></header>' "[back to top]"
	echo "<table class=\"entries\">"
	echo "$entries"
	echo "</table>"
	echo "</section>"
}

if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
	html "$@"
fi
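
Note: a sketch of running sfeed_html.sh standalone (paths follow the defaults runsfeed exports; LIMIT is the script's own knob):

	# LIMIT=1 (the default) drops entries older than three freshness
	# periods (21 days); LIMIT=0 renders everything, as feeds.html does.
	LIMIT=0 ./sfeed_html.sh "$HOME/.sfeed/feeds"/* > feeds.html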
diff --git a/sfeed_update_xargs b/sfeed_update_xargs
new file mode 100755
index 0000000..f8ee8e7
--- /dev/null
+++ b/sfeed_update_xargs
@@ -0,0 +1,45 @@
#!/bin/sh
# -*- sh -*-
# update feeds, merge with old feeds using xargs in parallel mode (non-POSIX).

# include script and reuse its functions, but do not start main().
SFEED_UPDATE_INCLUDE="1" . sfeed_update
# load config file, sets $config.
loadconfig "$1"

# process a single feed.
# args are: config, tmpdir, name, feedurl, basesiteurl, encoding
if [ "${SFEED_UPDATE_CHILD}" = "1" ]; then
	sfeedtmpdir="$2"
	_feed "$3" "$4" "$5" "$6"
	exit $?
fi

# ...else parent mode:

# feed(name, feedurl, basesiteurl, encoding)
feed() {
	# workaround: *BSD xargs doesn't handle empty fields in the middle.
	name="${1:-$$}"
	feedurl="${2:-http://}"
	basesiteurl="${3:-${feedurl}}"
	encoding="$4"

	printf '%s\0%s\0%s\0%s\0%s\0%s\0' "${config}" "${sfeedtmpdir}" \
		"${name}" "${feedurl}" "${basesiteurl}" "${encoding}"
}

# fetch feeds and store in temporary directory.
sfeedtmpdir="$(mktemp -d '/tmp/sfeed_XXXXXX')"
mkdir -p "${sfeedtmpdir}/feeds"
touch "${sfeedtmpdir}/ok"
# make sure path exists.
mkdir -p "${sfeedpath}"
# print feeds for parallel processing with xargs.
feeds | SFEED_UPDATE_CHILD="1" xargs -r -0 -P "${maxjobs}" -L 6 "$(readlink -f "$0")"
status=$?
# check error exit status indicator for parallel jobs.
test -f "${sfeedtmpdir}/ok" || status=1
# cleanup temporary files etc.
cleanup
exit ${status}
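
Note: the parent/child protocol above is compact; a sketch of what travels over the pipe (GNU xargs assumed, matching the non-POSIX flags used here; sample values are hypothetical):

	# feed() emits one NUL-separated 6-field record per feed:
	#   config \0 tmpdir \0 name \0 feedurl \0 basesiteurl \0 encoding \0
	# xargs -0 -L 6 re-invokes this script once per record, so the child
	# branch receives the fields as "$1".."$6" and passes $3..$6 to _feed().
	printf '%s\0' sfeedrc /tmp/sfeed_abc xkcd 'https://xkcd.com/atom.xml' '' '' |
		xargs -r -0 -L 6 echo   # demo: prints one record's six fields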
diff --git a/sfeedrc b/sfeedrc
new file mode 100644
index 0000000..c5e3001
--- /dev/null
+++ b/sfeedrc
@@ -0,0 +1,225 @@
# -*- sh -*-

# SFEED="$HOME/.sfeed"
USER_AGENT='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0'
# sfeedpath="$SFEED/feeds"
maxjobs="$(nproc)"

### Feeds #######################################################

feeds() {
	feeds_planets
	feeds_youtube
	feeds_podcasts
	feeds_friends
	feeds_news
	feeds_smolweb
	feeds_comics
	feeds_misc
	feeds_me
}

feeds_planets() {
	feed "Planet Emacs" "https://planet.emacslife.com/atom.xml"
	feed "Planet Lisp" "https://planet.lisp.org/rss20.xml"
	feed "Planet Scheme" "https://planet.scheme.org/atom.xml"
}

feeds_youtube() {
	feed "3Blue1Brown" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCYO_jab_esuFRV4b17AJtAw'
	feed "AB - Ancienne Belgique" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCUVAw2kdxJlcfCdEcdgXv5A'
	feed "Abraham Moller" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCMfIwe2KHD2XoBO2lWqeFXg'
	feed "Adam Ragusea" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC9_p50tH3WmMslWRWKnM7dQ'
	feed "Babish Culinary Universe" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCJHA_jMfCvEnv-3kRjTCQXw'
	feed "Baggers" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCMV8p6Lb-bd6UZtTc_QD4zA'
	feed "Case Duckworth" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC92gRJdnUYklVu4pvj9n0Lw'
	feed "Claire Saffitz x Dessert Person" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCvw6Y1kr_8bp6B5m1dqNyiw'
	feed "Computerphile" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC9-y-6csu5WGm29I7JiwpnA'
	feed "EBRPL Career Center" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCIvntuaxP7PyaJDeHE_9E8Q'
	feed "EmacsConf and Emacs hangouts" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCwuyodzTl_KdEKNuJmeo99A'
	feed "freeCodeCamp.org" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC8butISFwT-Wl7EV0hUK0BQ'
	feed "Gavin Freeborn" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCJetJ7nDNLlEzDLXv7KIo0w'
	feed "Henry Homesweet" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCZqjwc1Wy5t1rsviYYsJiYg'
	feed "Howard Abrams" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCVHICXXtKG7rZgtC5xonNdQ'
	feed "Ignite Talks" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCZotK8ZPTUNLMeW5Q6T0cKg'
	feed "Jake B" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCBMMB7Yi0eyFuY95Qn2o0Yg'
	feed "James Tomasino" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCbTp1BYjpuhDRG5OmgIT8iw'
	feed "jan Misali" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCJOh5FKisc0hUlEeWFBlD-w'
	feed "J Duckworth Animations" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCtAEaNVrNxAUy2VSRPD_PYQ'
	feed "Jelle's Marble Runs" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCYJdpnjuSWVOLgGT9fIzL0g'
	feed "John Kitchin" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCQp2VLAOlvq142YN3JO3y8w'
	feed "karthik" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCbh_g91w0T6OYp40xFrtnhA'
	feed "Ken Forkish" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCvVvFZd0e86bLbd5FdgYiUg'
	feed "Lex Fridman" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCSHZKyawb77ixDdsGog4iWA'
	feed "LockPickingLawyer" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCm9K6rby98W8JigLoZOh6FQ'
	feed "Maangchi" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC8gFadPgK2r1ndqLI04Xvvw'
	feed "Mike Zamansky" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCxkMDXQ5qzYOgXPRnOBrp1w'
	feed "MIT OpenCourseWare" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCEBb1b_L6zDS3xTUrIALZOw'
	feed "My Analog Journal" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC8TZwtZ17WKFJSmwTZQpBTA'
	feed "Nat's What I Reckon" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCEFW1E8QzP-hKxjO2Rj68wg'
	feed "Now You See It" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCWTFGPpNQ0Ms6afXhaWDiRw'
	feed "Numberphile" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCoxcjq-8xIDTYp3uz647V5A'
	feed "Philosophy Tube" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC2PA-AKmVpU6NKCGtZq_rKQ'
	feed "PronunciationManual" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCqDSLtXeZsGc3dtVb5MW13g'
	feed "Protesilaos Stavrou" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC0uTPqBCFIpZxlz_Lv1tk_g'
	feed "RailCowGirl" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCj-Xm8j6WBgKY8OG7s9r2vQ'
	feed "Simone Giertz" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC3KEoMzNz8eYnwBC34RaKCQ'
	feed "Steve Yegge" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC2RCcnTltR3HMQOYVqwmweA'
	feed "System Crafters" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCAiiOTio8Yu69c3XnR7nQBQ'
	feed "Tasting History with Max Miller" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCsaGKqPZnGp_7N80hcHySGQ'
	feed "Technology Connections" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCy0tKL1T7wFoYcxCe0xjN6Q'
	feed "Too Many Zooz" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCtjXVqMVzBIgU0SO8AV0vPg'
	feed "Townsends" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCxr2d4As312LulcajAkKJYw'
	feed "Unitarian Church of Baton Rouge" 'https://www.youtube.com/feeds/videos.xml?channel_id=UClrqHvbiFM-1hn931ZmAPFw'
	feed "Vulf" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCtWuB1D_E3mcyYThA9iKggQ'
	feed "WFTDA: Women's Flat Track Derby Association" 'https://www.youtube.com/feeds/videos.xml?channel_id=UC7eMWpvytqd3gYAqxTl9w7g'
	feed "Zach Anner" 'https://www.youtube.com/feeds/videos.xml?channel_id=UCPTVYxUoYWhNa8J7GzIGnyQ'
}

feeds_podcasts() {
	feed "Tilde Whirl Tildeverse Podcast" 'https://tilde.town/~dozens/podcast/rss.xml'
	feed "trash cat tech cat" 'https://podcast.librepunk.club/tctc/ogg.xml'
	feed "Hacker Public Radio" 'https://hackerpublicradio.org/hpr_ogg_rss.php'
}

feeds_friends() {
	# nihilazo
	feed "lipu pi jan Niko" 'https://tilde.town/~nihilazo/index.xml'
	# dozens
	feed "chrismanbrown.gitlab.io (dozens)" 'https://chrismanbrown.gitlab.io/rss.xml'
	feed "Dozens and Dragons" 'https://dozensanddragons.neocities.org/rss.xml'
	feed "dozens: Society For Putting Things On Top Of Other Things" 'https://society.neocities.org/rss.xml'
	feed "dozens: vgnfdblg" 'https://supervegan.neocities.org/feed.xml'
	feed "dozens: backgammon" 'http://tilde.town/~dozens/backgammon/rss.xml'
	feed "dozens: It's Pro Toad and Superb Owl" 'https://git.tilde.town/dozens/protoadandsuperbowl/raw/branch/master/feed.xml'
	feed "dozens dreams" 'https://tilde.team/~dozens/dreams/rss.xml'
	feed "dozens: write.as" 'https://write.tildeverse.org/dozens/feed/'
	feed "dozens css art" 'http://tilde.town/~dozens/cssart/feed.xml'
	###
	feed "Benjamin Wil" 'https://benjaminwil.info/feed.xml'
	feed "(lambda (x) (create x))" 'http://lambdacreate.com/static/feed.rss'
	feed "m455.casa" 'https://m455.casa/feed.rss'
	feed "Oatmeal" 'https://eli.li/feed.rss'
	feed "RSRSSS" 'https://envs.net/~lucidiot/rsrsss/feed.xml'
	feed "Tomasino Blog" 'https://blog.tomasino.org/index.xml'
	feed "Tomasino Labs" 'https://labs.tomasino.org/index.xml'
	feed "Will's Blog" 'https://wflewis.com/feed.xml'
	feed "Rick Carlino's Blog" 'https://rickcarlino.com/rss/feed.rss'
	feed "Causal Agency" 'https://text.causal.agency/feed.atom'
	feed "Benoit Joly" 'https://blog.benoitj.ca/posts/index.xml'
	feed "p1k3::feed" 'https://p1k3.com/feed'
	feed "linkbudz" 'https://linkbudz.m455.casa/feed.rss'
	feed "Alex Schroeder" "https://alexschroeder.ch/wiki/feed/full/"
	feed "Björn Wärmedal" "https://warmedal.se/~bjorn/atom.xml"
	feed "a rickety bridge of impossible crossing" "https://bluelander.bearblog.dev/feed/"
}

feeds_comics() {
	feed "Cat and Girl" 'https://catandgirl.com/feed/'
	feed "Dinosaur Comics!" 'https://qwantz.com/rssfeed.php'
	feed "False Knees" 'https://falseknees.tumblr.com/rss'
	feed "Saturday Morning Breakfast Cereal" 'https://www.smbc-comics.com/comic/rss'
	feed "xkcd" 'https://xkcd.com/atom.xml'
}

feeds_news() {
	feed "tilde news: Private feed for acdw" \
		'https://tilde.news/rss?token=FvdFj8rQkhrBy9j1yON1t6RYKDdcuG1MoUlyvRICmbgDGCf2JTWAEObDhdgt'
	feed "Tildes Atom feed" 'https://tildes.net/topics.atom'
	feed "NPR" "https://feeds.npr.org/1001/rss.xml"
}

feeds_me() {
	:
}

feeds_smolweb() {
	feed "~town friday postcard" 'https://tilde.town/~lucidiot/fridaypostcard.xml'
	feed "Cosmic Voyage" 'https://cosmic.voyage/rss.xml'
	feed "plan.cat" 'https://plan.cat/rss'
}

feeds_misc() {
	feed "Crystalverse" 'https://crystalverse.com/feed/'
	feed "Hetzner" 'https://status.hetzner.com/en.atom'
	feed "LOW-TECH MAGAZINE" 'https://feeds2.feedburner.com/typepad/krisdedecker/lowtechmagazineenglish'
}

### Filter ######################################################

filter() {
	case "$1" in
	# Filter items based on feed name.
	*NPR*)
		sed 's@www\.npr\.org@text.npr.org@'
		;;
	*) cat ;;
	esac |
		filter_add_empties |
		filter_html_entities |
		filter_embed_youtube |
		filter_filter_links
}

filter_add_empties() {
	awk 'BEGIN{FS="\t";OFS=FS;}
	{ $2 = $2 ? $2 : "[empty]" }
	{ print $1,$2,$3,$4,$5,$6,$7,$8,$9; }
'
}

filter_embed_youtube() {
	# replace youtube links with embed links
	sed 's@www.youtube.com/watch?v=@www.youtube.com/embed/@g'
}

filter_filter_links() {
	# shorten feedburner links and strip tracking parameters and pixels
	awk -F '\t' 'BEGIN { OFS = "\t"; }
	function filterlink(s) {
		# protocol must start with http, https or gopher.
		if (match(s, /^(http|https|gopher):\/\//) == 0) {
			return "";
		}
		# shorten feedburner links.
		if (match(s, /^(http|https):\/\/[^\/]+\/~r\/.*\/~3\/[^\/]+\//)) {
			# keep the matched part of s itself, whichever field it came from
			s = substr(s, RSTART, RLENGTH);
		}
		# strip tracking parameters
		# urchin, facebook, piwik, webtrekk and generic.
		gsub(/\?(ad|campaign|fbclid|pk|tm|utm|wt)_([^&]+)/, "?", s);
		gsub(/&(ad|campaign|fbclid|pk|tm|utm|wt)_([^&]+)/, "", s);
		gsub(/\?&/, "?", s);
		gsub(/[\?&]+$/, "", s);
		return s
	}
	{
		$3 = filterlink($3); # link
		$8 = filterlink($8); # enclosure
		# try to remove tracking pixels: <img/> tags with 1px width or height.
		gsub("<img[^>]*(width|height)[[:space:]]*=[[:space:]]*[\"'"'"' ]?1[\"'"'"' ]?[^0-9>]+[^>]*>", "", $4);
		print $0;
	}'
}

filter_html_entities() {
	# convert HTML entities into dumb counterparts
	awk '{
	gsub(/&#34;/,"\""); gsub(/&#x22;/,"\"");
	gsub(/&#39;/,"'\''"); gsub(/&#x27;/,"'\''");
	gsub(/&amp;/,"\\&"); # MUST BE LAST!
	print
}'
}

# Fetch #########################################################

fetch() { # fetch(name, url, feedfile)
	curl -s -L \
		--max-redirs 3 \
		--header "User-Agent: $USER_AGENT" \
		--fail \
		--max-time 15 \
		"$2"
}
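
Note: a quick way to see the filter chain at work on one hand-written TSV record (fields: timestamp, title, link, content, type, id, author, enclosure, category; the data is made up):

	. ./sfeedrc
	printf '1700000000\t\thttps://www.youtube.com/watch?v=abc&utm_source=x\t\t\t\t\t\t\n' |
		filter "SomeChannel"
	# -> the empty title becomes "[empty]", the link is rewritten to the
	#    embed form, and the utm_ tracking parameter is stripped:
	# 1700000000	[empty]	https://www.youtube.com/embed/abc	...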
diff --git a/style.css b/style.css
new file mode 100644
index 0000000..0fa9d6b
--- /dev/null
+++ b/style.css
@@ -0,0 +1,176 @@
body {
	background: #888;
}

main {
	position: relative;
	display: flex;
	flex-flow: column nowrap;
}

#items {
	max-width: 70ch;
}

.entries tr {
	vertical-align: baseline;
}

.fresh {
	font-weight: bold;
}

.entry-timestamp {
	font-family: monospace;
	padding-right: 8px;
}

#list {
	padding: 2ch;
}

aside {
	flex-grow: 1;
}

aside li {
	list-style: none;
	text-align: right;
}

aside li:nth-child(even),
tr:nth-child(even) {
	background: inherit;
}

aside li:nth-child(odd),
tr:nth-child(odd) {
	background: #aaa;
}

a { display: block; }

a:link {
	text-decoration: none;
	color: black;
}
a:visited {
	font-style: italic;
	color: inherit;
}
aside a:visited { font-style: normal; }
a:hover {
	background: yellow;
}
a:active {
	background: cyan;
}

header {
	display: flex;
	flex-flow: row wrap;
	margin: 1ch 0;
	align-items: baseline;
	justify-content: space-between;
}
header h2, header h1 {
	margin: 0;
}
header h2 a { display: inline; }
header .top {
	font-size: 80%;
	text-align: right;
	flex-grow: 1;
}

body>nav {
	border-bottom: 1px solid black;
	margin-top: 0;
	text-align: right;
}
body>nav a { display: inline; }

.last-updated {
	font-style: italic;
	font-size: 80%;
	text-align: right;
}

.entries { border-collapse: collapse; }

.entries, .entry-title { width: 100%; }
.entry-extra a { color: blue; display: inline; }

header a { display: inline; }

@media screen and (max-width: 720px) {
	main, header {
		flex-flow: row wrap;
		padding: 0; margin: 0;
	}
	* { border: none; }
	.entries, .entry-title { width: 100%; }
	html, body, #list, #items { padding: 0; margin: 0; max-width: 100%; }
	header h2, header p { margin: 4px; }
	aside a { display: inline; }
	aside {
		padding: 2ch;
		order: 1;
		border-bottom: 1px solid black;
	}
	aside ul { margin: 0; padding: 0; }
	#list { order: 2; }
	aside li {
		display: inline;
		background: inherit !important;
	}
	aside li::after {
		background: inherit;
		font-weight: normal;
		content: " //";
	}
	aside li:last-child::after {
		content: "";
	}
	.entry-timestamp {
		display: none;
	}
}

@media (prefers-color-scheme: light) {
	body { background: white; color: black; }
	aside li:nth-child(even),
	tr:nth-child(even) { background: white; }
	aside li:nth-child(odd),
	tr:nth-child(odd) { background: #eee; }
	a:link { color: black; }
	a:hover { background: yellow; }
	a:active { background: cyan; }
	.entry-extra a { color: blue; }
	body>nav { border-bottom: 1px solid black; }
}

@media (prefers-color-scheme: dark) {
	body { background: black; color: white; }
	aside li:nth-child(even),
	tr:nth-child(even) { background: black; }
	aside li:nth-child(odd),
	tr:nth-child(odd) { background: #222; }
	a:link { color: white; }
	a:hover { color: yellow; background: inherit; }
	a:active { color: cyan; background: inherit; }
	.entry-extra a { color: cyan; }
	body>nav { border-bottom: 1px solid white; }
}

@media screen and (max-width: 720px) and (prefers-color-scheme: dark) {
	aside { border-bottom: 1px solid white; }
	aside li { background: black !important; }
}

@media screen and (max-width: 720px) and (prefers-color-scheme: light) {
	aside { border-bottom: 1px solid black; }
	aside li { background: white !important; }
}