Compare commits

..

14 commits

Author SHA1 Message Date
1244ace745 change style and timezone
All checks were successful
/ deploy (push) Successful in 11s
2026-02-25 17:03:49 +01:00
08573a0b59 output not public
All checks were successful
/ deploy (push) Successful in 11s
2026-02-25 16:29:54 +01:00
f80ae93ab6 update deploy
Some checks failed
/ deploy (push) Failing after 10s
2026-02-25 16:29:15 +01:00
b6e4f23389 update to a fork
Some checks failed
/ deploy (push) Failing after 8s
2026-02-25 16:27:11 +01:00
9065ee59f4 spaces
All checks were successful
/ deploy (push) Successful in 15s
2026-02-25 15:51:57 +01:00
be59c826bd comment
Some checks failed
/ deploy (push) Failing after 16s
2026-02-25 15:50:11 +01:00
d3806751c9 deploy good path ?
Some checks failed
/ deploy (push) Failing after 16s
2026-02-25 15:44:21 +01:00
d8b7053fb5 depolouych anges
All checks were successful
/ deploy (push) Successful in 15s
2026-02-25 15:41:02 +01:00
941c1abf5b workdir
All checks were successful
/ deploy (push) Successful in 15s
2026-02-25 15:25:40 +01:00
b83efcbfde rebuild
Some checks failed
/ deploy (push) Failing after 15s
2026-02-25 15:24:35 +01:00
9fdb414160 install rsync in deploy
Some checks failed
/ deploy (push) Failing after 15s
2026-02-25 15:17:11 +01:00
8c72cafc90 add my feeds
Some checks failed
/ deploy (push) Failing after 15s
2026-02-25 15:05:08 +01:00
George Mandis
d9857c501e
Removing Glitch (RIP) (#26) 2026-01-10 06:09:19 -05:00
kmfd
dbfe46a05d
Update README.md (#23) 2025-01-20 05:21:35 -05:00
26 changed files with 700 additions and 3790 deletions

View file

@ -1,50 +0,0 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module"
},
"plugins": [
"@typescript-eslint"
],
"rules": {
"indent": [
"error",
2
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"double"
],
"semi": [
"error",
"always"
],
"no-trailing-spaces": [
2,
{
"skipBlankLines": false
}
],
"no-multiple-empty-lines": [
"error",
{
"max": 2,
"maxEOF": 1
}
],
"@typescript-eslint/no-var-requires": 0
}
}

View file

@ -0,0 +1,32 @@
# Forgejo Actions workflow: build Bubo and deploy the generated site.
# (Indentation reconstructed — the pasted copy had lost all YAML structure.)
on:
  push:
    branches:
      - main
  schedule:
    - cron: '0 * * * *' # every hour
jobs:
  deploy:
    runs-on: ubuntu-latest
    container:
      # Bind-mount the host web root so the job can rsync straight into it.
      volumes:
        - /var/www/bubo:/var/www/bubo
    steps:
      - uses: actions/checkout@v4
      - name: Install dependencies
        run: npm install
      - name: Install rsync
        run: apt-get update && apt-get install -y rsync
      - name: Build Bubo
        run: npm run build
      - name: List built files
        run: ls -la ./output/
      - name: Deploy to web root
        run: |
          rsync -avz --delete --exclude='.git' --exclude='.forgejo' \
            ./output/ /var/www/bubo/
          chmod -R 755 /var/www/bubo

12
.github/FUNDING.yml vendored
View file

@ -1,12 +0,0 @@
# These are supported funding model platforms
github: georgemandis
patreon: #
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: #

View file

@ -1,38 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.

View file

@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

6
.gitignore vendored
View file

@ -1,4 +1,4 @@
node_modules/* node_modules/*
public/index.html output/index.html
dist/* cache.json
.DS_Store data.json

1
.node-version Normal file
View file

@ -0,0 +1 @@
v18.15.0

1
.nvmrc
View file

@ -1 +0,0 @@
18.12.1

21
LICENSE
View file

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 George Mandis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

148
README.md
View file

@ -1,137 +1,37 @@
[![Netlify Status](https://api.netlify.com/api/v1/badges/81dd219c-51cb-4418-a18c-42c8b104c689/deploy-status)](https://app.netlify.com/sites/bubo-rss-demo/deploys) # 🦉 Bubo Reader (Fork)
# 🦉 Bubo Reader ![screenshot](./demo.png)
Bubo Reader is a hyper-minimalist feed reader (RSS, Atom, JSON) you can deploy on your own server, [Netlify](https://netlify.com) in a few steps or [Glitch](https://glitch.com) in even fewer steps! The goal of the project is to generate a webpage that shows a list of links from a collection of feeds organized by category and website. That's it. [Demo Site](https://kevinfiol.com/rss-reader/)
It is named after this [silly robot owl](https://www.youtube.com/watch?v=MYSeCfo9-NI) from Clash of the Titans (1981). This is a personal fork of the excellent [Bubo Reader](https://github.com/georgemandis/bubo-rss) by George Mandis. I've made several opinionated changes to the setup, including replacing dependencies with more compact alternatives. Please see below for deployment instructions.
You can read more about this project on my blog: Original blogpost: [Introducing Bubo RSS: An Absurdly Minimalist RSS Feed Reader](https://george.mand.is/2019/11/introducing-bubo-rss-an-absurdly-minimalist-rss-feed-reader/)
- [Introducing Bubo RSS: An Absurdly Minimalist RSS Feed Reader](https://george.mand.is/2019/11/introducing-bubo-rss-an-absurdly-minimalist-rss-feed-reader/). Blogpost about my fork: [A minimal RSS Feed Reader](https://kevinfiol.com/blog/a-minimal-rss-feed-reader/)
- [Publishing Bubos RSS to Netlify with GitHub Actions](https://george.mand.is/2020/02/publishing-bubos-rss-to-netlify-with-github-actions/)
## Get Started Some changes I made:
- Clone or fork the repo and run `npm install` to install the dependencies. * Replace `nunjucks` with template strings (less dependencies for shorter build times)
- Update `feeds.json` to include categories and links to feeds you would like to see. * Replace `node-fetch` with Node's native `fetch`
- Run `npm run build:bubo` * Many styling changes, including using the `:target` CSS selector to switch between groups (inspired by https://john-doe.neocities.org/)
* The build script now sorts the feeds in each group by which one has the latest updates (this greatly improves the experience, imo)
* An "All Articles" view
* Privacy-redirect support via config file
* Dark mode via `@media (prefers-color-scheme: dark)`
That's it! You should now have a static page with links to the latest content from your feeds in the `public` folder, ready to serve. ## How to build
<details> Node `>=18.x` required.
<summary>
<strong>Anatomy of Bubo Reader</strong>
</summary>
The static pieces: ```shell
npm install
- `conf/feeds.json` - a JSON file containing your feed URLS separated into categories. npm run build
- `config/template.html` - a [Nunjucks](https://mozilla.github.io/nunjucks/) template that lets you change how the feeds are displayed. This can be changed to anything else you like— see below.
- `public/style.css` - a CSS file to stylize your feed output.
- `public/index.html` - The HTML file that gets automatically generated when Bubo is run.
The engine:
- `src/index.ts` - The primary script you run when you want to build a new version of Bubo. It will automatically fetch the latest content from your feeds and build a new static file at `public/index.html`.
- `src/renderer.ts` — The renderer that loads Nunjucks, the template and understands how to process the incoming feed data. Prefer something else? This is the place to change it!
- `src/utilities.ts` — A variety of parsing and normalization utilities for Bubo, hidden away to try and keep things clean.
</details>
<details>
<summary>
<strong>Throttling</strong>
</summary>
In the main `index.ts` file you will find two values that allow you to batch and throttle your feed requests:
- `MAX_CONNECTIONS` dictates the maximum number of requests a batch can have going at once.
- `DELAY_MS` dictates the amount of delay time between each batch.
The default configuration is **no batching or throttling** because `MAX_CONNECTIONS` is set to `Infinity`. If you wanted to change Bubo to only fetch one feed at a time every second you could set these values to:
```javascript
const MAX_CONNECTIONS = 1;
const DELAY_MS = 1000;
``` ```
If you wanted to limit things to 10 simultaneous requests every 2.5 seconds you could set it like so: ## How to host on Github Pages
```javascript 1. Fork this repo!
const MAX_CONNECTIONS = 10; 2. Enable [Github Pages](https://pages.github.com/) for your repo (either as a project site, or user site)
const DELAY_MS = 2500; 3. Configure `.github/workflows/build.yml` to your liking
``` * Uncomment the `schedule` section to enable scheduled builds
In practice, I've never _really_ run into an issue leaving `MAX_CONNECTIONS` set to `Infinity` but this feels like a sensible safeguard to design.
</details>
<details>
<summary>
<strong>Getting Started</strong>
</summary>
- [Deploying to Glitch](#glitch)
- [Deploying to Netlify](#netlify)
- [Keeping feeds updated](#updated)
<a id="glitch"></a>
## Deploying to Glitch
The quickest way is to remix the project on Glitch:
[https://glitch.com/edit/#!/bubo-rss](https://glitch.com/edit/#!/bubo-rss)
There is also a `glitch` branch on this repo if you'd prefer to start there.
Just change some feeds in `./config/feeds.json` file and you're set! If you'd like to modify the style or the template you can changed `./public/style.css` file or the `./config/template.html` file respectively.
<a id="netlify"></a>
## Deploying to Netlify
- [Fork the repository](https://github.com/georgemandis/bubo-rss/fork)
- From your forked repository edit `config/feeds.json` to manage your feeds and categories
- [Create a new site](https://app.netlify.com/start) on Netlify from GitHub
The deploy settings should automatically import from the `netlify.toml` file. All you'll need to do is confirm and you're ready to go!
<a id="updated"></a>
### Keeping Feeds Updated
#### Using Netlify Webhooks
To keep your feeds up to date you'll want to [setup a Build Hook](https://www.netlify.com/docs/webhooks/#incoming-webhooks) for your Netlify site and use another service to ping it every so often to trigger a rebuild. I'd suggest looking into:
- [IFTTT](https://ifttt.com/)
- [Zapier](https://zapier.com/)
- [EasyCron](https://www.easycron.com/)
#### Rolling Your Own
If you already have a server running Linux and some command-line experience it might be simpler to setup a [cron job](https://en.wikipedia.org/wiki/Cron).
</details>
## Demos
You can view live demos here:
- [https://bubo-rss-demo.netlify.com/](https://bubo-rss-demo.netlify.com/)
- [http://bubo-rss.glitch.me/](http://bubo-rss.glitch.me/)
## Support
If you found this useful please consider [sponsoring me or this project](https://github.com/sponsors/georgemandis).
If you'd rather run this on your own server please consider using one of these affiliate links to setup a micro instance on [Linode](https://www.linode.com/?r=8729957ab02b50a695dcea12a5ca55570979d8b9), [Digital Ocean](https://m.do.co/c/31f58d367777) or [Vultr](https://www.vultr.com/?ref=8403978).
## Showcase
Here are some websites using Bubo Reader:
- [Kevin Fiol](https://kevinfiol.com/reader/) ([repo](https://github.com/kevinfiol/reader))
Please share if you would like to be featured!

View file

@ -1,23 +0,0 @@
{
"Developer News": [
"https://hacks.mozilla.org/feed/",
"https://web.dev/feed.xml",
"https://v8.dev/blog.atom",
"https://alistapart.com/main/feed/",
"https://css-tricks.com/feed/",
"https://dev.to/feed",
"https://changelog.com/feed"
],
"Blogs": [
"https://george.mand.is/feed.xml",
"https://joy.recurse.com/feed.atom"
],
"My GitHub Projects": [
"https://github.com/georgemandis.atom",
"https://github.com/georgemandis/bubo-rss/releases.atom",
"https://github.com/georgemandis/konami-js/releases.atom",
"https://github.com/georgemandis/konami-js/commits/main.atom",
"https://github.com/javascriptforartists/cheer-me-up-and-sing-me-a-song/commits/master.atom",
"https://github.com/georgemandis/circuit-playground-midi-multi-tool/commits/master.atom"
]
}

View file

@ -1,56 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta http-equiv="X-UA-Compatible" content="ie=edge" />
<title>🦉 Bubo Reader</title>
<link rel="stylesheet" href="/style.css" />
</head>
<body>
<h1>🦉 Bubo Reader</h1>
{% for group, feeds in data %}
<h2>{{ group }}</h2>
{% for feed in feeds %}
<details>
<summary>
<span class="feed-title">{{ feed.title }}</span>
<span class="feed-url">({{ feed.feed }})</span>
</summary>
<ul>
{% for item in feed.items %}
<li>
{{ item.timestamp | formatDate }} -
<a
href="{{ item.link }}"
target="_blank"
rel="noopener norefferer nofollow"
>{{ item.title }}</a
>
</li>
{% endfor %}
</ul>
</details>
{% endfor %} {% endfor %} {% if errors | length > 0 %}
<h2>Errors</h2>
<p>There were errors trying to parse these feeds:</p>
<ul>
{% for error in errors %}
<li>{{ error }}</li>
{% endfor %}
</ul>
{% endif %}
<br />
<hr />
<p>Last updated {{ now }}.</p>
<p>
Powered by
<a href="https://github.com/georgemandis/bubo-rss"
>Bubo Reader (v{{ info.version }})</a
>, a project by <a href="https://george.mand.is">George Mandis</a>. ❤️
<a href="{{ info.funding.url }}">Sponsor on GitHub</a>
</p>
</body>
</html>

View file

@ -1,4 +0,0 @@
[build]
command = "npm run build:bubo"
publish = "./public/"

276
output/style.css Normal file
View file

@ -0,0 +1,276 @@
:root {
--font-size: 14px;
--color: #111;
--bg-color: #fffff8;
--faded-bg: #f9f9f2;
--title-font-size: 16px;
--title-font-weight: 600;
--main-padding-right: 8rem;
}
@media (prefers-color-scheme: dark) {
:root {
--color: #ddd;
--bg-color: #151515;
--faded-bg: #1b1b1b;
}
}
@media screen and (max-width: 900px) {
:root {
--main-padding-right: 0;
}
article.item {
margin-right: 0 !important;
margin-left: 0 !important;
}
.menu-label,
.menu-btn {
display: block !important;
position: absolute;
top: 0;
right: 0;
z-index: 99;
}
.menu-btn {
display: none !important;
}
.menu-label {
padding: 1rem 2rem;
background-color: var(--faded-bg);
}
.menu-label::after {
content: 'groups';
}
.menu-btn:checked ~ .sidebar {
display: block !important;
}
.menu-btn:checked ~ main {
display: none !important;
}
.menu-btn:checked ~ .menu-label::after {
content: 'back';
}
.sidebar {
display: none !important;
padding: 1rem;
position: absolute;
top: 0;
left: 5px;
height: 100%;
background-color: var(--bg-color);
}
.sidebar
> header
> .group-selector {
list-style: none;
padding: 0;
}
.sidebar
> header
> .group-selector
> li {
font-size: 1.2em;
}
}
@keyframes details-show {
from {
opacity:0;
transform: var(--details-translate, translateY(-0.5em));
}
}
body {
color: var(--color);
background-color: var(--bg-color);
margin: 0;
padding: 0;
font-family: serif;
font-size: var(--font-size);
overflow: hidden;
}
details[open] > *:not(summary),
section {
animation: details-show 100ms ease-in-out;
}
h1, h2, h3 {
font-family: monospace;
}
a:link {
color: inherit;
}
a:visited {
color: #b58c8c;
}
a:hover {
opacity: .75;
}
summary {
position: sticky;
top: 0;
padding-top: 0.65rem;
padding-bottom: 0.65rem;
user-select: none;
cursor:pointer;
font-family: monospace;
background-color: var(--bg-color);
}
summary:hover span,
summary:hover div {
opacity:.75;
}
.menu-btn,
.menu-label {
display: none;
}
.group-selector a,
.group-selector a:visited {
color: inherit;
font-family: monospace;
line-height: 1.5em;
}
.feed-title {
font-weight: var(--title-font-weight);
font-size: var(--title-font-size);
}
.feed-url, .feed-timestamp {
color:#aaa;
}
.feed-url {
/**/
}
.feed-timestamp {
margin-left: 1.45rem;
}
.monospace {
font-family: monospace;
}
.inline {
display: inline;
}
.app {
display: flex;
gap: 2rem;
padding: 0 0rem 1rem 1rem;
}
.sidebar {
flex: 1;
display: flex;
flex-direction: column;
justify-content: space-between;
}
header {
padding-top: 1rem;
}
main {
flex: 5;
height: 100vh;
overflow-y: auto;
padding-right: var(--main-padding-right);
}
article.item {
max-width: 85%;
padding: 0.15rem 0.75rem;
margin-left: 1.5rem;
margin-right: 1.5rem;
margin-top: 0.5rem;
margin-bottom: 0.5rem;
background-color: var(--faded-bg);
}
#all-articles > article.item {
margin-left: 0;
}
article.item header.item__header {
font-size: var(--title-font-size);
}
.item__feed-url {
opacity: 0.25;
}
ul.article-links {
list-style: none;
padding-left: 0;
}
ul.article-links > li {
display: inline-block;
margin-right: .5rem;
}
footer {
opacity: 0.25;
font-size: 0.75em;
}
footer:hover {
opacity: 1;
}
section {
z-index: 1;
/* ! Everything below is needed ! */
display: none;
height: 100%;
width: 100%;
background-color: var(--bg-color);
}
section > h2 {
margin-top: 0;
padding-top: 19px;
}
section:target { /* Show section */
display: block;
}
section:target ~ .default-text {
display: none;
}
.default-text {
text-align: center;
position: relative;
top: 5%;
font-family: monospace;
font-size: 2em;
}

3036
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -1,45 +1,19 @@
{ {
"name": "bubo-reader", "name": "reader",
"version": "2.0.2", "version": "1.0.0",
"description": "A simple but effective feed reader (RSS, JSON)", "description": "A somewhat dumb but effective feed reader (RSS, JSON & Twitter)",
"homepage": "https://github.com/georgemandis/bubo-rss",
"main": "src/index.js",
"type": "module", "type": "module",
"engines": {
"node": ">=18.x"
},
"scripts": { "scripts": {
"dev": "tsc --watch", "build": "node src/build.js",
"clean": "rm -rf dist", "write": "node src/build.js --write",
"build": "tsc", "cached": "node src/build.js --cached"
"bubo": "node dist/index.js",
"build:bubo": "tsc && node dist/index.js"
}, },
"author": { "author": "kevinfiol",
"name": "George Mandis", "license": "ISC",
"email": "george@mand.is",
"url": "https://george.mand.is"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/georgemandis"
},
"bugs": {
"url": "https://github.com/georgemandis/bubo-rss/issues",
"email": "george+bubo@mand.is"
},
"license": "MIT",
"dependencies": { "dependencies": {
"chalk": "^5.2.0", "rss-parser": "^3.6.3"
"node-fetch": "^3.3.1",
"nunjucks": "^3.2.4",
"rss-parser": "^3.13.0"
},
"devDependencies": {
"@types/node": "^20.2.5",
"@types/nunjucks": "^3.2.2",
"@types/xml2js": "^0.4.11",
"@typescript-eslint/eslint-plugin": "^5.59.8",
"@typescript-eslint/parser": "^5.59.8",
"eslint": "^8.42.0",
"tslib": "^2.5.3",
"typescript": "^5.1.3"
} }
} }

View file

@ -1,28 +0,0 @@
body {
font-family: system-ui;
font-size: 18px;
}
details:focus,
details:focus-within,
details:hover {
/* background:#ffeb3b; */
/* outline:2px #000 solid; */
}
details ul li {}
summary {
cursor: pointer;
}
summary:hover {
opacity: .75;
}
.feed-url {
color: #aaa;
}

15
src/@types/bubo.d.ts vendored
View file

@ -1,15 +0,0 @@
export interface Feeds {
[key: string]: object[]
}
export interface FeedItem {
[key: string]: string | number | Date | FeedItem[];
items: FeedItem[]
}
//NEW WAY
export type JSONValue =
| string
| number
| boolean
| { [x: string]: JSONValue }
| Array<JSONValue>;

208
src/build.js Normal file
View file

@ -0,0 +1,208 @@
/**
 * 🦉 Bubo RSS Reader
 * ====
 * Dead, dead simple feed reader that renders an HTML
 * page with links to content from feeds organized by site
 *
 */
import Parser from 'rss-parser';
import { resolve } from 'node:path';
import { readFileSync, writeFileSync } from 'node:fs';
import { template } from './template.js';
// CLI flags: --write persists fetched feed data to CACHE_PATH;
// --cached skips fetching entirely and renders from that cache.
// --write wins if both are given.
const WRITE = process.argv.includes('--write');
const USE_CACHE = !WRITE && process.argv.includes('--cached');
const CACHE_PATH = './src/cache.json';
const OUTFILE_PATH = './output/index.html';
// Response content-types accepted as a feed; any other content-type
// causes the feed to be recorded as an error in build().
const CONTENT_TYPES = [
'application/json',
'application/atom+xml',
'application/rss+xml',
'application/xml',
'application/octet-stream',
'text/xml'
];
// Load configs (readCfg returns {} for a missing file), then build.
// Top-level await: the package is declared "type": "module".
const config = readCfg('./src/config.json');
const feeds = USE_CACHE ? {} : readCfg('./src/feeds.json');
const cache = USE_CACHE ? readCfg(CACHE_PATH) : {};
await build({ config, feeds, cache, writeCache: WRITE });
/**
 * Fetch every feed, normalize its items, and write the rendered HTML page.
 *
 * @param {object} opts
 * @param {object} opts.config     optional settings (timezone_offset, redirects)
 * @param {object} opts.feeds      map of { groupName: [feedUrl, ...] } (empty when cached)
 * @param {object} opts.cache      previously persisted { groups, allItems } (empty unless --cached)
 * @param {boolean} [opts.writeCache=false]  persist fetched results to CACHE_PATH
 */
async function build({ config, feeds, cache, writeCache = false }) {
  let allItems = cache.allItems || [];
  const parser = new Parser();
  const errors = [];
  const groupContents = {};
  for (const groupName in feeds) {
    groupContents[groupName] = [];
    // Fetch a whole group in parallel; a rejected entry carries [url, error]
    // so we can still report which feed failed.
    const results = await Promise.allSettled(
      Object.values(feeds[groupName]).map(url =>
        fetch(url, { method: 'GET' })
          .then(res => [url, res])
          .catch(e => {
            throw [url, e];
          })
      )
    );
    for (const result of results) {
      if (result.status === 'rejected') {
        const [url, error] = result.reason;
        errors.push(url);
        console.error(`Error fetching ${url}:\n`, error);
        continue;
      }
      const [url, response] = result.value;
      try {
        // e.g., `application/xml; charset=utf-8` -> `application/xml`
        const contentType = response.headers.get('content-type').split(';')[0];
        if (!CONTENT_TYPES.includes(contentType))
          throw Error(`Feed at ${url} has invalid content-type.`)
        const body = await response.text();
        const contents = typeof body === 'string'
          ? await parser.parseString(body)
          : body;
        const isRedditRSS = contents.feedUrl && contents.feedUrl.includes("reddit.com/r/");
        // FIX: was `!contents.items.length === 0`, which compares a boolean
        // to 0 and is always false — empty feeds were never rejected.
        if (!contents.items || contents.items.length === 0)
          throw Error(`Feed at ${url} contains no items.`)
        contents.feed = url;
        contents.title = contents.title || contents.link;
        groupContents[groupName].push(contents);
        // item sort & normalization
        contents.items.sort(byDateSort);
        contents.items.forEach((item) => {
          // 1. try to normalize date attribute naming
          const dateAttr = item.pubDate || item.isoDate || item.date || item.published;
          item.timestamp = new Date(dateAttr).toLocaleDateString();
          // 2. correct link url if it lacks the hostname
          if (item.link && item.link.split('http').length === 1) {
            item.link =
              // if the hostname ends with a /, and the item link begins with a /
              contents.link.slice(-1) === '/' && item.link.slice(0, 1) === '/'
                ? contents.link + item.link.slice(1)
                : contents.link + item.link;
          }
          // 3. parse subreddit feed comments
          if (isRedditRSS && item.contentSnippet && item.contentSnippet.startsWith('submitted by ')) {
            // matches anything between double quotes, like `<a href="matches this">foo</a>`
            const quotesContentMatch = /(?<=")(?:\\.|[^"\\])*(?=")/g;
            let [_submittedBy, _userLink, contentLink, commentsLink] = item.content.split('<a href=');
            item.link = contentLink.match(quotesContentMatch)[0];
            item.comments = commentsLink.match(quotesContentMatch)[0];
          }
          // 4. redirects (privacy front-ends, e.g. medium -> scribe.rip per config)
          if (config.redirects) {
            // need to parse hostname methodically due to unreliable feeds
            const url = new URL(item.link);
            const tokens = url.hostname.split('.');
            const host = tokens[tokens.length - 2];
            const redirect = config.redirects[host];
            if (redirect) item.link = `https://${redirect}${url.pathname}${url.search}`;
          }
          // 5. escape html in titles (guard: some feed items omit the title,
          // and escapeHtml would throw on undefined)
          item.title = escapeHtml(item.title || '');
        });
        // add to allItems
        allItems = [...allItems, ...contents.items];
      } catch (e) {
        console.error(e);
        errors.push(url)
      }
    }
  }
  const groups = cache.groups || Object.entries(groupContents);
  if (writeCache) {
    writeFileSync(
      resolve(CACHE_PATH),
      JSON.stringify({ groups, allItems }),
      'utf8'
    );
  }
  // for each group, sort the feeds
  // sort the feeds by comparing the isoDate of the first items of each feed
  groups.forEach(([_groupName, feeds]) => {
    feeds.sort((a, b) => byDateSort(a.items[0], b.items[0]));
  });
  // sort `all articles` view
  allItems.sort((a, b) => byDateSort(a, b));
  const now = getNowDate(config.timezone_offset).toString();
  const html = template({ allItems, groups, now, errors });
  writeFileSync(resolve(OUTFILE_PATH), html, { encoding: 'utf8' });
  console.log(`Reader built successfully at: ${OUTFILE_PATH}`);
}
/**
* utils
*/
/**
 * Pull a Date out of a feed item.
 * Prefers the machine-readable `isoDate`, falls back to `pubDate`;
 * returns undefined when the item is missing or carries neither field.
 */
function parseDate(item) {
  if (!item) return undefined;
  const raw = item.isoDate || item.pubDate;
  return raw ? new Date(raw) : undefined;
}
/**
 * Array.sort comparator: newest item first.
 * Pairs where either date can't be parsed compare as equal,
 * leaving their relative order untouched.
 */
function byDateSort(itemA, itemB) {
  const dateA = parseDate(itemA);
  const dateB = parseDate(itemB);
  if (!dateA || !dateB) return 0;
  return dateB - dateA;
}
/**
 * Current time shifted to a fixed UTC offset (in hours, may be fractional).
 * Converts local time to UTC via getTimezoneOffset, then applies `offset`.
 */
function getNowDate(offset = 0) {
  const now = new Date();
  const utcMs = now.getTime() + now.getTimezoneOffset() * 60000;
  return new Date(utcMs + offset * 3600000);
}
/**
 * Read and parse a JSON config file.
 * A missing file is tolerated (warn, return {}); malformed JSON is
 * fatal and terminates the process with exit code 1.
 */
function readCfg(path) {
  let raw;
  try {
    raw = readFileSync(resolve(path), { encoding: 'utf8' });
  } catch (_readErr) {
    console.warn(`Warning: Config at ${path} does not exist`);
    return {};
  }
  try {
    return JSON.parse(raw);
  } catch (_parseErr) {
    console.error('Error: Config is Invalid JSON: ' + path);
    process.exit(1);
  }
}
/**
 * Escape the five HTML-special characters in a string.
 * A single-pass replace never touches the '&' introduced by its own
 * entities, matching the original "& first" sequential behavior.
 */
function escapeHtml(html) {
  const entities = {
    '&': '&amp;',
    '<': '&lt;',
    '>': '&gt;',
    "'": '&apos;',
    '"': '&quot;'
  };
  return html.replace(/[&<>'"]/g, ch => entities[ch]);
}

6
src/config.json Normal file
View file

@ -0,0 +1,6 @@
{
"timezone_offset": 1.0,
"redirects": {
"medium": "scribe.rip"
}
}

13
src/feeds.json Normal file
View file

@ -0,0 +1,13 @@
{
"feeds": [
"https://la-poterie-des-chemins-creux.fr/feed/"
],
"blogs": [
"https://drewdevault.com/blog/index.xml",
"https://maia.crimew.gay/feed.xml",
"https://george.mand.is/feed.xml",
"https://invisibleup.com/atom.xml",
"https://www.wheresyoured.at/rss/",
"https://solar.lowtechmagazine.com/fr/posts/index.xml"
]
}

View file

@ -1,172 +0,0 @@
/*
* 🦉 Bubo Reader
* ====
* Dead simple feed reader (RSS + JSON) that renders an HTML
* page with links to content from feeds organized by site
*
* Code: https://github.com/georgemandis/bubo-rss
* Copyright (c) 2019 George Mandis (https://george.mand.is)
* Version: 1.0.1 (11/14/2021)
* Licensed under the MIT License (http://opensource.org/licenses/MIT)
*/
import fetch from "node-fetch";
import Parser from "rss-parser";
import { Feeds, FeedItem } from "./@types/bubo";
import { Response } from "node-fetch";
import { render } from "./renderer.js";
import {
getLink,
getTitle,
getTimestamp,
parseFeed,
getFeedList,
getBuboInfo
} from "./utilities.js";
import { writeFile } from "fs/promises";
import chalk from "chalk";
const buboInfo = await getBuboInfo();
const parser = new Parser();
const feedList = await getFeedList();
const feedListLength =
Object.entries(feedList).flat(2).length - Object.keys(feedList).length;
/**
* contentFromAllFeeds = Contains normalized, aggregated feed data and is passed to template renderer at the end
* errors = Contains errors from parsing feeds and is also passed to template.
*/
const contentFromAllFeeds: Feeds = {};
const errors: unknown[] = [];
// benchmarking data + utility
const initTime = Date.now();
const benchmark = (startTime: number) =>
chalk.cyanBright.bold(`${(Date.now() - startTime) / 1000} seconds`);
/**
* These values are used to control throttling/batching the fetches:
* - MAX_CONNECTION = max number of fetches to contain in a batch
* - DELAY_MS = the delay in milliseconds between batches
*/
const MAX_CONNECTIONS = Infinity;
const DELAY_MS = 850;
const error = chalk.bold.red;
const success = chalk.bold.green;
// keeping tally of total feeds fetched and parsed so we can compare
// to feedListLength and know when we're finished.
let completed = 0;
/**
* finishBuild
* --
* function that gets called when all the feeds are through fetching
* and we want to build the static output.
*/
const finishBuild: () => void = async () => {
completed++;
// if this isn't the last feed, just return early
if (completed !== feedListLength) return;
process.stdout.write("\nDone fetching everything!\n");
// generate the static HTML output from our template renderer
const output = render({
data: contentFromAllFeeds,
errors: errors,
info: buboInfo
});
// write the output to public/index.html
await writeFile("./public/index.html", output);
process.stdout.write(
`\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark(
initTime
)}\n- ${errors.length} errors\n`
);
};
/**
* processFeed
* --
* Process an individual feed and normalize its items
* @param { group, feed, startTime}
* @returns Promise<void>
*/
const processFeed =
({
group,
feed,
startTime
}: {
group: string;
feed: string;
startTime: number;
}) =>
async (response: Response): Promise<void> => {
const body = await parseFeed(response);
//skip to the next one if this didn't work out
if (!body) return;
try {
const contents: FeedItem = (
typeof body === "string" ? await parser.parseString(body) : body
) as FeedItem;
contents.feed = feed;
contents.title = getTitle(contents);
contents.link = getLink(contents);
// try to normalize date attribute naming
contents?.items?.forEach(item => {
item.timestamp = getTimestamp(item);
item.title = getTitle(item);
item.link = getLink(item);
});
contentFromAllFeeds[group].push(contents as object);
process.stdout.write(
`${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}\n`
);
} catch (err) {
process.stdout.write(
`${error("Error processing:")} ${feed} - ${benchmark(
startTime
)}\n${err}\n`
);
errors.push(`Error processing: ${feed}\n\t${err}`);
}
finishBuild();
};
// go through each group of feeds and process
const processFeeds = () => {
let idx = 0;
for (const [group, feeds] of Object.entries(feedList)) {
contentFromAllFeeds[group] = [];
for (const feed of feeds) {
const startTime = Date.now();
setTimeout(() => {
process.stdout.write(`Fetching: ${feed}...\n`);
fetch(feed)
.then(processFeed({ group, feed, startTime }))
.catch(err => {
process.stdout.write(
error(`Error fetching ${feed} ${benchmark(startTime)}\n`)
);
errors.push(`Error fetching ${feed} ${err.toString()}\n`);
finishBuild();
});
}, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS);
idx++;
}
}
};
processFeeds();

View file

@ -1,44 +0,0 @@
/*
* Return our renderer.
* Using Nunjucks out of the box.
* https://mozilla.github.io/nunjucks/
*/
import nunjucks from "nunjucks";
const env: nunjucks.Environment = nunjucks.configure({ autoescape: true });
import { readFile } from "fs/promises";
import { Feeds, JSONValue } from "./@types/bubo";
/**
 * Filters and globals registered on the shared Nunjucks environment.
 */
// formatDate: epoch-millis string -> locale date; anything unparsable is
// passed through untouched (getTimestamp already falls back to raw strings).
env.addFilter("formatDate", (dateString): string => {
  const parsed: Date = new Date(parseInt(dateString));
  return isNaN(parsed.getTime()) ? dateString : parsed.toLocaleDateString();
});
// "now" is fixed at module load, i.e. the build timestamp.
env.addGlobal("now", new Date().toUTCString());
// load the template
const template: string = (
  await readFile(new URL("../config/template.html", import.meta.url))
).toString();
// generate the static HTML output from our template renderer
/**
 * Render the static HTML output by passing the fetched feed data, any
 * collected errors, and optional build info into the loaded template.
 *
 * @param data   parsed feed content, grouped by feed-list group
 * @param errors fetch/parse failures surfaced to the template
 * @param info   optional metadata (presumably package.json contents — verify caller)
 * @returns the rendered HTML string
 */
const render = ({
  data,
  errors,
  info
}: {
  data: Feeds;
  errors: unknown[];
  info?: JSONValue;
}) => {
  const context = { data, errors, info };
  return env.renderString(template, context);
};
export { render };

110
src/template.js Normal file
View file

@ -0,0 +1,110 @@
// Concatenate the string `fn` produces for each element of `arr`.
// Falsy returns (undefined, '', 0, false) contribute nothing, matching
// the `|| ''` guard callers rely on.
const forEach = (arr, fn) => arr.map(item => fn(item) || '').join('');
// Render one feed item as an <article> card.
// `item` is expected to carry link/title (normalized upstream), plus
// optional feedUrl, feedTitle, timestamp and comments fields.
// FIX: rel was misspelled "norefferer" (both anchors); corrected to
// "noreferrer" so the referrer-suppression link type actually applies.
const article = (item) => `
<article class="item">
  <header class="item__header">
    <a href="${item.link}" target='_blank' rel='noopener noreferrer nofollow'>
      ${item.title}
    </a>
  </header>
  <small>
    ${item.feedUrl ? `<span class="item__feed-url monospace">${item.feedUrl}</span>` : ''}
    ${item.feedTitle ? `<span class="item__feed-title">${item.feedTitle}</span>` : ''}
    <ul class="article-links">
      <li class="monospace">${item.timestamp || ''}</li>
      ${item.comments ? `
        <li><a href="${item.comments}" target='_blank' rel='noopener noreferrer nofollow'>comments</a></li>
      ` : ''
      }
    </ul>
  </small>
</article>
`;
/**
 * Build the complete HTML page for the reader.
 *
 * @param allItems flat list of items rendered in the "all articles" section
 * @param groups   [groupName, feeds] entries driving the sidebar links and
 *                 the per-group <section> blocks (group[0] is the name)
 * @param errors   fetch/parse error strings listed in the footer when non-empty
 * @param now      pre-formatted timestamp shown as "Last updated"
 * @returns the full HTML document as a string
 *
 * NOTE(review): comments cannot be added inside the template literal —
 * they would be emitted into the page — so structure is documented here.
 */
export const template = ({ allItems, groups, errors, now }) => (`
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<title>rss reader</title>
<link rel="stylesheet" href="./style.css">
</head>
<body>
<div class="app">
<input type="checkbox" class="menu-btn" id="menu-btn" />
<label class="menu-label" for="menu-btn"></label>
<div class="sidebar">
<header>
<h1 class="inline" style="user-select: none;">rss</h1>
<ul class="group-selector">
<li><a href="#all-articles">all articles</a></li>
${forEach(groups, group => `
<li><a href="#${group[0]}">${group[0]}</a></li>
`)}
</ul>
</header>
<footer>
${errors.length > 0 ? `
<h2>Errors</h2>
<p>There were errors trying to parse these feeds:</p>
<ul>
${forEach(errors, error => `
<li>${error}</li>
`)}
</ul>
` : ''
}
<p>
Last updated ${now}. Powered by <a href="https://github.com/kevinfiol/rss-reader">Bubo Reader</a>, a project by <a href="https://george.mand.is">George Mandis</a> and <a href="https://kevinfiol.com">Kevin Fiol</a>.
</p>
</footer>
</div>
<main>
<section id="all-articles">
<h2>all articles</h2>
${forEach(allItems, item => article(item))}
</section>
${forEach(groups, ([groupName, feeds]) => `
<section id="${groupName}">
<h2>${groupName}</h2>
${forEach(feeds, feed => `
<details>
<summary>
<span class="feed-title">${feed.title}</span>
<span class="feed-url">
<small>
(${feed.feedUrl})
</small>
</span>
<div class="feed-timestamp">
<small>Latest: ${feed.items[0] && feed.items[0].timestamp || ''}</small>
</div>
</summary>
${forEach(feed.items, item => article(item))}
</details>
`)}
</section>
`)}
<div class="default-text">
<p>welcome to bubo rss reader</p>
<p>select a feed group to get started</p>
</div>
</main>
</div>
</body>
</html>
`);

View file

@ -1,90 +0,0 @@
/*
There's a little inconsistency with how feeds report certain things like
title, links and timestamps. These helpers try to normalize that bit and
provide an order-of-operations list of properties to look for.
Note: these are tightly-coupled to the template and a personal preference.
*/
import { Response } from "node-fetch";
import { readFile } from "fs/promises";
import { FeedItem, JSONValue } from "./@types/bubo";
// Resolve the best link-ish property on a feed object, checking candidate
// keys in priority order. Presence wins even if the value is empty; a miss
// on every candidate yields "".
export const getLink = (obj: FeedItem): string => {
  const candidates: string[] = ["link", "url", "guid", "home_page_url"];
  const ownKeys = new Set(Object.keys(obj));
  const winner = candidates.find(candidate => ownKeys.has(candidate));
  return winner ? (obj[winner] as string) : "";
};
// fallback to URL for the title if not present
// (title -> url -> link)
export const getTitle = (obj: FeedItem): string => {
  // A key only wins if it exists on the object AND holds a truthy value,
  // so an empty title falls through to url, then link.
  const preferred: string[] = ["title", "url", "link"];
  const ownKeys: string[] = Object.keys(obj);
  for (const key of preferred) {
    if (ownKeys.includes(key) && obj[key]) {
      return obj[key] as string;
    }
  }
  return "";
};
// More dependable way to get timestamps.
// Returns epoch millis (as a string) when the item's first date-ish field
// parses, the raw date string when it doesn't, and "" when the item has no
// date field at all.
// FIX: previously an item with none of pubDate/isoDate/date/date_published
// threw a TypeError (calling .toString() on undefined); the `|| ""`
// fallback makes the no-date case return "" instead of crashing.
export const getTimestamp = (obj: FeedItem): string => {
  const dateString: string = (
    obj.pubDate ||
    obj.isoDate ||
    obj.date ||
    obj.date_published ||
    ""
  ).toString();
  const timestamp: number = new Date(dateString).getTime();
  // "" parses to NaN, so the empty fallback is returned unchanged
  return isNaN(timestamp) ? dateString : timestamp.toString();
};
// parse RSS/XML or JSON feeds
// Dispatch on the response's media type: XML-ish types are read as text
// for the RSS parser, JSON feed types are parsed directly, and anything
// else (or a missing content-type) yields an empty object.
export async function parseFeed(response: Response): Promise<JSONValue> {
  // strip parameters like "; charset=utf-8" from the media type
  const contentType = response.headers.get("content-type")?.split(";")[0];
  if (!contentType) return {};
  const xmlTypes = [
    "application/atom+xml",
    "application/rss+xml",
    "application/xml",
    "text/xml",
    "text/html" // this is kind of a gamble
  ];
  const jsonTypes = ["application/json", "application/feed+json"];
  if (xmlTypes.includes(contentType)) {
    return response.text();
  }
  if (jsonTypes.includes(contentType)) {
    return response.json() as Promise<JSONValue>;
  }
  return {};
}
// Load the user's feed configuration from config/feeds.json,
// resolved relative to this module.
export const getFeedList = async (): Promise<JSONValue> => {
  const configUrl = new URL("../config/feeds.json", import.meta.url);
  const raw = await readFile(configUrl);
  return JSON.parse(raw.toString());
};
// Read this project's package.json (name, version, etc.) for display/info.
export const getBuboInfo = async (): Promise<JSONValue> => {
  const pkgUrl = new URL("../package.json", import.meta.url);
  const raw = await readFile(pkgUrl);
  return JSON.parse(raw.toString());
};

View file

@ -1,30 +0,0 @@
{
"compilerOptions": {
"module": "esnext",
"forceConsistentCasingInFileNames": true,
"removeComments": true,
"strict": true,
"importHelpers": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"target": "ES2021",
"noImplicitAny": true,
"moduleResolution": "node",
"sourceMap": false,
"resolveJsonModule": true,
"outDir": "dist",
"baseUrl": ".",
"typeRoots": [
"src/@types"
],
"paths": {
"*": [
"node_modules/*",
"src/@types"
]
}
},
"include": [
"src/**/*"
]
}