Switch from eslint to biome for linting and formatting

This commit is contained in:
Carter McBride 2024-06-18 21:35:07 -06:00
parent 5cc7a45d42
commit 7f0af2c0c1
8 changed files with 198 additions and 233 deletions

View file

@@ -1,50 +0,0 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module"
},
"plugins": [
"@typescript-eslint"
],
"rules": {
"indent": [
"error",
2
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"double"
],
"semi": [
"error",
"always"
],
"no-trailing-spaces": [
2,
{
"skipBlankLines": false
}
],
"no-multiple-empty-lines": [
"error",
{
"max": 2,
"maxEOF": 1
}
],
"@typescript-eslint/no-var-requires": 0
}
}

BIN
bun.lockb

Binary file not shown.

9
eslint.config.js Normal file
View file

@@ -0,0 +1,9 @@
import globals from "globals";
import pluginJs from "@eslint/js";
import tseslint from "typescript-eslint";
export default [
{ languageOptions: { globals: { ...globals.browser, ...globals.node } } },
pluginJs.configs.recommended,
...tseslint.configs.recommended,
];

View file

@@ -3,12 +3,13 @@
"version": "2.0.2", "version": "2.0.2",
"description": "A simple but effective feed reader (RSS, JSON)", "description": "A simple but effective feed reader (RSS, JSON)",
"homepage": "https://github.com/georgemandis/bubo-rss", "homepage": "https://github.com/georgemandis/bubo-rss",
"main": "src/index.js", "main": "src/index.ts",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "bun src/index.js --watch",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"build": "bun src/index.js" "build": "bun src/index.js",
"lint": "biome lint --write ./src",
"format": "biome format --write ./src ./eslint.config.js"
}, },
"author": { "author": {
"name": "George Mandis", "name": "George Mandis",
@@ -31,13 +32,15 @@
"rss-parser": "^3.13.0" "rss-parser": "^3.13.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^20.2.5", "@biomejs/biome": "^1.8.1",
"@types/bun": "latest",
"@types/nunjucks": "^3.2.2", "@types/nunjucks": "^3.2.2",
"@types/xml2js": "^0.4.11", "@types/xml2js": "^0.4.11",
"@typescript-eslint/eslint-plugin": "^5.59.8",
"@typescript-eslint/parser": "^5.59.8",
"eslint": "^8.42.0",
"tslib": "^2.5.3", "tslib": "^2.5.3",
"typescript": "^5.1.3" "typescript": "^5.1.3",
} "typescript-eslint": "^7.13.1"
},
"trustedDependencies": [
"@biomejs/biome"
]
} }

16
src/@types/bubo.d.ts vendored
View file

@@ -1,15 +1,15 @@
export interface Feeds { export interface Feeds {
[key: string]: object[] [key: string]: object[];
} }
export interface FeedItem { export interface FeedItem {
[key: string]: string | number | Date | FeedItem[]; [key: string]: string | number | Date | FeedItem[];
items: FeedItem[] items: FeedItem[];
} }
//NEW WAY //NEW WAY
export type JSONValue = export type JSONValue =
| string | string
| number | number
| boolean | boolean
| { [x: string]: JSONValue } | { [x: string]: JSONValue }
| Array<JSONValue>; | Array<JSONValue>;

View file

@@ -12,25 +12,25 @@
import fetch from "node-fetch"; import fetch from "node-fetch";
import Parser from "rss-parser"; import Parser from "rss-parser";
import { Feeds, FeedItem } from "./@types/bubo"; import type { Feeds, FeedItem } from "./@types/bubo";
import { Response } from "node-fetch"; import type { Response } from "node-fetch";
import { render } from "./renderer.js"; import { render } from "./renderer.js";
import { import {
getLink, getLink,
getTitle, getTitle,
getTimestamp, getTimestamp,
parseFeed, parseFeed,
getFeedList, getFeedList,
getBuboInfo getBuboInfo,
} from "./utilities.js"; } from "./utilities.js";
import { writeFile } from "fs/promises"; import { writeFile } from "node:fs/promises";
import chalk from "chalk"; import chalk from "chalk";
const buboInfo = await getBuboInfo(); const buboInfo = await getBuboInfo();
const parser = new Parser(); const parser = new Parser();
const feedList = await getFeedList(); const feedList = await getFeedList();
const feedListLength = const feedListLength =
Object.entries(feedList).flat(2).length - Object.keys(feedList).length; Object.entries(feedList).flat(2).length - Object.keys(feedList).length;
/** /**
* contentFromAllFeeds = Contains normalized, aggregated feed data and is passed to template renderer at the end * contentFromAllFeeds = Contains normalized, aggregated feed data and is passed to template renderer at the end
@@ -42,14 +42,14 @@ const errors: unknown[] = [];
// benchmarking data + utility // benchmarking data + utility
const initTime = Date.now(); const initTime = Date.now();
const benchmark = (startTime: number) => const benchmark = (startTime: number) =>
chalk.cyanBright.bold(`${(Date.now() - startTime) / 1000} seconds`); chalk.cyanBright.bold(`${(Date.now() - startTime) / 1000} seconds`);
/** /**
* These values are used to control throttling/batching the fetches: * These values are used to control throttling/batching the fetches:
* - MAX_CONNECTION = max number of fetches to contain in a batch * - MAX_CONNECTION = max number of fetches to contain in a batch
* - DELAY_MS = the delay in milliseconds between batches * - DELAY_MS = the delay in milliseconds between batches
*/ */
const MAX_CONNECTIONS = Infinity; const MAX_CONNECTIONS = Number.POSITIVE_INFINITY;
const DELAY_MS = 850; const DELAY_MS = 850;
const error = chalk.bold.red; const error = chalk.bold.red;
@@ -66,26 +66,26 @@ let completed = 0;
* and we want to build the static output. * and we want to build the static output.
*/ */
const finishBuild: () => void = async () => { const finishBuild: () => void = async () => {
completed++; completed++;
// if this isn't the last feed, just return early // if this isn't the last feed, just return early
if (completed !== feedListLength) return; if (completed !== feedListLength) return;
process.stdout.write("\nDone fetching everything!\n"); process.stdout.write("\nDone fetching everything!\n");
// generate the static HTML output from our template renderer // generate the static HTML output from our template renderer
const output = render({ const output = render({
data: contentFromAllFeeds, data: contentFromAllFeeds,
errors: errors, errors: errors,
info: buboInfo info: buboInfo,
}); });
// write the output to public/index.html // write the output to public/index.html
await writeFile("./public/index.html", output); await writeFile("./public/index.html", output);
process.stdout.write( process.stdout.write(
`\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark( `\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark(
initTime initTime,
)}\n- ${errors.length} errors\n` )}\n- ${errors.length} errors\n`,
); );
}; };
/** /**
@@ -96,77 +96,80 @@ const finishBuild: () => void = async () => {
* @returns Promise<void> * @returns Promise<void>
*/ */
const processFeed = const processFeed =
({ ({
group, group,
feed, feed,
startTime startTime,
}: { }: {
group: string; group: string;
feed: string; feed: string;
startTime: number; startTime: number;
}) => }) =>
async (response: Response): Promise<void> => { async (response: Response): Promise<void> => {
const body = await parseFeed(response); const body = await parseFeed(response);
//skip to the next one if this didn't work out //skip to the next one if this didn't work out
if (!body) return; if (!body) return;
try { try {
const contents: FeedItem = ( const contents: FeedItem = (
typeof body === "string" ? await parser.parseString(body) : body typeof body === "string" ? await parser.parseString(body) : body
) as FeedItem; ) as FeedItem;
contents.feed = feed; contents.feed = feed;
contents.title = getTitle(contents); contents.title = getTitle(contents);
contents.link = getLink(contents); contents.link = getLink(contents);
// try to normalize date attribute naming // try to normalize date attribute naming
contents?.items?.forEach(item => { for (const item of contents.items) {
item.timestamp = getTimestamp(item); item.timestamp = getTimestamp(item);
item.title = getTitle(item); item.title = getTitle(item);
item.link = getLink(item); item.link = getLink(item);
}); }
contentFromAllFeeds[group].push(contents as object); contentFromAllFeeds[group].push(contents as object);
process.stdout.write( process.stdout.write(
`${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}\n` `${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}\n`,
); );
} catch (err) { } catch (err) {
process.stdout.write( process.stdout.write(
`${error("Error processing:")} ${feed} - ${benchmark( `${error("Error processing:")} ${feed} - ${benchmark(
startTime startTime,
)}\n${err}\n` )}\n${err}\n`,
); );
errors.push(`Error processing: ${feed}\n\t${err}`); errors.push(`Error processing: ${feed}\n\t${err}`);
} }
finishBuild(); finishBuild();
}; };
// go through each group of feeds and process // go through each group of feeds and process
const processFeeds = () => { const processFeeds = () => {
let idx = 0; let idx = 0;
for (const [group, feeds] of Object.entries(feedList)) { for (const [group, feeds] of Object.entries(feedList)) {
contentFromAllFeeds[group] = []; contentFromAllFeeds[group] = [];
for (const feed of feeds) { for (const feed of feeds) {
const startTime = Date.now(); const startTime = Date.now();
setTimeout(() => { setTimeout(
process.stdout.write(`Fetching: ${feed}...\n`); () => {
process.stdout.write(`Fetching: ${feed}...\n`);
fetch(feed) fetch(feed)
.then(processFeed({ group, feed, startTime })) .then(processFeed({ group, feed, startTime }))
.catch(err => { .catch((err) => {
process.stdout.write( process.stdout.write(
error(`Error fetching ${feed} ${benchmark(startTime)}\n`) error(`Error fetching ${feed} ${benchmark(startTime)}\n`),
); );
errors.push(`Error fetching ${feed} ${err.toString()}\n`); errors.push(`Error fetching ${feed} ${err.toString()}\n`);
finishBuild(); finishBuild();
}); });
}, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS); },
idx++; (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS,
} );
} idx++;
}
}
}; };
processFeeds(); processFeeds();

View file

@@ -6,39 +6,39 @@
import nunjucks from "nunjucks"; import nunjucks from "nunjucks";
const env: nunjucks.Environment = nunjucks.configure({ autoescape: true }); const env: nunjucks.Environment = nunjucks.configure({ autoescape: true });
import { readFile } from "fs/promises"; import { readFile } from "node:fs/promises";
import { Feeds, JSONValue } from "./@types/bubo"; import type { Feeds, JSONValue } from "./@types/bubo";
/** /**
* Global filters for my Nunjucks templates * Global filters for my Nunjucks templates
*/ */
env.addFilter("formatDate", function (dateString): string { env.addFilter("formatDate", (dateString): string => {
const date: Date = new Date(parseInt(dateString)); const date: Date = new Date(Number.parseInt(dateString));
return !isNaN(date.getTime()) ? date.toLocaleDateString() : dateString; return !Number.isNaN(date.getTime()) ? date.toLocaleDateString() : dateString;
}); });
env.addGlobal("now", new Date().toUTCString()); env.addGlobal("now", new Date().toUTCString());
// load the template // load the template
const template: string = ( const template: string = (
await readFile(new URL("../config/template.html", import.meta.url)) await readFile(new URL("../config/template.html", import.meta.url))
).toString(); ).toString();
// generate the static HTML output from our template renderer // generate the static HTML output from our template renderer
const render = ({ const render = ({
data, data,
errors, errors,
info info,
}: { }: {
data: Feeds; data: Feeds;
errors: unknown[]; errors: unknown[];
info?: JSONValue; info?: JSONValue;
}) => { }) => {
return env.renderString(template, { return env.renderString(template, {
data, data,
errors, errors,
info info,
}); });
}; };
export { render }; export { render };

View file

@@ -1,90 +1,90 @@
/* /*
There's a little inconsistency with how feeds report certain things like There's a little inconsistency with how feeds report certain things like
title, links and timestamps. These helpers try to normalize that bit and title, links and timestamps. These helpers try to normalize that bit and
provide an order-of-operations list of properties to look for. provide an order-of-operations list of properties to look for.
Note: these are tightly-coupled to the template and a personal preference. Note: these are tightly-coupled to the template and a personal preference.
*/ */
import { Response } from "node-fetch"; import type { Response } from "node-fetch";
import { readFile } from "fs/promises"; import { readFile } from "node:fs/promises";
import { FeedItem, JSONValue } from "./@types/bubo"; import type { FeedItem, JSONValue } from "./@types/bubo";
export const getLink = (obj: FeedItem): string => { export const getLink = (obj: FeedItem): string => {
const link_values: string[] = ["link", "url", "guid", "home_page_url"]; const link_values: string[] = ["link", "url", "guid", "home_page_url"];
const keys: string[] = Object.keys(obj); const keys: string[] = Object.keys(obj);
const link_property: string | undefined = link_values.find(link_value => const link_property: string | undefined = link_values.find((link_value) =>
keys.includes(link_value) keys.includes(link_value),
); );
return link_property ? (obj[link_property] as string) : ""; return link_property ? (obj[link_property] as string) : "";
}; };
// fallback to URL for the title if not present // fallback to URL for the title if not present
// (title -> url -> link) // (title -> url -> link)
export const getTitle = (obj: FeedItem): string => { export const getTitle = (obj: FeedItem): string => {
const title_values: string[] = ["title", "url", "link"]; const title_values: string[] = ["title", "url", "link"];
const keys: string[] = Object.keys(obj); const keys: string[] = Object.keys(obj);
// if title is empty for some reason, fall back on url or link // if title is empty for some reason, fall back on url or link
const title_property: string | undefined = title_values.find( const title_property: string | undefined = title_values.find(
title_value => keys.includes(title_value) && obj[title_value] (title_value) => keys.includes(title_value) && obj[title_value],
); );
return title_property ? (obj[title_property] as string) : ""; return title_property ? (obj[title_property] as string) : "";
}; };
// More dependable way to get timestamps // More dependable way to get timestamps
export const getTimestamp = (obj: FeedItem): string => { export const getTimestamp = (obj: FeedItem): string => {
const dateString: string = ( const dateString: string = (
obj.pubDate || obj.pubDate ||
obj.isoDate || obj.isoDate ||
obj.date || obj.date ||
obj.date_published obj.date_published
).toString(); ).toString();
const timestamp: number = new Date(dateString).getTime(); const timestamp: number = new Date(dateString).getTime();
return isNaN(timestamp) ? dateString : timestamp.toString(); return Number.isNaN(timestamp) ? dateString : timestamp.toString();
}; };
// parse RSS/XML or JSON feeds // parse RSS/XML or JSON feeds
export async function parseFeed(response: Response): Promise<JSONValue> { export async function parseFeed(response: Response): Promise<JSONValue> {
const contentType = response.headers.get("content-type")?.split(";")[0]; const contentType = response.headers.get("content-type")?.split(";")[0];
if (!contentType) return {}; if (!contentType) return {};
const rssFeed = [contentType] const rssFeed = [contentType]
.map(item => .map((item) =>
[ [
"application/atom+xml", "application/atom+xml",
"application/rss+xml", "application/rss+xml",
"application/xml", "application/xml",
"text/xml", "text/xml",
"text/html" // this is kind of a gamble "text/html", // this is kind of a gamble
].includes(item) ].includes(item)
? response.text() ? response.text()
: false : false,
) )
.filter(_ => _)[0]; .filter((_) => _)[0];
const jsonFeed = [contentType] const jsonFeed = [contentType]
.map(item => .map((item) =>
["application/json", "application/feed+json"].includes(item) ["application/json", "application/feed+json"].includes(item)
? (response.json() as Promise<JSONValue>) ? (response.json() as Promise<JSONValue>)
: false : false,
) )
.filter(_ => _)[0]; .filter((_) => _)[0];
return (rssFeed && rssFeed) || (jsonFeed && jsonFeed) || {}; return (rssFeed && rssFeed) || (jsonFeed && jsonFeed) || {};
} }
export const getFeedList = async (): Promise<JSONValue> => { export const getFeedList = async (): Promise<JSONValue> => {
return JSON.parse( return JSON.parse(
( (
await readFile(new URL("../config/feeds.json", import.meta.url)) await readFile(new URL("../config/feeds.json", import.meta.url))
).toString() ).toString(),
); );
}; };
export const getBuboInfo = async (): Promise<JSONValue> => { export const getBuboInfo = async (): Promise<JSONValue> => {
return JSON.parse( return JSON.parse(
(await readFile(new URL("../package.json", import.meta.url))).toString() (await readFile(new URL("../package.json", import.meta.url))).toString(),
); );
}; };