diff --git a/.eslintrc.json b/.eslintrc.json deleted file mode 100644 index efced33..0000000 --- a/.eslintrc.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "env": { - "browser": true, - "es2021": true - }, - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/recommended" - ], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": 6, - "sourceType": "module" - }, - "plugins": [ - "@typescript-eslint" - ], - "rules": { - "indent": [ - "error", - 2 - ], - "linebreak-style": [ - "error", - "unix" - ], - "quotes": [ - "error", - "double" - ], - "semi": [ - "error", - "always" - ], - "no-trailing-spaces": [ - 2, - { - "skipBlankLines": false - } - ], - "no-multiple-empty-lines": [ - "error", - { - "max": 2, - "maxEOF": 1 - } - ], - "@typescript-eslint/no-var-requires": 0 - } -} \ No newline at end of file diff --git a/bun.lockb b/bun.lockb index 552b1ed..323630e 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 0000000..871a05a --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,9 @@ +import globals from "globals"; +import pluginJs from "@eslint/js"; +import tseslint from "typescript-eslint"; + +export default [ + { languageOptions: { globals: { ...globals.browser, ...globals.node } } }, + pluginJs.configs.recommended, + ...tseslint.configs.recommended, +]; diff --git a/package.json b/package.json index d0e9173..de10632 100644 --- a/package.json +++ b/package.json @@ -3,12 +3,13 @@ "version": "2.0.2", "description": "A simple but effective feed reader (RSS, JSON)", "homepage": "https://github.com/georgemandis/bubo-rss", - "main": "src/index.js", + "main": "src/index.ts", "type": "module", "scripts": { - "dev": "bun src/index.js --watch", "clean": "rm -rf dist", - "build": "bun src/index.js" + "build": "bun src/index.js", + "lint": "biome lint --write ./src", + "format": "biome format --write ./src ./eslint.config.js" }, "author": { "name": "George Mandis", @@ 
-31,13 +32,15 @@ "rss-parser": "^3.13.0" }, "devDependencies": { - "@types/node": "^20.2.5", + "@biomejs/biome": "^1.8.1", + "@types/bun": "latest", "@types/nunjucks": "^3.2.2", "@types/xml2js": "^0.4.11", - "@typescript-eslint/eslint-plugin": "^5.59.8", - "@typescript-eslint/parser": "^5.59.8", - "eslint": "^8.42.0", "tslib": "^2.5.3", - "typescript": "^5.1.3" - } + "typescript": "^5.1.3", + "typescript-eslint": "^7.13.1" + }, + "trustedDependencies": [ + "@biomejs/biome" + ] } diff --git a/src/@types/bubo.d.ts b/src/@types/bubo.d.ts index 040b51b..1d53a24 100644 --- a/src/@types/bubo.d.ts +++ b/src/@types/bubo.d.ts @@ -1,15 +1,15 @@ export interface Feeds { - [key: string]: object[] + [key: string]: object[]; } export interface FeedItem { - [key: string]: string | number | Date | FeedItem[]; - items: FeedItem[] + [key: string]: string | number | Date | FeedItem[]; + items: FeedItem[]; } //NEW WAY export type JSONValue = - | string - | number - | boolean - | { [x: string]: JSONValue } - | Array<JSONValue>; \ No newline at end of file + | string + | number + | boolean + | { [x: string]: JSONValue } + | Array<JSONValue>; diff --git a/src/index.ts b/src/index.ts index 61762b8..60e7338 100644 --- a/src/index.ts +++ b/src/index.ts @@ -12,25 +12,25 @@ import fetch from "node-fetch"; import Parser from "rss-parser"; -import { Feeds, FeedItem } from "./@types/bubo"; -import { Response } from "node-fetch"; +import type { Feeds, FeedItem } from "./@types/bubo"; +import type { Response } from "node-fetch"; import { render } from "./renderer.js"; import { - getLink, - getTitle, - getTimestamp, - parseFeed, - getFeedList, - getBuboInfo + getLink, + getTitle, + getTimestamp, + parseFeed, + getFeedList, + getBuboInfo, } from "./utilities.js"; -import { writeFile } from "fs/promises"; +import { writeFile } from "node:fs/promises"; import chalk from "chalk"; const buboInfo = await getBuboInfo(); const parser = new Parser(); const feedList = await getFeedList(); const feedListLength = - 
Object.entries(feedList).flat(2).length - Object.keys(feedList).length; + Object.entries(feedList).flat(2).length - Object.keys(feedList).length; /** * contentFromAllFeeds = Contains normalized, aggregated feed data and is passed to template renderer at the end @@ -42,14 +42,14 @@ const errors: unknown[] = []; // benchmarking data + utility const initTime = Date.now(); const benchmark = (startTime: number) => - chalk.cyanBright.bold(`${(Date.now() - startTime) / 1000} seconds`); + chalk.cyanBright.bold(`${(Date.now() - startTime) / 1000} seconds`); /** * These values are used to control throttling/batching the fetches: * - MAX_CONNECTION = max number of fetches to contain in a batch * - DELAY_MS = the delay in milliseconds between batches */ -const MAX_CONNECTIONS = Infinity; +const MAX_CONNECTIONS = Number.POSITIVE_INFINITY; const DELAY_MS = 850; const error = chalk.bold.red; @@ -66,26 +66,26 @@ let completed = 0; * and we want to build the static output. */ const finishBuild: () => void = async () => { - completed++; - // if this isn't the last feed, just return early - if (completed !== feedListLength) return; + completed++; + // if this isn't the last feed, just return early + if (completed !== feedListLength) return; - process.stdout.write("\nDone fetching everything!\n"); + process.stdout.write("\nDone fetching everything!\n"); - // generate the static HTML output from our template renderer - const output = render({ - data: contentFromAllFeeds, - errors: errors, - info: buboInfo - }); + // generate the static HTML output from our template renderer + const output = render({ + data: contentFromAllFeeds, + errors: errors, + info: buboInfo, + }); - // write the output to public/index.html - await writeFile("./public/index.html", output); - process.stdout.write( - `\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark( - initTime - )}\n- ${errors.length} errors\n` - ); + // write the output to public/index.html + await 
writeFile("./public/index.html", output); + process.stdout.write( + `\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark( + initTime, + )}\n- ${errors.length} errors\n`, + ); }; /** @@ -96,77 +96,80 @@ const finishBuild: () => void = async () => { * @returns Promise */ const processFeed = - ({ - group, - feed, - startTime - }: { - group: string; - feed: string; - startTime: number; - }) => - async (response: Response): Promise => { - const body = await parseFeed(response); - //skip to the next one if this didn't work out - if (!body) return; + ({ + group, + feed, + startTime, + }: { + group: string; + feed: string; + startTime: number; + }) => + async (response: Response): Promise => { + const body = await parseFeed(response); + //skip to the next one if this didn't work out + if (!body) return; - try { - const contents: FeedItem = ( - typeof body === "string" ? await parser.parseString(body) : body - ) as FeedItem; + try { + const contents: FeedItem = ( + typeof body === "string" ? 
await parser.parseString(body) : body + ) as FeedItem; - contents.feed = feed; - contents.title = getTitle(contents); - contents.link = getLink(contents); + contents.feed = feed; + contents.title = getTitle(contents); + contents.link = getLink(contents); - // try to normalize date attribute naming - contents?.items?.forEach(item => { - item.timestamp = getTimestamp(item); - item.title = getTitle(item); - item.link = getLink(item); - }); + // try to normalize date attribute naming + for (const item of contents.items) { + item.timestamp = getTimestamp(item); + item.title = getTitle(item); + item.link = getLink(item); + } - contentFromAllFeeds[group].push(contents as object); - process.stdout.write( - `${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}\n` - ); - } catch (err) { - process.stdout.write( - `${error("Error processing:")} ${feed} - ${benchmark( - startTime - )}\n${err}\n` - ); - errors.push(`Error processing: ${feed}\n\t${err}`); - } + contentFromAllFeeds[group].push(contents as object); + process.stdout.write( + `${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}\n`, + ); + } catch (err) { + process.stdout.write( + `${error("Error processing:")} ${feed} - ${benchmark( + startTime, + )}\n${err}\n`, + ); + errors.push(`Error processing: ${feed}\n\t${err}`); + } - finishBuild(); - }; + finishBuild(); + }; // go through each group of feeds and process const processFeeds = () => { - let idx = 0; + let idx = 0; - for (const [group, feeds] of Object.entries(feedList)) { - contentFromAllFeeds[group] = []; + for (const [group, feeds] of Object.entries(feedList)) { + contentFromAllFeeds[group] = []; - for (const feed of feeds) { - const startTime = Date.now(); - setTimeout(() => { - process.stdout.write(`Fetching: ${feed}...\n`); + for (const feed of feeds) { + const startTime = Date.now(); + setTimeout( + () => { + process.stdout.write(`Fetching: ${feed}...\n`); - fetch(feed) - .then(processFeed({ group, feed, startTime })) - 
.catch(err => { - process.stdout.write( - error(`Error fetching ${feed} ${benchmark(startTime)}\n`) - ); - errors.push(`Error fetching ${feed} ${err.toString()}\n`); - finishBuild(); - }); - }, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS); - idx++; - } - } + fetch(feed) + .then(processFeed({ group, feed, startTime })) + .catch((err) => { + process.stdout.write( + error(`Error fetching ${feed} ${benchmark(startTime)}\n`), + ); + errors.push(`Error fetching ${feed} ${err.toString()}\n`); + finishBuild(); + }); + }, + (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS, + ); + idx++; + } + } }; -processFeeds(); \ No newline at end of file +processFeeds(); diff --git a/src/renderer.ts b/src/renderer.ts index 2e5a00d..a42fcc3 100644 --- a/src/renderer.ts +++ b/src/renderer.ts @@ -6,39 +6,39 @@ import nunjucks from "nunjucks"; const env: nunjucks.Environment = nunjucks.configure({ autoescape: true }); -import { readFile } from "fs/promises"; -import { Feeds, JSONValue } from "./@types/bubo"; +import { readFile } from "node:fs/promises"; +import type { Feeds, JSONValue } from "./@types/bubo"; /** * Global filters for my Nunjucks templates */ -env.addFilter("formatDate", function (dateString): string { - const date: Date = new Date(parseInt(dateString)); - return !isNaN(date.getTime()) ? date.toLocaleDateString() : dateString; +env.addFilter("formatDate", (dateString): string => { + const date: Date = new Date(Number.parseInt(dateString)); + return !Number.isNaN(date.getTime()) ? 
date.toLocaleDateString() : dateString; }); env.addGlobal("now", new Date().toUTCString()); // load the template const template: string = ( - await readFile(new URL("../config/template.html", import.meta.url)) + await readFile(new URL("../config/template.html", import.meta.url)) ).toString(); // generate the static HTML output from our template renderer const render = ({ - data, - errors, - info + data, + errors, + info, }: { - data: Feeds; - errors: unknown[]; - info?: JSONValue; + data: Feeds; + errors: unknown[]; + info?: JSONValue; }) => { - return env.renderString(template, { - data, - errors, - info - }); + return env.renderString(template, { + data, + errors, + info, + }); }; export { render }; diff --git a/src/utilities.ts b/src/utilities.ts index 75f4339..9a881cd 100644 --- a/src/utilities.ts +++ b/src/utilities.ts @@ -1,90 +1,90 @@ /* - There's a little inconsistency with how feeds report certain things like - title, links and timestamps. These helpers try to normalize that bit and - provide an order-of-operations list of properties to look for. + There's a little inconsistency with how feeds report certain things like + title, links and timestamps. These helpers try to normalize that bit and + provide an order-of-operations list of properties to look for. - Note: these are tightly-coupled to the template and a personal preference. + Note: these are tightly-coupled to the template and a personal preference. 
*/ -import { Response } from "node-fetch"; -import { readFile } from "fs/promises"; -import { FeedItem, JSONValue } from "./@types/bubo"; +import type { Response } from "node-fetch"; +import { readFile } from "node:fs/promises"; +import type { FeedItem, JSONValue } from "./@types/bubo"; export const getLink = (obj: FeedItem): string => { - const link_values: string[] = ["link", "url", "guid", "home_page_url"]; - const keys: string[] = Object.keys(obj); - const link_property: string | undefined = link_values.find(link_value => - keys.includes(link_value) - ); - return link_property ? (obj[link_property] as string) : ""; + const link_values: string[] = ["link", "url", "guid", "home_page_url"]; + const keys: string[] = Object.keys(obj); + const link_property: string | undefined = link_values.find((link_value) => + keys.includes(link_value), + ); + return link_property ? (obj[link_property] as string) : ""; }; // fallback to URL for the title if not present // (title -> url -> link) export const getTitle = (obj: FeedItem): string => { - const title_values: string[] = ["title", "url", "link"]; - const keys: string[] = Object.keys(obj); + const title_values: string[] = ["title", "url", "link"]; + const keys: string[] = Object.keys(obj); - // if title is empty for some reason, fall back on url or link - const title_property: string | undefined = title_values.find( - title_value => keys.includes(title_value) && obj[title_value] - ); - return title_property ? (obj[title_property] as string) : ""; + // if title is empty for some reason, fall back on url or link + const title_property: string | undefined = title_values.find( + (title_value) => keys.includes(title_value) && obj[title_value], + ); + return title_property ? 
(obj[title_property] as string) : ""; }; // More dependable way to get timestamps export const getTimestamp = (obj: FeedItem): string => { - const dateString: string = ( - obj.pubDate || - obj.isoDate || - obj.date || - obj.date_published - ).toString(); - const timestamp: number = new Date(dateString).getTime(); - return isNaN(timestamp) ? dateString : timestamp.toString(); + const dateString: string = ( + obj.pubDate || + obj.isoDate || + obj.date || + obj.date_published + ).toString(); + const timestamp: number = new Date(dateString).getTime(); + return Number.isNaN(timestamp) ? dateString : timestamp.toString(); }; // parse RSS/XML or JSON feeds export async function parseFeed(response: Response): Promise { - const contentType = response.headers.get("content-type")?.split(";")[0]; + const contentType = response.headers.get("content-type")?.split(";")[0]; - if (!contentType) return {}; + if (!contentType) return {}; - const rssFeed = [contentType] - .map(item => - [ - "application/atom+xml", - "application/rss+xml", - "application/xml", - "text/xml", - "text/html" // this is kind of a gamble - ].includes(item) - ? response.text() - : false - ) - .filter(_ => _)[0]; + const rssFeed = [contentType] + .map((item) => + [ + "application/atom+xml", + "application/rss+xml", + "application/xml", + "text/xml", + "text/html", // this is kind of a gamble + ].includes(item) + ? response.text() + : false, + ) + .filter((_) => _)[0]; - const jsonFeed = [contentType] - .map(item => - ["application/json", "application/feed+json"].includes(item) - ? (response.json() as Promise) - : false - ) - .filter(_ => _)[0]; + const jsonFeed = [contentType] + .map((item) => + ["application/json", "application/feed+json"].includes(item) + ? 
(response.json() as Promise<JSONValue>) + : false, + ) + .filter((_) => _)[0]; - return (rssFeed && rssFeed) || (jsonFeed && jsonFeed) || {}; + return (rssFeed && rssFeed) || (jsonFeed && jsonFeed) || {}; } export const getFeedList = async (): Promise<JSONValue> => { - return JSON.parse( - ( - await readFile(new URL("../config/feeds.json", import.meta.url)) - ).toString() - ); + return JSON.parse( + ( + await readFile(new URL("../config/feeds.json", import.meta.url)) + ).toString(), + ); }; export const getBuboInfo = async (): Promise<JSONValue> => { - return JSON.parse( - (await readFile(new URL("../package.json", import.meta.url))).toString() - ); + return JSON.parse( + (await readFile(new URL("../package.json", import.meta.url))).toString(), + ); };