diff --git a/src/index.ts b/src/index.ts index faf86fb..98eb1ef 100644 --- a/src/index.ts +++ b/src/index.ts @@ -21,28 +21,45 @@ import chalk from "chalk"; const parser = new Parser(); const feedList = await getFeedList(); +// calculate the total number of feeds so we can throttle +const feedListLength = Object.entries(feedList).flat(2).length - Object.keys(feedList).length; const contentFromAllFeeds: Feeds = {}; const errors: unknown[] = []; const allFetches: Promise[] = []; - -const MAX_CONNECTIONS = 2; -let connections = 0; - const initTime = Date.now(); const benchmark = (startTime: number) => chalk.cyanBright.bold(`(${(Date.now() - startTime) / 1000} seconds)`); +// used to throttle fetches +const MAX_CONNECTIONS = Infinity; +const DELAY_MS = 850; + const error = chalk.bold.red; const success = chalk.bold.green; +let completed = 0; + +const finishBuild: () => void = async () => { + console.log("\nDone fetching everything!"); + + // generate the static HTML output from our template renderer + const output = render({ + data: contentFromAllFeeds, + errors: errors + }); + + // write the output to public/index.html + await writeFile("./public/index.html", output); + console.log(`Finished writing to output. 
${benchmark(initTime)}`); +}; + // process each feed and its content const processFeed = ( { group, feed, startTime }: { group: string; feed: string, startTime: number } ) => async (response: Response) => { - connections--; const body = await parseFeed(response); - + completed++; // skip to the next one if this didn't work out if (!body) return; @@ -60,50 +77,36 @@ const processFeed = ( item.title = getTitle(item); item.link = getLink(item); }); + contentFromAllFeeds[group].push(contents as object); console.log(`${success("Successfully fetched:")} ${feed} ${benchmark(startTime)}`); - return true; + } catch (err) { console.log(`${error("Error processing:")} ${feed} ${benchmark(startTime)}`); errors.push(err); - return false; } + // if this is the last feed, go ahead and build the output + (completed === feedListLength - 1) && finishBuild(); }; -// go through each group of feeds and process them - +let idx = 0; +// go through each group of feeds and process for (const [group, feeds] of Object.entries(feedList)) { contentFromAllFeeds[group] = []; - for (const feed of feeds) { - // throttle if we're exceeding MAX_CONNECTIONS - connections++; - console.log(`Fetching: ${feed}...`); const startTime = Date.now(); - allFetches.push( - fetch(feed).then(processFeed({ group, feed, startTime })).catch(err => { - console.log(error(`Error fetching ${feed} ${benchmark(startTime)}`)); - errors.push(`Error fetching ${feed} ${err.toString()}`); - }) - ); + setTimeout(() => { + console.log(`Fetching: ${feed}...`); + allFetches.push( + fetch(feed).then(processFeed({ group, feed, startTime })).catch(err => { + console.log(error(`Error fetching ${feed} ${benchmark(startTime)}`)); + errors.push(`Error fetching ${feed} ${err.toString()}`); + }) + ); + }, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS); + idx++; } } - -// eslint-disable-next-line @typescript-eslint/no-unused-vars -Promise.all(allFetches).then(async () => { - console.log("\nDone fetching everything!"); - - // generate 
the static HTML output from our template renderer - const output = render({ - data: contentFromAllFeeds, - errors: errors - }); - - // write the output to public/index.html - await writeFile("./public/index.html", output); - console.log(`Finished writing to output. ${benchmark(initTime)}`); -}); -