#!/usr/bin/env -S node --import=tsx
import type { WikipediaPagesInternalLinks } from "@repo/wikipedia-game-solver/wikipedia-api"
import { getWikipediaPageInternalLinks } from "@repo/wikipedia-game-solver/wikipedia-api"
import fs from "node:fs"
import path from "node:path"

/**
 * Crawl script: for every internal link on `fromPageInput`'s Wikipedia page,
 * fetch that page's own internal links and persist everything in a local
 * JSON cache (`cache.json` in the current working directory). Pages already
 * present in the cache are skipped, so the script is resumable.
 */

const localeWikipedia = "en"
const cachePath = path.join(process.cwd(), "cache.json")
const fromPageInput = "New York City"
// Alternative starting points used during exploration:
// const fromPageInput = "Linux"
// const toPageInput = "Node.js"

// Upper bound on how many links from the starting page are crawled per run.
const maxLinksToCrawl = 1100

// Load the existing cache; on first run (file missing) start from an empty
// cache instead of crashing with ENOENT. Any other read/parse error is fatal.
let data = {} as WikipediaPagesInternalLinks
try {
  data = JSON.parse(
    await fs.promises.readFile(cachePath, { encoding: "utf-8" }),
  ) as WikipediaPagesInternalLinks
} catch (error: unknown) {
  if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
    throw error
  }
}

/** Persist the in-memory cache to disk (pretty-printed, 2-space indent). */
const saveCache = async (): Promise<void> => {
  await fs.promises.writeFile(cachePath, JSON.stringify(data, null, 2), {
    encoding: "utf-8",
  })
}

const pageLinks = (data[fromPageInput]?.links ?? []).slice(0, maxLinksToCrawl)
try {
  for (const pageLink of pageLinks) {
    // Skip pages we have already fetched in a previous (or this) run.
    if (pageLink in data) {
      continue
    }
    console.log("Fetching", pageLink)
    // Sequential on purpose — presumably to avoid hammering the Wikipedia
    // API with ~1000 concurrent requests (TODO confirm rate limits before
    // parallelizing with Promise.all).
    data[pageLink] = await getWikipediaPageInternalLinks({
      title: pageLink,
      locale: localeWikipedia,
    })
  }
} finally {
  // Persist progress even if a fetch throws mid-run, so completed work
  // is not lost and the next run resumes where this one stopped.
  await saveCache()
}