Compare commits: aa2fb4f5b9...63862b19c0 (6 commits)

| SHA1 |
| --- |
| 63862b19c0 |
| 376c0fd041 |
| 207d3483ca |
| 0ce950c9c8 |
| cdc8cf2b05 |
| 02ee112de4 |
@@ -5,4 +5,4 @@ DATABASE_USER=wikipedia_user
 DATABASE_PASSWORD=password
 DATABASE_NAME=wikipedia
 DATABASE_HOST=127.0.0.1
-DATABASE_PORT=3306
+DATABASE_PORT=5432
.gitignore (vendored, 10 changes)
@@ -21,11 +21,13 @@ build/
 *.pem
 .turbo
 tmp/
-cache.json
+
+# data
 data/dump
-data/sql/*
+data/sql-pages-inserts/*
-!data/sql/0000-tables-create.sql
+!data/sql-pages-inserts/0000-pages.sh
-!data/sql/0999-constraints.sql
+data/sql-internal-links-inserts/*
+!data/sql-internal-links-inserts/0000-internal-links.sh
 
 # debug
 npm-debug.log*
TODO.md (13 changes)
@@ -30,17 +30,22 @@
 - [ ] Handle redirects
 - [ ] Implement REST API (`api`) with JSON responses ([AdonisJS](https://adonisjs.com/)) to get shortest paths between 2 pages
 - [x] Init AdonisJS project
-- [ ] Create Lucid models and migrations for Wikipedia Database Dump: `pages` and `internal_links` tables
+- [x] Create Lucid models and migrations for Wikipedia Database Dump: `pages` and `internal_links` tables
-- [ ] Implement `GET /wikipedia/pages?title=Node.js` to search a page by title (not necessarily with the title sanitized, search with input by user to check if page exists)
+- [x] Implement `GET /wikipedia/pages?title=Node.js` to search a page by title (not necessarily with the title sanitized, search with input by user to check if page exists)
-- [ ] Implement `GET /wikipedia/pages/internal-links/paths?from=Node.js&to=Linux` to get all the possible paths between 2 pages with titles sanitized
+- [x] Implement `GET /wikipedia/pages/[id]` to get a page and all its internal links with the pageId
+- [ ] Implement `GET /wikipedia/internal-links/paths?fromPageId=id&toPageId=id` to get all the possible paths between 2 pages
+- [x] Setup tests with database + add coverage
+- [x] Setup Health checks
+- [x] Setup Rate limiting
+- [ ] Share VineJS validators between `website` and `api`
 - [ ] Implement Wikipedia Game Solver (`website`)
 - [x] Init Next.js project
 - [ ] Try to use <https://www.npmjs.com/package/@tuyau/client> for API calls
 - [ ] Hard code 2 pages to test if it works with `console.log` in the browser
 - [ ] Implement a form with inputs, button to submit, and list all pages to go from one to another, or none if it is not possible
 - [ ] Add images, links to the pages + good UI/UX
-- [ ] Implement toast notifications for errors, warnings, and success messages
 - [ ] Autocompletion page titles
+- [ ] Implement toast notifications for errors, warnings, and success messages
 - [ ] Implement CLI (`cli`)
 - [ ] Init Clipanion project
 - [ ] Implement `wikipedia-game-solver internal-links --from="Node.js" --to="Linux"` command to get all the possible paths between 2 pages.
apps/api/.c8rc.json (new Normal file, 10 lines)
@@ -0,0 +1,10 @@
+{
+  "reporter": ["text", "html", "json"],
+  "exclude": [
+    "src/adonisrc.ts",
+    "src/tests/**",
+    "src/database/**",
+    "src/config/**",
+    "src/bin/**"
+  ]
+}
@@ -9,4 +9,6 @@ DATABASE_USER=wikipedia_user
 DATABASE_PASSWORD=password
 DATABASE_NAME=wikipedia
 DATABASE_HOST=127.0.0.1
-DATABASE_PORT=3306
+DATABASE_PORT=5432
+
+LIMITER_STORE=database
@@ -10,7 +10,7 @@
     "start": "node --import=tsx ./src/bin/server.ts",
     "dev": "node --import=tsx --watch --watch-preserve-output ./src/bin/server.ts",
     "ace": "node --import=tsx ./src/bin/console.ts",
-    "test": "node --import=tsx ./src/bin/test.ts",
+    "test": "c8 node --import=tsx ./src/bin/test.ts",
     "lint:eslint": "eslint src --max-warnings 0 --report-unused-disable-directives",
     "lint:typescript": "tsc --noEmit"
   },
@@ -19,11 +19,12 @@
     "@adonisjs/core": "catalog:",
     "@adonisjs/cors": "catalog:",
     "@adonisjs/lucid": "catalog:",
+    "@adonisjs/limiter": "catalog:",
     "@repo/utils": "workspace:*",
     "@repo/wikipedia-game-solver": "workspace:*",
     "@vinejs/vine": "catalog:",
     "luxon": "catalog:",
-    "mysql2": "catalog:",
+    "pg": "catalog:",
     "reflect-metadata": "catalog:",
     "tsx": "catalog:",
     "pino-pretty": "catalog:"
@@ -39,6 +40,8 @@
     "@total-typescript/ts-reset": "catalog:",
     "@types/luxon": "catalog:",
     "@types/node": "catalog:",
+    "better-sqlite3": "catalog:",
+    "c8": "catalog:",
     "eslint": "catalog:",
     "openapi-types": "catalog:",
     "typescript": "catalog:"
@@ -34,6 +34,9 @@ export default defineConfig({
     async () => {
       return await import("@adonisjs/auth/auth_provider")
     },
+    async () => {
+      return await import("@adonisjs/limiter/limiter_provider")
+    },
   ],
   preloads: [
     async () => {
@@ -13,9 +13,9 @@ export default class HttpExceptionHandler extends ExceptionHandler {
   */
  public override async handle(
    error: unknown,
-    ctx: HttpContext,
+    context: HttpContext,
  ): Promise<unknown> {
-    return await super.handle(error, ctx)
+    return await super.handle(error, context)
  }

  /**
@@ -25,8 +25,8 @@ export default class HttpExceptionHandler extends ExceptionHandler {
   */
  public override async report(
    error: unknown,
-    ctx: HttpContext,
+    context: HttpContext,
  ): Promise<void> {
-    return await super.report(error, ctx)
+    return await super.report(error, context)
  }
 }
apps/api/src/app/middleware/app_key_security_middleware.ts (new Normal file, 12 lines)
@@ -0,0 +1,12 @@
+import { APP_KEY, APP_KEY_HEADER_NAME } from "#config/app.js"
+import type { HttpContext } from "@adonisjs/core/http"
+import type { NextFn } from "@adonisjs/core/types/http"
+
+export default class AppKeySecurityMiddleware {
+  public async handle(context: HttpContext, next: NextFn): Promise<void> {
+    if (context.request.header(APP_KEY_HEADER_NAME) === APP_KEY) {
+      return next()
+    }
+    return context.response.unauthorized({ message: "Unauthorized access" })
+  }
+}
@@ -3,23 +3,22 @@ import type { HttpContext } from "@adonisjs/core/http"
 import type { NextFn } from "@adonisjs/core/types/http"
 
 /**
- * Auth middleware is used authenticate HTTP requests and deny
- * access to unauthenticated users.
+ * Auth middleware is used authenticate HTTP requests and deny access to unauthenticated users.
  */
 export default class AuthMiddleware {
   /**
-   * The URL to redirect to, when authentication fails
+   * The URL to redirect to, when authentication fails.
    */
   redirectTo = "/login"
 
   public async handle(
-    ctx: HttpContext,
+    context: HttpContext,
     next: NextFn,
     options: {
       guards?: Array<keyof Authenticators>
     } = {},
   ): Promise<void> {
-    await ctx.auth.authenticateUsing(options.guards, {
+    await context.auth.authenticateUsing(options.guards, {
       loginRoute: this.redirectTo,
     })
     return next()
@@ -5,13 +5,13 @@ import type { NextFn } from "@adonisjs/core/types/http"
 /**
  * The container bindings middleware binds classes to their request specific value using the container resolver.
  *
- * - We bind "HttpContext" class to the "ctx" object.
- * - And bind "Logger" class to the "ctx.logger" object.
+ * - We bind "HttpContext" class to the "context" object.
+ * - And bind "Logger" class to the "context.logger" object.
  */
 export default class ContainerBindingsMiddleware {
-  public async handle(ctx: HttpContext, next: NextFn): Promise<void> {
-    ctx.containerResolver.bindValue(HttpContext, ctx)
-    ctx.containerResolver.bindValue(Logger, ctx.logger)
+  public async handle(context: HttpContext, next: NextFn): Promise<void> {
+    context.containerResolver.bindValue(HttpContext, context)
+    context.containerResolver.bindValue(Logger, context.logger)
 
     return next()
   }
apps/api/src/app/models/page.ts (new Normal file, 30 lines)
@@ -0,0 +1,30 @@
+import { BaseModel, column, manyToMany } from "@adonisjs/lucid/orm"
+import type { ManyToMany } from "@adonisjs/lucid/types/relations"
+
+export default class Page extends BaseModel {
+  protected tableName = "pages"
+
+  @column({ columnName: "id", serializeAs: "id", isPrimary: true })
+  declare id: number
+
+  @column({
+    columnName: "title",
+    serializeAs: "title",
+  })
+  declare title: string
+
+  @manyToMany(
+    () => {
+      return Page
+    },
+    {
+      pivotTable: "internal_links",
+      localKey: "id",
+      relatedKey: "id",
+      pivotForeignKey: "from_page_id",
+      pivotRelatedForeignKey: "to_page_id",
+      serializeAs: "internalLinks",
+    },
+  )
+  declare internalLinks: ManyToMany<typeof Page>
+}
@@ -25,7 +25,7 @@ export default class User extends compose(BaseModel, AuthFinder) {
     columnName: "full_name",
     serializeAs: "fullName",
   })
-  declare fullName: string | null
+  declare fullName: string
 
   @column({
     columnName: "email",
@@ -49,7 +49,7 @@ export default class User extends compose(BaseModel, AuthFinder) {
     autoCreate: true,
     autoUpdate: true,
   })
-  declare updatedAt: DateTime | null
+  declare updatedAt: DateTime
 
   static accessTokens = DbAccessTokensProvider.forModel(User)
 }
@@ -1,7 +1,7 @@
 import { test } from "@japa/runner"
 
 test.group("GET /", () => {
-  test("should get hello world", async ({ client }) => {
+  test("should succeeds and get hello world", async ({ client }) => {
     // Arrange - Given
 
     // Act - When
apps/api/src/app/routes/health/__tests__/get.test.ts (new Normal file, 33 lines)
@@ -0,0 +1,33 @@
+import { APP_KEY, APP_KEY_HEADER_NAME } from "#config/app.js"
+import { test } from "@japa/runner"
+
+test.group("GET /health", () => {
+  test("should succeeds and get `isHealthy: true`", async ({
+    client,
+    assert,
+  }) => {
+    // Arrange - Given
+
+    // Act - When
+    const response = await client
+      .get("/health")
+      .header(APP_KEY_HEADER_NAME, APP_KEY)
+    const responseBody = response.body()
+
+    // Assert - Then
+    response.assertStatus(200)
+    assert.equal(responseBody.isHealthy, true)
+  })
+
+  test("should fails and unauthorized when the app key is not provided", async ({
+    client,
+  }) => {
+    // Arrange - Given
+
+    // Act - When
+    const response = await client.get("/health")
+
+    // Assert - Then
+    response.assertStatus(401)
+  })
+})
apps/api/src/app/routes/health/get.ts (new Normal file, 16 lines)
@@ -0,0 +1,16 @@
+import { healthChecks } from "#start/health.js"
+import { middleware } from "#start/kernel.js"
+import type { HttpContext } from "@adonisjs/core/http"
+import router from "@adonisjs/core/services/router"
+
+class Controller {
+  public async handle(context: HttpContext): Promise<void> {
+    const report = await healthChecks.run()
+    if (report.isHealthy) {
+      return context.response.ok(report)
+    }
+    return context.response.serviceUnavailable(report)
+  }
+}
+
+router.get("/health", [Controller]).use(middleware.appKeySecurity())
@@ -1 +1,4 @@
-import "./get.js"
+import "#app/routes/get.js"
+import "#app/routes/health/get.js"
+import "#app/routes/wikipedia/pages/[id]/get.js"
+import "#app/routes/wikipedia/pages/get.js"
@@ -0,0 +1,52 @@
+import { PageFactory } from "#database/factories/page_factory.js"
+import testUtils from "@adonisjs/core/services/test_utils"
+import db from "@adonisjs/lucid/services/db"
+import { test } from "@japa/runner"
+
+test.group("GET /wikipedia/pages/[id]", (group) => {
+  group.each.setup(async () => {
+    return await testUtils.db().truncate()
+  })
+
+  test("should succeeds and get the page with the given id, and get all its internal links", async ({
+    client,
+  }) => {
+    // Arrange - Given
+    const page = await PageFactory.create()
+    const pages = await PageFactory.createMany(10)
+    const internalLinksPages = pages.slice(0, 5)
+    await Promise.all(
+      internalLinksPages.map(async (internalLinkPage) => {
+        await db.table("internal_links").insert({
+          from_page_id: page.id,
+          to_page_id: internalLinkPage.id,
+        })
+      }),
+    )
+
+    // Act - When
+    const response = await client.get(`/wikipedia/pages/${page.id}`)
+
+    // Assert - Then
+    response.assertStatus(200)
+    response.assertBody({
+      ...page.toJSON(),
+      internalLinks: internalLinksPages.map((page) => {
+        return page.toJSON()
+      }),
+    })
+  })
+
+  test("should fails with a 404 status code when the page with the given id does not exist", async ({
+    client,
+  }) => {
+    // Arrange - Given
+    const page = await PageFactory.create()
+
+    // Act - When
+    const response = await client.get(`/wikipedia/pages/${page.id + 1}`)
+
+    // Assert - Then
+    response.assertStatus(404)
+  })
+})
apps/api/src/app/routes/wikipedia/pages/[id]/get.ts (new Normal file, 24 lines)
@@ -0,0 +1,24 @@
+import Page from "#app/models/page.js"
+import { throttle } from "#start/limiter.js"
+import type { HttpContext } from "@adonisjs/core/http"
+import router from "@adonisjs/core/services/router"
+import vine from "@vinejs/vine"
+
+const requestValidator = vine.compile(
+  vine.object({
+    params: vine.object({
+      id: vine.number().withoutDecimals().positive(),
+    }),
+  }),
+)
+
+class Controller {
+  public async handle(context: HttpContext): Promise<Page> {
+    const payload = await context.request.validateUsing(requestValidator)
+    const page = await Page.findOrFail(payload.params.id)
+    await page.load("internalLinks")
+    return page
+  }
+}
+
+router.get("/wikipedia/pages/:id", [Controller]).use(throttle)
@@ -0,0 +1,93 @@
+import Page from "#app/models/page.js"
+import { PageFactory } from "#database/factories/page_factory.js"
+import testUtils from "@adonisjs/core/services/test_utils"
+import { test } from "@japa/runner"
+
+test.group("GET /wikipedia/pages", (group) => {
+  group.each.setup(async () => {
+    return await testUtils.db().truncate()
+  })
+
+  test("should succeeds and get the page with the given title", async ({
+    client,
+  }) => {
+    // Arrange - Given
+    const page = await PageFactory.create()
+    await PageFactory.createMany(10)
+
+    // Act - When
+    const searchParams = new URLSearchParams({ title: page.title })
+    const response = await client.get(
+      `/wikipedia/pages?${searchParams.toString()}`,
+    )
+
+    // Assert - Then
+    response.assertStatus(200)
+    response.assertBody([page.toJSON()])
+  })
+
+  test("should succeeds and get the pages with title that starts with the title given and limit", async ({
+    client,
+    assert,
+  }) => {
+    // Arrange - Given
+    const limit = 4
+    const title = "No"
+    const pagesMatching = await Page.createMany([
+      { title: "Node.js" },
+      { title: "North_America" },
+      { title: "NoSQL" },
+      { title: "No" },
+      { title: "Nobel_Prize" },
+      { title: "Norway" },
+    ])
+    await Page.createMany([{ title: "Linux" }, { title: "Abc" }])
+
+    // Act - When
+    const searchParams = new URLSearchParams({
+      title,
+      limit: limit.toString(),
+    })
+    const response = await client.get(
+      `/wikipedia/pages?${searchParams.toString()}`,
+    )
+    const responseBody = response.body()
+
+    // Assert - Then
+    response.assertStatus(200)
+    response.assertBody(
+      pagesMatching.slice(0, limit).map((page) => {
+        return page.toJSON()
+      }),
+    )
+    assert.equal(responseBody.length, limit)
+  })
+
+  test('should fails when "title" is not provided', async ({ client }) => {
+    // Act - When
+    const response = await client.get("/wikipedia/pages")
+
+    // Assert - Then
+    response.assertStatus(422)
+  })
+
+  test('should fails when "limit" is too high (more than 100)', async ({
+    client,
+  }) => {
+    // Arrange - Given
+    const title = "No"
+    const limit = 101
+
+    // Act - When
+    const searchParams = new URLSearchParams({
+      title,
+      limit: limit.toString(),
+    })
+    const response = await client.get(
+      `/wikipedia/pages?${searchParams.toString()}`,
+    )
+
+    // Assert - Then
+    response.assertStatus(422)
+  })
+})
apps/api/src/app/routes/wikipedia/pages/get.ts (new Normal file, 37 lines)
@@ -0,0 +1,37 @@
+import Page from "#app/models/page.js"
+import { throttle } from "#start/limiter.js"
+import type { HttpContext } from "@adonisjs/core/http"
+import router from "@adonisjs/core/services/router"
+import { sanitizePageTitle } from "@repo/wikipedia-game-solver/wikipedia-utils"
+import vine from "@vinejs/vine"
+
+const requestValidator = vine.compile(
+  vine.object({
+    title: vine
+      .string()
+      .minLength(1)
+      .maxLength(255)
+      .transform((value) => {
+        return sanitizePageTitle(value)
+      }),
+    limit: vine
+      .number()
+      .parse((value) => {
+        return value ?? 5
+      })
+      .withoutDecimals()
+      .range([1, 100]),
+  }),
+)
+
+class Controller {
+  public async handle(context: HttpContext): Promise<Page[]> {
+    const payload = await context.request.validateUsing(requestValidator)
+    const pages = await Page.query()
+      .whereLike("title", `${payload.title}%`)
+      .limit(payload.limit)
+    return pages
+  }
+}
+
+router.get("/wikipedia/pages", [Controller]).use(throttle)
@@ -7,6 +7,8 @@
  */
 
 process.env["NODE_ENV"] = "test"
+process.env["PORT"] = "3333"
+process.env["LIMITER_STORE"] = "memory"
 
 import { Ignitor, prettyPrintError } from "@adonisjs/core"
 import { configure, processCLIArgs, run } from "@japa/runner"
@@ -9,7 +9,9 @@ import app from "@adonisjs/core/services/app"
 * The encryption module will fail to decrypt data if the key is lost or changed.
 * Therefore it is recommended to keep the app key secure.
 */
-export const appKey = new Secret(env.get("APP_KEY"))
+export const APP_KEY_HEADER_NAME = "x-app-key"
+export const APP_KEY = env.get("APP_KEY")
+export const appKey = new Secret(APP_KEY)
 
 /**
 * The configuration settings used by the HTTP server
@@ -15,7 +15,7 @@ const bodyParserConfig = defineConfig({
   },
 
   /**
-   * Config for the JSON parser
+   * Config for the JSON parser.
   */
   json: {
     convertEmptyStringsToNull: true,
@@ -1,11 +1,14 @@
 import env from "#start/env.js"
+import app from "@adonisjs/core/services/app"
 import { defineConfig } from "@adonisjs/lucid"
 
 const databaseConfig = defineConfig({
-  connection: "mysql",
+  prettyPrintDebugQueries: !app.inProduction,
+  connection: app.inTest ? "sqlite" : "postgres",
   connections: {
-    mysql: {
-      client: "mysql2",
+    postgres: {
+      debug: app.inDev,
+      client: "pg",
       connection: {
         host: env.get("DATABASE_HOST"),
         port: env.get("DATABASE_PORT"),
@@ -18,6 +21,17 @@ const databaseConfig = defineConfig({
         paths: ["database/migrations"],
       },
     },
+    sqlite: {
+      client: "better-sqlite3",
+      connection: {
+        filename: ":memory:",
+      },
+      useNullAsDefault: true,
+      migrations: {
+        naturalSort: true,
+        paths: ["database/migrations"],
+      },
+    },
   },
 })
 
@@ -16,8 +16,7 @@ const hashConfig = defineConfig({
 export default hashConfig
 
 /**
- * Inferring types for the list of hashers you have configured
- * in your application.
+ * Inferring types for the list of hashers you have configured in your application.
 */
 declare module "@adonisjs/core/types" {
   export interface HashersList extends InferHashers<typeof hashConfig> {}
apps/api/src/config/limiter.ts (new Normal file, 26 lines)
@@ -0,0 +1,26 @@
+import env from "#start/env.js"
+import { defineConfig, stores } from "@adonisjs/limiter"
+
+const limiterConfig = defineConfig({
+  default: env.get("LIMITER_STORE"),
+  stores: {
+    /**
+     * Database store to save rate limiting data inside a database.
+     */
+    database: stores.database({
+      tableName: "rate_limits",
+      clearExpiredByTimeout: true,
+    }),
+
+    /**
+     * Memory store could be used during testing.
+     */
+    memory: stores.memory({}),
+  },
+})
+
+export default limiterConfig
+
+declare module "@adonisjs/limiter/types" {
+  export interface LimitersList extends InferLimiters<typeof limiterConfig> {}
+}
apps/api/src/database/factories/page_factory.ts (new Normal file, 14 lines)
@@ -0,0 +1,14 @@
+import Page from "#app/models/page.js"
+import factory from "@adonisjs/lucid/factories"
+import { sanitizePageTitle } from "@repo/wikipedia-game-solver/wikipedia-utils"
+
+export const PageFactory = factory
+  .define(Page, async ({ faker }) => {
+    return {
+      title: sanitizePageTitle(faker.commerce.productName()),
+    }
+  })
+  .relation("internalLinks", () => {
+    return []
+  })
+  .build()
@@ -6,12 +6,12 @@ export default class CreateUsersTable extends BaseSchema {
   public override async up(): Promise<void> {
     void this.schema.createTable(this.tableName, (table) => {
       table.increments("id").notNullable()
-      table.string("full_name").nullable()
+      table.string("full_name").notNullable()
       table.string("email", 254).notNullable().unique()
       table.string("password").notNullable()
 
       table.timestamp("created_at").notNullable()
-      table.timestamp("updated_at").nullable()
+      table.timestamp("updated_at").notNullable()
     })
   }
 
@@ -10,9 +10,9 @@ export default class CreateAccessTokensTable extends BaseSchema {
         .integer("tokenable_id")
         .notNullable()
         .unsigned()
-        .references("id")
-        .inTable("users")
+        .references("users.id")
         .onDelete("CASCADE")
+        .onUpdate("CASCADE")
 
       table.string("type").notNullable()
       table.string("name").nullable()
@@ -0,0 +1,16 @@
+import { BaseSchema } from "@adonisjs/lucid/schema"
+
+export default class CreatePagesTable extends BaseSchema {
+  protected tableName = "pages"
+
+  public override async up(): Promise<void> {
+    void this.schema.createTable(this.tableName, (table) => {
+      table.increments("id").notNullable()
+      table.string("title", 255).notNullable().unique()
+    })
+  }
+
+  public override async down(): Promise<void> {
+    void this.schema.dropTable(this.tableName)
+  }
+}
@@ -0,0 +1,29 @@
+import { BaseSchema } from "@adonisjs/lucid/schema"
+
+export default class CreateInternalLinksTable extends BaseSchema {
+  protected tableName = "internal_links"
+
+  public override async up(): Promise<void> {
+    void this.schema.createTable(this.tableName, (table) => {
+      table.primary(["from_page_id", "to_page_id"])
+      table
+        .integer("from_page_id")
+        .unsigned()
+        .notNullable()
+        .references("pages.id")
+        .onDelete("CASCADE")
+        .onUpdate("CASCADE")
+      table
+        .integer("to_page_id")
+        .unsigned()
+        .notNullable()
+        .references("pages.id")
+        .onDelete("CASCADE")
+        .onUpdate("CASCADE")
+    })
+  }
+
+  public override async down(): Promise<void> {
+    void this.schema.dropTable(this.tableName)
+  }
+}
@@ -0,0 +1,17 @@
+import { BaseSchema } from "@adonisjs/lucid/schema"
+
+export default class CreateRateLimitsTable extends BaseSchema {
+  protected tableName = "rate_limits"
+
+  public override async up(): Promise<void> {
+    void this.schema.createTable(this.tableName, (table) => {
+      table.string("key", 255).notNullable().primary()
+      table.integer("points", 9).notNullable().defaultTo(0)
+      table.bigint("expire").unsigned()
+    })
+  }
+
+  public override async down(): Promise<void> {
+    void this.schema.dropTable(this.tableName)
+  }
+}
@@ -19,11 +19,16 @@ export default await Env.create(new URL("../..", import.meta.url), {
   ] as const),
 
   /**
-   * Variables for configuring database connection
+   * Variables for configuring database connection.
   */
   DATABASE_HOST: Env.schema.string({ format: "host" }),
   DATABASE_PORT: Env.schema.number(),
   DATABASE_USER: Env.schema.string(),
   DATABASE_PASSWORD: Env.schema.string(),
   DATABASE_NAME: Env.schema.string(),
+
+  /**
+   * Variables for configuring the limiter package.
+   */
+  LIMITER_STORE: Env.schema.enum(["database", "memory"] as const),
 })
apps/api/src/start/health.ts (new Normal file, 13 lines)
@@ -0,0 +1,13 @@
+import {
+  DiskSpaceCheck,
+  HealthChecks,
+  MemoryHeapCheck,
+} from "@adonisjs/core/health"
+import { DbCheck } from "@adonisjs/lucid/database"
+import db from "@adonisjs/lucid/services/db"
+
+export const healthChecks = new HealthChecks().register([
+  new DiskSpaceCheck(),
+  new MemoryHeapCheck(),
+  new DbCheck(db.connection()),
+])
@@ -8,17 +8,14 @@ import router from "@adonisjs/core/services/router"
 import server from "@adonisjs/core/services/server"
 
 /**
- * The error handler is used to convert an exception
- * to a HTTP response.
+ * The error handler is used to convert an exception to a HTTP response.
  */
 server.errorHandler(async () => {
   return await import("#app/exceptions/handler.js")
 })
 
 /**
- * The server middleware stack runs middleware on all the HTTP
- * requests, even if there is no route registered for
- * the request URL.
+ * The server middleware stack runs middleware on all the HTTP requests, even if there is no route registered for the requested URL.
  */
 server.use([
   async () => {
@@ -48,6 +45,9 @@ router.use([
  * Named middleware collection must be explicitly assigned to the routes or the routes group.
  */
 export const middleware = router.named({
+  appKeySecurity: async () => {
+    return await import("#app/middleware/app_key_security_middleware.js")
+  },
   auth: async () => {
     return await import("#app/middleware/auth_middleware.js")
   },
apps/api/src/start/limiter.ts (new Normal file, 16 lines)
@@ -0,0 +1,16 @@
+/**
+ * Define HTTP limiters
+ *
+ * The "limiter.define" method creates an HTTP middleware to apply rate limits on a route or a group of routes. Feel free to define as many throttle middleware as needed.
+ */
+
+import { APP_KEY, APP_KEY_HEADER_NAME } from "#config/app.js"
+import app from "@adonisjs/core/services/app"
+import limiter from "@adonisjs/limiter/services/main"
+
+export const throttle = limiter.define("global", (context) => {
+  if (app.inTest || context.request.header(APP_KEY_HEADER_NAME) === APP_KEY) {
+    return limiter.noLimit()
+  }
+  return limiter.allowRequests(120).every("1 minute")
+})
@@ -26,7 +26,11 @@ export const plugins: Config["plugins"] = [
 * The teardown functions are executer after all the tests.
 */
 export const runnerHooks: Required<Pick<Config, "setup" | "teardown">> = {
-  setup: [],
+  setup: [
+    async () => {
+      return await testUtils.db().truncate()
+    },
+  ],
   teardown: [],
 }
 
@@ -1,27 +1,20 @@
 services:
   wikipedia-solver-dev-database:
     container_name: "wikipedia-solver-dev-database"
-    image: "mariadb:10.6.17"
+    image: "postgres:16.3"
     restart: "unless-stopped"
     env_file: ".env"
    environment:
-      MARIADB_USER: ${DATABASE_USER}
-      MARIADB_PASSWORD: ${DATABASE_PASSWORD}
-      MARIADB_ROOT_PASSWORD: ${DATABASE_PASSWORD}
-      MARIADB_DATABASE: ${DATABASE_NAME}
+      POSTGRES_USER: ${DATABASE_USER}
+      POSTGRES_PASSWORD: ${DATABASE_PASSWORD}
+      POSTGRES_DB: ${DATABASE_NAME}
     command: |
-      --innodb_buffer_pool_size=4G
-      --key-buffer-size=4G
-      --innodb_log_buffer_size=256M
-      --innodb_log_file_size=1G
-      --innodb_write_io_threads=16
-      --innodb_flush_log_at_trx_commit=0
-      --max_allowed_packet=1G
+      --max_wal_size=4GB
     ports:
-      - "${DATABASE_PORT-3306}:${DATABASE_PORT-3306}"
+      - "${DATABASE_PORT-5432}:${DATABASE_PORT-5432}"
     volumes:
-      - "wikipedia-solver-dev-mariadb-data:/var/lib/mysql"
-      # - "./sql:/docker-entrypoint-initdb.d/"
+      - "wikipedia-solver-dev-postgres-data:/var/lib/postgresql/data"
+      - "./data:/data/"
 
   wikipedia-solver-dev-adminer:
     container_name: "wikipedia-solver-dev-adminer"
@@ -38,4 +31,4 @@ services:
       - "./data/adminer/fonts/:/var/www/html/fonts"
 
 volumes:
-  wikipedia-solver-dev-mariadb-data:
+  wikipedia-solver-dev-postgres-data:
compose.yaml (25 changes)
@@ -27,27 +27,20 @@ services:
 
   wikipedia-solver-database:
     container_name: "wikipedia-solver-database"
-    image: "mariadb:10.6.17"
+    image: "postgres:16.3"
     restart: "unless-stopped"
     env_file: ".env"
    environment:
-      MARIADB_USER: ${DATABASE_USER}
-      MARIADB_PASSWORD: ${DATABASE_PASSWORD}
-      MARIADB_ROOT_PASSWORD: ${DATABASE_PASSWORD}
-      MARIADB_DATABASE: ${DATABASE_NAME}
+      POSTGRES_USER: ${DATABASE_USER}
+      POSTGRES_PASSWORD: ${DATABASE_PASSWORD}
+      POSTGRES_DB: ${DATABASE_NAME}
     command: |
-      --innodb_buffer_pool_size=4G
-      --key-buffer-size=4G
-      --innodb_log_buffer_size=256M
-      --innodb_log_file_size=1G
-      --innodb_write_io_threads=16
-      --innodb_flush_log_at_trx_commit=0
-      --max_allowed_packet=1G
+      --max_wal_size=4GB
     ports:
-      - "${DATABASE_PORT-3306}:${DATABASE_PORT-3306}"
+      - "${DATABASE_PORT-5432}:${DATABASE_PORT-5432}"
     volumes:
-      - "wikipedia-solver-mariadb-data:/var/lib/mysql"
-      # - "./sql:/docker-entrypoint-initdb.d/"
+      - "wikipedia-solver-postgres-data:/var/lib/postgresql/data"
+      - "./data:/data/"
 
 volumes:
-  wikipedia-solver-mariadb-data:
+  wikipedia-solver-postgres-data:
@@ -1,3 +0,0 @@
-DATABASE_USER=wikipedia_user
-DATABASE_PASSWORD=password
-DATABASE_NAME=wikipedia
@@ -2,7 +2,11 @@
 
 ```sh
 ./download-wikipedia-dump.sh
-node --max-old-space-size=8096 database-wikipedia.js
+node --max-old-space-size=8096 generate-sql-files.js
+
+# Inside the Database container
+docker exec -it wikipedia-solver-dev-database sh
+/data/execute-sql.sh
 ```
 
 ## Utils
@@ -11,13 +15,7 @@ Show the first 10 line of sql file: `head -n 10 ./dump/page.sql`
 
 Show the first 10 characters of sql file: `head -c 10 ./dump/page.sql`
 
-To inspect volume size used by database: `docker system df -v | grep 'wikipedia-solver-mariadb-data'`
+To inspect volume size used by database: `docker system df -v`
 
-To enter in the database container: `docker exec -it wikipedia-solver-database sh`
-
-Then: `mariadb --password="${DATABASE_PASSWORD}" --user="${DATABASE_USER}"`
-
-And `use wikipedia;`, for example: `SELECT * FROM pages LIMIT 10;` or to execute a SQL script: `source /docker-entrypoint-initdb.d/3-internal-links-inserts.sql;`.
-
 ## Remove a volume
 
@@ -32,15 +30,22 @@ docker volume rm data_wikipedia-solver-mariadb-data
 docker-compose down --volumes
 ```
 
-## MySQL Related
+## PostgreSQL Related
 
-<https://stackoverflow.com/questions/43954631/issues-with-wikipedia-dump-table-pagelinks>
+<https://stackoverflow.com/questions/12206600/how-to-speed-up-insertion-performance-in-postgresql>
 
-MySQL any way to import a huge (32 GB) sql dump faster?: <https://stackoverflow.com/questions/40384864/importing-wikipedia-dump-to-mysql>
+```sh
+docker exec -it wikipedia-solver-dev-database sh
 
-Import data.sql MySQL Docker Container: <https://stackoverflow.com/questions/43880026/import-data-sql-mysql-docker-container>
+psql --username="${DATABASE_USER}" --dbname="${DATABASE_NAME}"
+```
 
-<https://dba.stackexchange.com/questions/83125/mysql-any-way-to-import-a-huge-32-gb-sql-dump-faster>
+```sql
+-- Execute script with inserts
+\i /data/sql-pages-inserts/0001-pages-inserts.sql
+
+/data/sql-internal-links-inserts/0001-internal-links.sh
+```
 
 ## Dumps Links
 
@@ -1,39 +0,0 @@
-services:
-  wikipedia-solver-database:
-    container_name: "wikipedia-solver-database"
-    image: "mariadb:10.6.17"
-    restart: "unless-stopped"
-    env_file: ".env"
-    environment:
-      MARIADB_USER: ${DATABASE_USER}
-      MARIADB_PASSWORD: ${DATABASE_PASSWORD}
-      MARIADB_ROOT_PASSWORD: ${DATABASE_PASSWORD}
-      MARIADB_DATABASE: ${DATABASE_NAME}
-    command: |
-      --innodb_buffer_pool_size=4G
-      --key-buffer-size=4G
-      --innodb_log_buffer_size=256M
-      --innodb_log_file_size=1G
-      --innodb_write_io_threads=16
-      --innodb_flush_log_at_trx_commit=0
-      --max_allowed_packet=1G
-    volumes:
-      - "wikipedia-solver-mariadb-data:/var/lib/mysql"
-      - "./sql:/docker-entrypoint-initdb.d/"
-
-  adminer:
-    container_name: "adminer"
-    image: "adminer:4.8.1"
-    restart: "unless-stopped"
-    ports:
-      - "8080:8080"
-    env_file: ".env"
-    environment:
-      ADMINER_DEFAULT_SERVER: "wikipedia-solver-database"
-    volumes:
-      - "./adminer/default-orange.css:/var/www/html/adminer.css"
-      - "./adminer/logo.png:/var/www/html/logo.png"
-      - "./adminer/fonts/:/var/www/html/fonts"
-
-volumes:
-  wikipedia-solver-mariadb-data:
data/execute-sql.sh (new Executable file, 8 lines)
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+/data/sql/0000-sql-init.sh
+
+/data/sql-pages-inserts/0000-pages.sh
+/data/sql-internal-links-inserts/0000-internal-links.sh
+
+/data/sql/0999-sql-end.sh
@@ -7,7 +7,6 @@ import {
 } from "./utils.js"
 
 const SQL_DUMP_PATH = path.join(process.cwd(), "dump")
-const SQL_OUTPUT_PATH = path.join(process.cwd(), "sql")
 const SQL_FILENAME_NUMBER_PAD = 4
 
 /**
@@ -52,9 +51,42 @@ const cleanPagesSQL = async () => {
   let current = ""
   let lastPercent = 0
 
+  let pagesFileCount = 1
+
+  const INSERT_INTO_START_OUTPUT = "INSERT INTO pages (id, title) VALUES "
+
+  const BATCH_SIZE = 1_000_000
+
+  /**
+   * @type {string[]}
+   */
+  let batch = []
+
+  const flushBatch = async () => {
+    if (batch.length > 0) {
+      const batchString = batch.join(",")
+      const fileName = `${zeroPad(pagesFileCount, SQL_FILENAME_NUMBER_PAD)}-pages-inserts.sql`
+      const sqlOutputPath = path.join(
+        process.cwd(),
+        "sql-pages-inserts",
+        fileName,
+      )
+      await fs.promises.writeFile(
+        sqlOutputPath,
+        `${INSERT_INTO_START_OUTPUT}${batchString};`,
+        {
+          encoding: "utf-8",
+        },
+      )
+      console.log(`flushBatch - ${fileName}, batch.length: ${batch.length}`)
+      pagesFileCount += 1
+      batch = []
+    }
+  }
+
   return await new Promise((resolve, reject) => {
     sqlInputFileStream
-      .on("data", (dataInput) => {
+      .on("data", async (dataInput) => {
        const bytesReadRatio = sqlInputFileStream.bytesRead / sqlInputStat.size
        const bytesReadPercent = bytesReadRatio * 100
 
@@ -98,13 +130,21 @@
 
        if (namespace === "0" && !isRedirect) {
          wikipediaPagesKeyId[id] = title
+          batch.push(`(${id},E${title})`)
        }
      }
+
+      if (batch.length >= BATCH_SIZE) {
+        sqlInputFileStream.pause()
+        await flushBatch()
+        sqlInputFileStream.resume()
+      }
      })
      .on("error", (error) => {
        return reject(error)
      })
-      .on("close", () => {
+      .on("close", async () => {
+        await flushBatch()
        console.log("cleanPagesSQL - Bytes read (100%).")
        return resolve(wikipediaPagesKeyId)
      })
@@ -113,30 +153,6 @@
 
 const wikipediaPagesKeyId = await cleanPagesSQL()
 
-const cleanPagesSQLWriteToFile = async () => {
-  console.log("cleanPagesSQLWriteToFile - Writing to file...")
-  const sqlOutputPath = path.join(
-    SQL_OUTPUT_PATH,
-    `${zeroPad(1, SQL_FILENAME_NUMBER_PAD)}-pages-inserts.sql`,
-  )
-  const INSERT_INTO_START_OUTPUT = "INSERT INTO pages (id, title) VALUES "
-
-  const wikipediaPagesString = Object.entries(wikipediaPagesKeyId)
-    .map(([id, title]) => {
-      return `(${id},${title})`
-    })
-    .join(",")
-
-  await fs.promises.writeFile(
-    sqlOutputPath,
-    `${INSERT_INTO_START_OUTPUT}${wikipediaPagesString};`,
-    { encoding: "utf-8" },
-  )
-  console.log("cleanPagesSQLWriteToFile - Done.")
-}
-
-await cleanPagesSQLWriteToFile()
-
 /**
  * Function to clean the `pagelinks.sql` file by:
  * - Removing all lines that don't start with `INSERT INTO...`.
@@ -145,7 +161,7 @@ await cleanPagesSQLWriteToFile()
  * @returns {Promise<void>}
  */
 const cleanInternalLinksSQL = async () => {
-  let internalLinksFileCount = 2
+  let internalLinksFileCount = 1
   const INSERT_INTO_START_OUTPUT =
     "INSERT INTO internal_links (from_page_id, to_page_id) VALUES "
 
@@ -174,7 +190,11 @@ const cleanInternalLinksSQL = async () => {
     if (batch.length > 0) {
      const batchString = batch.join(",")
      const fileName = `${zeroPad(internalLinksFileCount, SQL_FILENAME_NUMBER_PAD)}-internal-links-inserts.sql`
-      const sqlOutputPath = path.join(SQL_OUTPUT_PATH, fileName)
+      const sqlOutputPath = path.join(
+        process.cwd(),
+        "sql-internal-links-inserts",
+        fileName,
+      )
      await fs.promises.writeFile(
        sqlOutputPath,
        `${INSERT_INTO_START_OUTPUT}${batchString};`,
data/sql-internal-links-inserts/0000-internal-links.sh (new Executable file, 6 lines)
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+for sqlInsert in /data/sql-internal-links-inserts/*.sql; do
+  echo "${sqlInsert}"
+  time psql --username="${DATABASE_USER}" --dbname="${DATABASE_NAME}" --file="${sqlInsert}"
+done
data/sql-pages-inserts/0000-pages.sh (new Executable file, 6 lines)
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+for sqlInsert in /data/sql-pages-inserts/*.sql; do
+  echo "${sqlInsert}"
+  time psql --username="${DATABASE_USER}" --dbname="${DATABASE_NAME}" --file="${sqlInsert}"
+done
data/sql/0000-sql-init.sh (new Executable file, 3 lines)
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+time psql --username="${DATABASE_USER}" --dbname="${DATABASE_NAME}" --file="/data/sql/0000-sql-init.sql"
data/sql/0000-sql-init.sql (new Normal file, 2 lines)
@@ -0,0 +1,2 @@
+ALTER TABLE pages DISABLE TRIGGER ALL;
+ALTER TABLE internal_links DISABLE TRIGGER ALL;
@@ -1,28 +0,0 @@
-CREATE TABLE `pages` (
-  `id` INT(8) UNSIGNED NOT NULL AUTO_INCREMENT,
-  `title` VARBINARY(255) NOT NULL DEFAULT '',
-  -- `is_redirect` tinyint(1) unsigned NOT NULL DEFAULT 0,
-
-  PRIMARY KEY (`id`),
-  UNIQUE KEY (`title`)
-) ENGINE=MyISAM AUTO_INCREMENT=76684425 DEFAULT CHARSET=binary ROW_FORMAT=COMPRESSED;
-
--- VARBINARY usage instead of VARCHAR explanation: <https://stackoverflow.com/a/13397437>
--- > War on varchar. Changed all occurrences of varchar(N) and varchar(N) binary to varbinary(N). varchars cause problems ("Invalid mix of collations" errors) on MySQL databases with certain configs, most notably the default MySQL config.
-
-CREATE TABLE `internal_links` (
-  -- `id` INT(8) UNSIGNED NOT NULL AUTO_INCREMENT,
-  `from_page_id` INT(8) UNSIGNED NOT NULL,
-  `to_page_id` INT(8) UNSIGNED NOT NULL,
-
-  -- PRIMARY KEY (`id`)
-  PRIMARY KEY (`from_page_id`, `to_page_id`),
-  FOREIGN KEY (`from_page_id`) REFERENCES `pages` (`id`) ON DELETE CASCADE,
-  FOREIGN KEY (`to_page_id`) REFERENCES `pages` (`id`) ON DELETE CASCADE
-) ENGINE=MyISAM DEFAULT CHARSET=binary ROW_FORMAT=COMPRESSED;
-
-SET @@session.unique_checks = 0;
-SET @@session.foreign_key_checks = 0;
-
-SET FOREIGN_KEY_CHECKS = 0;
-SET UNIQUE_CHECKS = 0;
@@ -1,11 +0,0 @@
--- SET @@session.foreign_key_checks = 0;
--- SET FOREIGN_KEY_CHECKS = 0;
-
--- ALTER TABLE `internal_links` ADD CONSTRAINT fk_from_page_id FOREIGN KEY (`from_page_id`) REFERENCES `pages` (`id`);
--- ALTER TABLE `internal_links` ADD CONSTRAINT fk_to_page_id FOREIGN KEY (`to_page_id`) REFERENCES `pages` (`id`);
-
-SET @@session.unique_checks = 1;
-SET @@session.foreign_key_checks = 1;
-
-SET FOREIGN_KEY_CHECKS = 1;
-SET UNIQUE_CHECKS = 1;
data/sql/0999-sql-end.sh (new Executable file, 3 lines)
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+time psql --username="${DATABASE_USER}" --dbname="${DATABASE_NAME}" --file="/data/sql/0999-sql-end.sql"
data/sql/0999-sql-end.sql (new Normal file, 2 lines)
@@ -0,0 +1,2 @@
+ALTER TABLE pages ENABLE TRIGGER ALL;
+ALTER TABLE internal_links ENABLE TRIGGER ALL;
@@ -26,7 +26,7 @@
     "editorconfig-checker": "5.1.8",
     "playwright": "catalog:",
     "prettier": "3.3.3",
-    "prettier-plugin-tailwindcss": "0.6.5",
+    "prettier-plugin-tailwindcss": "0.6.6",
     "replace-in-files-cli": "3.0.0",
     "semantic-release": "23.1.1",
     "turbo": "2.0.12",
@@ -7,3 +7,60 @@
 export const capitalize = (string: string): string => {
   return string.charAt(0).toUpperCase() + string.slice(1)
 }
+
+/**
+ * Trim any of the specified characters from the start and end of a string.
+ * @param string
+ * @param characters
+ * @returns
+ * @example trimAny("_____foo bar ", [" ", "_"]) // "foo bar"
+ */
+export const trimAny = (string: string, characters: string[]): string => {
+  let start = 0
+  let end = string.length
+
+  while (
+    start < end &&
+    (characters as Array<string | undefined>).includes(string[start])
+  ) {
+    start += 1
+  }
+
+  while (
+    end > start &&
+    (characters as Array<string | undefined>).includes(string[end - 1])
+  ) {
+    end -= 1
+  }
+
+  return start > 0 || end < string.length
+    ? string.substring(start, end)
+    : string
+}
+
+/**
+ * Reduces consecutive occurrences of specified characters in a string to a single occurrence.
+ *
+ * @param input
+ * @param characters
+ * @returns
+ * @example reduceConsecutiveCharacters("Hello___there!!", ["_", "!"]) // "Hello_there!"
+ */
+export const reduceConsecutiveCharacters = (
+  input: string,
+  characters: string[],
+): string => {
+  let result = ""
+  let previousCharacter = ""
+  for (const currentCharacter of input) {
+    if (characters.includes(currentCharacter)) {
+      if (currentCharacter !== previousCharacter) {
+        result += currentCharacter
+      }
+    } else {
+      result += currentCharacter
+    }
+    previousCharacter = currentCharacter
+  }
+  return result
+}
@@ -1,6 +1,6 @@
 import { describe, expect, it } from "vitest"

-import { capitalize } from "../strings.js"
+import { capitalize, reduceConsecutiveCharacters, trimAny } from "../strings.js"

 describe("capitalize", () => {
   it("should capitalize the first letter of a string", () => {

@@ -39,3 +39,111 @@ describe("capitalize", () => {
     expect(output).toEqual(expected)
   })
 })
+
+describe("trimAny", () => {
+  it("should trim any of the specified characters from the start and end of a string", () => {
+    // Arrange - Given
+    const input = "_____foo bar "
+    const characters = [" ", "_"]
+
+    // Act - When
+    const output = trimAny(input, characters)
+
+    // Assert - Then
+    const expected = "foo bar"
+    expect(output).toEqual(expected)
+  })
+
+  it("should trim any of the specified characters from the start and end of a string even if the start and end characters are different", () => {
+    // Arrange - Given
+    const input = "_ __ _foo bar _"
+    const characters = [" ", "_"]
+
+    // Act - When
+    const output = trimAny(input, characters)
+
+    // Assert - Then
+    const expected = "foo bar"
+    expect(output).toEqual(expected)
+  })
+
+  it("should return the same string when the input does not start or end with any of the specified characters", () => {
+    // Arrange - Given
+    const input = "foo bar"
+    const characters = [" ", "_"]
+
+    // Act - When
+    const output = trimAny(input, characters)
+
+    // Assert - Then
+    const expected = "foo bar"
+    expect(output).toEqual(expected)
+  })
+
+  it("should return an empty string when the input is an empty string", () => {
+    // Arrange - Given
+    const input = ""
+    const characters = [" ", "_"]
+
+    // Act - When
+    const output = trimAny(input, characters)
+
+    // Assert - Then
+    const expected = ""
+    expect(output).toEqual(expected)
+  })
+
+  it("should return an empty string when the input starts and ends with the specified characters", () => {
+    // Arrange - Given
+    const input = " _ "
+    const characters = [" ", "_"]
+
+    // Act - When
+    const output = trimAny(input, characters)
+
+    // Assert - Then
+    const expected = ""
+    expect(output).toEqual(expected)
+  })
+})
+
+describe("reduceConsecutiveCharacters", () => {
+  it("should reduce consecutive occurrences of specified characters in a string to a single occurrence", () => {
+    // Arrange - Given
+    const input = "Hello___there!!"
+    const characters = ["_", "!"]
+
+    // Act - When
+    const output = reduceConsecutiveCharacters(input, characters)
+
+    // Assert - Then
+    const expected = "Hello_there!"
+    expect(output).toEqual(expected)
+  })
+
+  it("should return the same string when there are no consecutive occurrences of specified characters", () => {
+    // Arrange - Given
+    const input = "Hello there!"
+    const characters = ["_", "!"]
+
+    // Act - When
+    const output = reduceConsecutiveCharacters(input, characters)
+
+    // Assert - Then
+    const expected = "Hello there!"
+    expect(output).toEqual(expected)
+  })
+
+  it("should return an empty string when the input is an empty string", () => {
+    // Arrange - Given
+    const input = ""
+    const characters = ["_", "!"]
+
+    // Act - When
+    const output = reduceConsecutiveCharacters(input, characters)
+
+    // Assert - Then
+    const expected = ""
+    expect(output).toEqual(expected)
+  })
+})
@@ -5,7 +5,8 @@
   "type": "module",
   "exports": {
     "./WikipediaClient": "./src/WikipediaClient.tsx",
-    "./wikipedia-api": "./src/wikipedia-api.ts"
+    "./wikipedia-api": "./src/wikipedia-api.ts",
+    "./wikipedia-utils": "./src/wikipedia-utils.ts"
   },
   "scripts": {
     "lint:eslint": "eslint src --max-warnings 0 --report-unused-disable-directives",

@@ -17,6 +18,7 @@
     "@repo/config-tailwind": "workspace:*",
     "@repo/i18n": "workspace:*",
     "@repo/ui": "workspace:*",
+    "@repo/utils": "workspace:*",
     "ky": "catalog:",
     "next": "catalog:",
     "next-intl": "catalog:",
@@ -4,11 +4,11 @@ import { Button } from "@repo/ui/Design/Button"
 import { Link } from "@repo/ui/Design/Link"
 import { Typography } from "@repo/ui/Design/Typography"
 import { useState } from "react"
+import { getWikipediaPageInternalLinks } from "./wikipedia-api"
 import {
   fromLocaleToWikipediaLocale,
   getWikipediaLink,
-  getWikipediaPageInternalLinks,
-} from "./wikipedia-api"
+} from "./wikipedia-utils"

 export const WikipediaClient: React.FC = () => {
   const [isLoading, setIsLoading] = useState(false)
@ -1,9 +0,0 @@
|
|||||||
import { describe, expect, it } from "vitest"
|
|
||||||
|
|
||||||
import { sum } from "../wikipedia-api"
|
|
||||||
|
|
||||||
describe("sum", () => {
|
|
||||||
it("adds 1 + 2 to equal 3", () => {
|
|
||||||
expect(sum(1, 2)).toBe(3)
|
|
||||||
})
|
|
||||||
})
|
|
@@ -0,0 +1,135 @@
import { describe, expect, it } from "vitest"

import {
  fromLocaleToWikipediaLocale,
  fromSanitizedPageTitleToPageTitle,
  getWikipediaLink,
  sanitizePageTitle,
} from "../wikipedia-utils"

describe("fromLocaleToWikipediaLocale", () => {
  it("should return the correct Wikipedia locale", () => {
    // Arrange - Given
    const input = "en-US"

    // Act - When
    const output = fromLocaleToWikipediaLocale(input)

    // Assert - Then
    const expected = "en"
    expect(output).toEqual(expected)
  })
})

describe("getWikipediaLink", () => {
  it("should return the correct Wikipedia link for the given locale", () => {
    // Arrange - Given
    const input = "en"

    // Act - When
    const output = getWikipediaLink(input)

    // Assert - Then
    const expected = "https://en.wikipedia.org"
    expect(output).toEqual(expected)
  })
})

describe("sanitizePageTitle", () => {
  it("should return the correct sanitized page title", () => {
    // Arrange - Given
    const input = "foo bar"

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "Foo_bar"
    expect(output).toEqual(expected)
  })

  it("should preserve the characters case", () => {
    // Arrange - Given
    const input = "Foo Bar"

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "Foo_Bar"
    expect(output).toEqual(expected)
  })

  it("should remove leading and trailing spaces/underscores (rule 1)", () => {
    // Arrange - Given
    const input = " Abc_def__"

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "Abc_def"
    expect(output).toEqual(expected)
  })

  it("should reduce consecutive spaces/underscores to a single one (rule 2)", () => {
    // Arrange - Given
    const input = "Abc  def"

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "Abc_def"
    expect(output).toEqual(expected)
  })

  it("should replace spaces by underscores (rule 3)", () => {
    // Arrange - Given
    const input = "Abc def"

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "Abc_def"
    expect(output).toEqual(expected)
  })

  it("should capitalize the first character (rule 4)", () => {
    // Arrange - Given
    const input = "abc_def"

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "Abc_def"
    expect(output).toEqual(expected)
  })

  it("should have a maximum of 255 characters (rule 5)", () => {
    // Arrange - Given
    const input = "a".repeat(256)

    // Act - When
    const output = sanitizePageTitle(input)

    // Assert - Then
    const expected = "A" + "a".repeat(254)
    expect(output).toEqual(expected)
  })
})

describe("fromSanitizedPageTitleToPageTitle", () => {
  it("should return the correct page title", () => {
    // Arrange - Given
    const input = "Foo_bar"

    // Act - When
    const output = fromSanitizedPageTitleToPageTitle(input)

    // Assert - Then
    const expected = "Foo bar"
    expect(output).toEqual(expected)
  })
})
@@ -1,33 +1,11 @@
-import type { Locale } from "@repo/i18n/config"
 import ky from "ky"

+import { getWikipediaLink, type WikipediaLocale } from "./wikipedia-utils"
+
-export const sum = (a: number, b: number): number => {
-  return a + b
-}
-
 /**
  * @see https://www.mediawiki.org/wiki/Wikimedia_REST_API#Terms_and_conditions
  * To avoid impacting other API users, limit your clients to no more than 200 requests/sec to this API overall. Many entry points additionally specify and enforce more restrictive rate limits (HTTP 429 error).
  */
-
-export const WIKIPEDIA_LOCALES = ["en", "fr"] as const
-export type WikipediaLocale = (typeof WIKIPEDIA_LOCALES)[number]
-
-const WIKIPEDIA_LOCALES_MAP: Record<Locale, WikipediaLocale> = {
-  "en-US": "en",
-  "fr-FR": "fr",
-}
-
-export const fromLocaleToWikipediaLocale = (
-  locale: Locale,
-): WikipediaLocale => {
-  return WIKIPEDIA_LOCALES_MAP[locale]
-}
-
-export const getWikipediaLink = (locale: WikipediaLocale): string => {
-  return `https://${locale}.wikipedia.org`
-}
-
 interface WikipediaQueryLinksResponse {
   continue?: {
     plcontinue: string
packages/wikipedia-game-solver/src/wikipedia-utils.ts (Normal file, 70 lines)
@@ -0,0 +1,70 @@
import type { Locale } from "@repo/i18n/config"
import {
  capitalize,
  reduceConsecutiveCharacters,
  trimAny,
} from "@repo/utils/strings"

export const WIKIPEDIA_LOCALES = ["en", "fr"] as const
export type WikipediaLocale = (typeof WIKIPEDIA_LOCALES)[number]

const WIKIPEDIA_LOCALES_MAP: Record<Locale, WikipediaLocale> = {
  "en-US": "en",
  "fr-FR": "fr",
}

export const fromLocaleToWikipediaLocale = (
  locale: Locale,
): WikipediaLocale => {
  return WIKIPEDIA_LOCALES_MAP[locale]
}

export const getWikipediaLink = (locale: WikipediaLocale): string => {
  return `https://${locale}.wikipedia.org`
}

/**
 * Converts a page title to a sanitized version (also called "canonical form") that can be used in a URL.
 *
 * The sanitized page title is stored as text with the following restrictions:
 * 1. leading and trailing spaces (` `) and underscores (`_`) are removed.
 * 2. consecutive spaces/underscores are reduced to a single one.
 * 3. spaces are replaced by underscores.
 * 4. first character is capitalized.
 * 5. maximum of 255 characters.
 * @param pageTitle
 * @see https://www.mediawiki.org/wiki/Manual:Page_title
 * @see https://en.wikipedia.org/wiki/Wikipedia:Naming_conventions_(technical_restrictions)
 * @returns
 * @example sanitizePageTitle("foo bar") // "Foo_bar"
 * @example sanitizePageTitle("Foo Bar") // "Foo_Bar"
 */
export const sanitizePageTitle = (pageTitle: string): string => {
  const rule1 = trimAny(pageTitle, [" ", "_"])
  const rule2 = reduceConsecutiveCharacters(rule1, [" ", "_"])
  const rule3 = rule2.replaceAll(" ", "_")
  const rule4 = capitalize(rule3)
  const rule5 = rule4.slice(0, 255)
  return rule5
}

/**
 * Converts a sanitized page title to a page title.
 *
 * A page title is the title of a wiki page, which is a human-readable and unique identifier for a page.
 *
 * Underscores (`_`) are replaced by spaces (` `).
 *
 * A page title is not to be confused with a display title.
 * A display title is the preferred title associated with a wiki page (stored separately), with less restrictions than a page title.
 * @param sanitizedPageTitle
 * @see https://www.mediawiki.org/wiki/Manual:Page_title
 * @see https://www.mediawiki.org/wiki/Display_title
 * @returns
 * @example fromSanitizedPageTitleToPageTitle("Foo_bar") // "Foo bar"
 */
export const fromSanitizedPageTitleToPageTitle = (
  sanitizedPageTitle: string,
): string => {
  return sanitizedPageTitle.replaceAll("_", " ")
}
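To see how the new module fits together, here is a short usage sketch. It is illustrative only: it assumes the snippet lives next to wikipedia-utils.ts, and composing the /wiki/ URL this way is my own combination of the exported helpers, not code taken from this compare.

import {
  fromLocaleToWikipediaLocale,
  fromSanitizedPageTitleToPageTitle,
  getWikipediaLink,
  sanitizePageTitle,
} from "./wikipedia-utils"

// Rules 1-5 applied in order: trim, collapse, spaces -> underscores, capitalize, cap at 255.
const sanitizedPageTitle = sanitizePageTitle("  node js ") // "Node_js"

// Map the website locale to a Wikipedia subdomain and build an article URL.
const wikipediaLocale = fromLocaleToWikipediaLocale("en-US") // "en"
const articleUrl = `${getWikipediaLink(wikipediaLocale)}/wiki/${sanitizedPageTitle}`
// "https://en.wikipedia.org/wiki/Node_js"

// Convert back to a human-readable title for display.
const pageTitle = fromSanitizedPageTitleToPageTitle(sanitizedPageTitle) // "Node js"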
pnpm-lock.yaml (generated, 768 changed lines): file diff suppressed because it is too large.
@@ -21,7 +21,7 @@ catalog:
   # TypeScript
   "typescript": "5.5.4"
   "@total-typescript/ts-reset": "0.5.1"
-  "@types/node": "22.1.0"
+  "@types/node": "22.2.0"
   "tsx": "4.17.0"

   # AdonisJS

@@ -29,7 +29,9 @@ catalog:
   "@adonisjs/core": "6.12.1"
   "@adonisjs/cors": "2.2.1"
   "@adonisjs/lucid": "21.2.0"
-  "mysql2": "3.11.0"
+  "@adonisjs/limiter": "2.3.2"
+  "pg": "8.12.0"
+  "better-sqlite3": "11.1.2"
   "@adonisjs/assembler": "7.7.0"
   "@vinejs/vine": "2.1.0"
   "luxon": "3.5.0"

@@ -80,6 +82,7 @@ catalog:
   "start-server-and-test": "2.0.5"
   "@vitest/browser": "2.0.5"
   "@vitest/coverage-istanbul": "2.0.5"
+  "c8": "10.1.2"
   "@vitest/ui": "2.0.5"
   "vitest": "2.0.5"
   "@testing-library/react": "16.0.0"
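With mysql2 dropped from the catalog in favour of pg (alongside @adonisjs/limiter and better-sqlite3), the API's Lucid database config presumably now targets PostgreSQL. That config file is not part of this compare; the following is only a rough sketch of what an AdonisJS v6 config/database.ts for the pg client typically looks like, and the env variable names are the AdonisJS defaults rather than anything confirmed by this repo:

// config/database.ts (hypothetical sketch, not a file from this compare)
import env from "#start/env"
import { defineConfig } from "@adonisjs/lucid"

const dbConfig = defineConfig({
  connection: "postgres",
  connections: {
    postgres: {
      // The "pg" client replaces the previous "mysql2" entry from the catalog.
      client: "pg",
      connection: {
        host: env.get("DB_HOST"),
        port: env.get("DB_PORT"),
        user: env.get("DB_USER"),
        password: env.get("DB_PASSWORD"),
        database: env.get("DB_DATABASE"),
      },
    },
  },
})

export default dbConfig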