diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..06d77ed6 --- /dev/null +++ b/.env.example @@ -0,0 +1,30 @@ +# Your Cloudflare account tag. +# +# Needed for: +# - Directory cache scripts +CLOUDFLARE_ACCOUNT_ID= + +# Cloudflare V4 API token. +# +# Needed for: +# - Directory cache scripts +# +# Required permissions: +# - `Workers KV Storage`: Edit +# - `Workers R2 Storage`: Read +# +# See https://developers.cloudflare.com/fundamentals/api/get-started/create-token/ +CLOUDFLARE_API_TOKEN= + +# S3 credentials for your R2 bucket. +# +# Needed for: +# - Directory listings in the worker. +# - Directory cache scripts +# +# Required permissions: +# - `Object Read Only` +# +# See https://dash.cloudflare.com/?account=/r2/api-tokens +S3_ACCESS_KEY_ID= +S3_ACCESS_KEY_SECRET= diff --git a/.github/workflows/update-links.yml b/.github/workflows/update-links.yml index b428a047..a0e2caea 100644 --- a/.github/workflows/update-links.yml +++ b/.github/workflows/update-links.yml @@ -6,6 +6,11 @@ permissions: on: # Triggered by https://github.com/nodejs/node/blob/main/.github/workflows/update-release-links.yml workflow_dispatch: + inputs: + version: + description: 'Node.js version (ex/ `v20.0.0`)' + required: true + type: string concurrency: group: update-redirect-links @@ -49,11 +54,13 @@ jobs: - name: Install dependencies run: npm ci && npm update nodejs-latest-linker --save - - name: Update Redirect Links - run: node scripts/build-r2-symlinks.mjs && node --run format + - name: Update Directory Cache + run: node scripts/update-directory-cache.mjs "$VERSION_INPUT" && node --run format env: - CF_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }} - CF_SECRET_ACCESS_KEY: ${{ secrets.CF_SECRET_ACCESS_KEY }} + VERSION_INPUT: '${{ inputs.version }}' + CLOUDFLARE_API_TOKEN: ${{ secrets.CF_API_TOKEN }} + S3_ACCESS_KEY_ID: ${{ secrets.CF_ACCESS_KEY_ID }} + S3_ACCESS_KEY_SECRET: ${{ secrets.CF_SECRET_ACCESS_KEY }} - name: Commit Changes id: git_auto_commit diff --git 
a/.gitignore b/.gitignore index 1c6a8066..eb628556 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ node_modules/ dist/ .dev.vars .sentryclirc +.env diff --git a/common/README.md b/common/README.md new file mode 100644 index 00000000..a37f34a6 --- /dev/null +++ b/common/README.md @@ -0,0 +1,3 @@ +# Common + +Utilities used in local scripts and in the deployed worker. diff --git a/src/constants/limits.ts b/common/limits.mjs similarity index 51% rename from src/constants/limits.ts rename to common/limits.mjs index 5c1f95de..0c53edce 100644 --- a/src/constants/limits.ts +++ b/common/limits.mjs @@ -1,8 +1,13 @@ /** - * Max amount of retries for R2 requests + * Max amount of retries for requests to R2 */ export const R2_RETRY_LIMIT = 5; +/** + * Max amount of retries for requests to KV + */ +export const KV_RETRY_LIMIT = 5; + /** * Max amount of keys to be returned in a S3 request */ diff --git a/common/listR2Directory.mjs b/common/listR2Directory.mjs new file mode 100644 index 00000000..4743653f --- /dev/null +++ b/common/listR2Directory.mjs @@ -0,0 +1,111 @@ +import { ListObjectsV2Command } from '@aws-sdk/client-s3'; +import { R2_RETRY_LIMIT, S3_MAX_KEYS } from './limits.mjs'; + +/** + * List the contents of a directory in R2. 
+ * + * @param {import('@aws-sdk/client-s3').S3Client} client + * @param {string} bucket + * @param {string | undefined} [directory=undefined] + * @param {number} retryCount + * @returns {Promise} + */ +export async function listR2Directory( + client, + bucket, + directory = undefined, + retryCount = R2_RETRY_LIMIT +) { + /** + * @type {Set} + */ + const subdirectories = new Set(); + + /** + * @type {Set} + */ + const files = new Set(); + + let hasIndexHtmlFile = false; + let directoryLastModified = new Date(0); + + let isTruncated; + let continuationToken; + do { + /** + * @type {import('@aws-sdk/client-s3').ListObjectsV2Output | undefined} + */ + let data = undefined; + + let retriesLeft = retryCount; + while (retriesLeft) { + try { + data = await client.send( + new ListObjectsV2Command({ + Bucket: bucket, + Delimiter: '/', + Prefix: directory, + ContinuationToken: continuationToken, + MaxKeys: S3_MAX_KEYS, + }) + ); + + break; + } catch (err) { + retriesLeft--; + + if (retriesLeft === 0) { + throw new Error('exhausted R2 retries', { cause: err }); + } + } + } + + if (!data) { + return undefined; + } + + isTruncated = data.IsTruncated; + continuationToken = data.NextContinuationToken; + + data.CommonPrefixes?.forEach(subdirectory => { + if (subdirectory.Prefix) { + subdirectories.add( + subdirectory.Prefix.substring(directory?.length ?? 0) + ); + } + }); + + data.Contents?.forEach(file => { + if (!file.Key) { + return; + } + + if (!hasIndexHtmlFile && file.Key.match(/index.htm(?:l)$/)) { + hasIndexHtmlFile = true; + } + + files.add({ + name: file.Key.substring(directory?.length ?? 
0), + lastModified: file.LastModified, + size: file.Size, + }); + + // Set the directory's last modified date to be the same as the most + // recently updated file + if (file.LastModified > directoryLastModified) { + directoryLastModified = file.LastModified; + } + }); + } while (isTruncated); + + if (subdirectories.size === 0 && files.size === 0) { + return undefined; + } + + return { + subdirectories: Array.from(subdirectories), + hasIndexHtmlFile, + files: Array.from(files), + lastModified: directoryLastModified, + }; +} diff --git a/common/listR2Directory.test.ts b/common/listR2Directory.test.ts new file mode 100644 index 00000000..40635bf1 --- /dev/null +++ b/common/listR2Directory.test.ts @@ -0,0 +1,177 @@ +import { test, expect } from 'vitest'; +import { ListObjectsV2Command } from '@aws-sdk/client-s3'; +import { listR2Directory } from './listR2Directory.mjs'; +import { R2_RETRY_LIMIT } from './limits.mjs'; + +test('adds subdirectories and files properly', async () => { + const now = new Date(); + + // Add a second so we can check the directory's last modified is determined properly + const directoryLastModified = new Date(now.getTime() + 1000); + + const client = { + async send(cmd: ListObjectsV2Command) { + expect(cmd.input.Bucket).toStrictEqual('dist-prod'); + expect(cmd.input.Prefix).toStrictEqual('some/directory/'); + + return { + IsTruncated: false, + NextContinuationToken: undefined, + CommonPrefixes: [ + { Prefix: 'some/directory/subdirectory1/' }, + { Prefix: 'some/directory/subdirectory2/' }, + { Prefix: 'some/directory/subdirectory3/' }, + ], + Contents: [ + { + Key: 'some/directory/file.txt', + LastModified: now, + Size: 1, + }, + { + Key: 'some/directory/file2.txt', + LastModified: directoryLastModified, + Size: 2, + }, + { + Key: 'some/directory/file3.txt', + LastModified: now, + Size: 3, + }, + ], + }; + }, + }; + + // @ts-expect-error don't need full client + const result = await listR2Directory(client, 'dist-prod', 'some/directory/'); + + 
expect(result).toStrictEqual({ + subdirectories: ['subdirectory1/', 'subdirectory2/', 'subdirectory3/'], + hasIndexHtmlFile: false, + files: [ + { + name: 'file.txt', + lastModified: now, + size: 1, + }, + { + name: 'file2.txt', + lastModified: directoryLastModified, + size: 2, + }, + { + name: 'file3.txt', + lastModified: now, + size: 3, + }, + ], + lastModified: directoryLastModified, + }); +}); + +test('handles truncation properly', async () => { + const now = new Date(); + + const client = { + async send(cmd: ListObjectsV2Command) { + expect(cmd.input.Bucket).toStrictEqual('dist-prod'); + expect(cmd.input.Prefix).toStrictEqual('some/directory/'); + + switch (cmd.input.ContinuationToken) { + case undefined: { + return { + IsTruncated: true, + NextContinuationToken: '1', + CommonPrefixes: [{ Prefix: 'some/directory/subdirectory1/' }], + Contents: [ + { + Key: 'some/directory/file.txt', + LastModified: now, + Size: 1, + }, + ], + }; + } + case '1': { + return { + IsTruncated: true, + NextContinuationToken: '2', + CommonPrefixes: [{ Prefix: 'some/directory/subdirectory2/' }], + Contents: [ + { + Key: 'some/directory/file2.txt', + LastModified: now, + Size: 2, + }, + ], + }; + } + case '2': { + return { + IsTruncated: false, + NextContinuationToken: undefined, + CommonPrefixes: [{ Prefix: 'some/directory/subdirectory3/' }], + Contents: [ + { + Key: 'some/directory/file3.txt', + LastModified: now, + Size: 3, + }, + ], + }; + } + } + }, + }; + + // @ts-expect-error don't need full client + const result = await listR2Directory(client, 'dist-prod', 'some/directory/'); + + expect(result).toStrictEqual({ + subdirectories: ['subdirectory1/', 'subdirectory2/', 'subdirectory3/'], + hasIndexHtmlFile: false, + files: [ + { + name: 'file.txt', + lastModified: now, + size: 1, + }, + { + name: 'file2.txt', + lastModified: now, + size: 2, + }, + { + name: 'file3.txt', + lastModified: now, + size: 3, + }, + ], + lastModified: now, + }); +}); + +test('retries properly', async () => 
{ + let retries = R2_RETRY_LIMIT; + + let requestsSent = 0; + const client = { + async send() { + requestsSent++; + + throw new TypeError('dummy'); + }, + }; + + const result = listR2Directory( + // @ts-expect-error don't need full client + client, + 'dist-prod', + 'some/directory/', + retries + ); + + await expect(result).rejects.toThrow('exhausted R2 retries'); + expect(requestsSent).toBe(retries); +}); diff --git a/e2e-tests/directory.test.ts b/e2e-tests/directory.test.ts index 22eb76e5..f6d0c0e7 100644 --- a/e2e-tests/directory.test.ts +++ b/e2e-tests/directory.test.ts @@ -1,6 +1,9 @@ import { env, fetchMock, createExecutionContext } from 'cloudflare:test'; -import { test, beforeAll, afterEach, expect } from 'vitest'; -import { populateR2WithDevBucket } from './util'; +import { test, beforeAll, afterEach, expect, describe, vi } from 'vitest'; +import { + populateDirectoryCacheWithDevBucket, + populateR2WithDevBucket, +} from './util'; import worker from '../src/worker'; import type { Env } from '../src/env'; import { CACHE_HEADERS } from '../src/constants/cache'; @@ -16,10 +19,11 @@ const mockedEnv: Env = { 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', }; -const s3Url = new URL(mockedEnv.S3_ENDPOINT); -s3Url.host = `${mockedEnv.BUCKET_NAME}.${s3Url.host}`; +describe('s3', () => { + const s3Url = new URL(mockedEnv.S3_ENDPOINT); + s3Url.host = `${mockedEnv.BUCKET_NAME}.${s3Url.host}`; -const S3_DIRECTORY_RESULT = ` + const S3_DIRECTORY_RESULT = ` dist-prod @@ -37,7 +41,7 @@ const S3_DIRECTORY_RESULT = ` `; -const S3_EMPTY_DIRECTORY_RESULT = ` + const S3_EMPTY_DIRECTORY_RESULT = ` dist-prod @@ -46,92 +50,182 @@ const S3_EMPTY_DIRECTORY_RESULT = ` false `; -beforeAll(async () => { - fetchMock.activate(); - fetchMock.disableNetConnect(); + beforeAll(async () => { + fetchMock.activate(); + fetchMock.disableNetConnect(); - await populateR2WithDevBucket(); -}); + await populateR2WithDevBucket(); + }); -afterEach(() => { - 
fetchMock.assertNoPendingInterceptors(); -}); + afterEach(() => { + fetchMock.assertNoPendingInterceptors(); + }); + + // These paths are cached and don't send requests to S3 + for (const path of ['/dist/', '/docs/']) { + test(`GET \`${path}\` returns 200`, async () => { + const ctx = createExecutionContext(); + + const res = await worker.fetch( + new Request(`https://localhost${path}`), + mockedEnv, + ctx + ); + + // Consume body promise + await res.text(); + + expect(res.status).toBe(200); + }); + } + + for (const path of ['/api/', '/download/', '/metrics/']) { + test(`GET \`${path}\` returns 200`, async () => { + fetchMock + .get(s3Url.origin) + .intercept({ + path: /.*/, + }) + .reply(200, S3_DIRECTORY_RESULT); + + const ctx = createExecutionContext(); + + const res = await worker.fetch( + new Request(`https://localhost${path}`), + mockedEnv, + ctx + ); + + // Consume body promise + await res.text(); + + expect(res.status).toBe(200); + }); + } + + test('GET `/dist/unknown-directory/` returns 404', async () => { + fetchMock + .get(s3Url.origin) + .intercept({ + path: /.*/, + query: { + prefix: 'nodejs/release/unknown-directory/', + }, + }) + .reply(200, S3_EMPTY_DIRECTORY_RESULT); -// These paths are cached and don't send requests to S3 -for (const path of ['/dist/', '/docs/']) { - test(`GET \`${path}\` returns 200`, async () => { const ctx = createExecutionContext(); const res = await worker.fetch( - new Request(`https://localhost${path}`), + new Request('https://localhost/dist/unknown-directory/'), mockedEnv, ctx ); - // Consume body promise - await res.text(); + expect(res.status).toBe(404); + expect(res.headers.get('cache-control')).toStrictEqual( + CACHE_HEADERS.failure + ); + expect(await res.text()).toStrictEqual('Directory not found'); + }); + + test('GET `/dist` redirects to `/dist/`', async () => { + const ctx = createExecutionContext(); + + const res = await worker.fetch( + new Request('https://localhost/dist'), + mockedEnv, + ctx + ); + + 
expect(res.status).toBe(301); + expect(res.headers.get('location')).toStrictEqual( + 'https://localhost/dist/' + ); + }); +}); - expect(res.status).toBe(200); +describe('kv', () => { + const mockedEnv: Env = { + ...env, + ENVIRONMENT: 'e2e-tests', + CACHING: false, + LOG_ERRORS: true, + USE_KV: true, + }; + + beforeAll(async () => { + await populateDirectoryCacheWithDevBucket(); + + vi.mock( + import('../src/constants/latestVersions.json'), + async importOriginal => { + const original = await importOriginal(); + + // Point all `latest-` directories to one that exists in the dev bucket + Object.keys(original.default).forEach(branch => { + let updatedValue: string; + if (branch === 'node-latest.tar.gz') { + updatedValue = 'latest/node-v20.0.0.tar.gz'; + } else { + updatedValue = 'v20.0.0'; + } + + // @ts-expect-error + original.default[branch] = updatedValue; + }); + + return original; + } + ); }); -} -for (const path of ['/api/', '/download/', '/metrics/']) { - test(`GET \`${path}\` returns 200`, async () => { - fetchMock - .get(s3Url.origin) - .intercept({ - path: /.*/, - }) - .reply(200, S3_DIRECTORY_RESULT); + // Ensure essential endpoints are routable + for (const path of ['/dist/', '/docs/', '/api/', '/download/', '/metrics/']) { + test(`GET \`${path}\` returns 200`, async () => { + const ctx = createExecutionContext(); + + const res = await worker.fetch( + new Request(`https://localhost${path}`), + mockedEnv, + ctx + ); + + // Consume body promise + await res.text(); + + expect(res.status).toBe(200); + }); + } + test('GET `/dist/unknown-directory/` returns 404', async () => { const ctx = createExecutionContext(); const res = await worker.fetch( - new Request(`https://localhost${path}`), + new Request('https://localhost/dist/unknown-directory/'), mockedEnv, ctx ); - // Consume body promise - await res.text(); - - expect(res.status).toBe(200); + expect(res.status).toBe(404); + expect(res.headers.get('cache-control')).toStrictEqual( + CACHE_HEADERS.failure + ); + 
expect(await res.text()).toStrictEqual('Directory not found'); }); -} - -test('GET `/dist/unknown-directory/` returns 404', async () => { - fetchMock - .get(s3Url.origin) - .intercept({ - path: /.*/, - query: { - prefix: 'nodejs/release/unknown-directory/', - }, - }) - .reply(200, S3_EMPTY_DIRECTORY_RESULT); - - const ctx = createExecutionContext(); - - const res = await worker.fetch( - new Request('https://localhost/dist/unknown-directory/'), - mockedEnv, - ctx - ); - - expect(res.status).toBe(404); - expect(res.headers.get('cache-control')).toStrictEqual(CACHE_HEADERS.failure); - expect(await res.text()).toStrictEqual('Directory not found'); -}); -test('GET `/dist` redirects to `/dist/`', async () => { - const ctx = createExecutionContext(); + test('GET `/dist` redirects to `/dist/`', async () => { + const ctx = createExecutionContext(); - const res = await worker.fetch( - new Request('https://localhost/dist'), - mockedEnv, - ctx - ); + const res = await worker.fetch( + new Request('https://localhost/dist'), + mockedEnv, + ctx + ); - expect(res.status).toBe(301); - expect(res.headers.get('location')).toStrictEqual('https://localhost/dist/'); + expect(res.status).toBe(301); + expect(res.headers.get('location')).toStrictEqual( + 'https://localhost/dist/' + ); + }); }); diff --git a/e2e-tests/util.ts b/e2e-tests/util.ts index 9339f2e0..0badb4aa 100644 --- a/e2e-tests/util.ts +++ b/e2e-tests/util.ts @@ -1,7 +1,8 @@ import { env } from 'cloudflare:test'; import { inject } from 'vitest'; -import type { Env } from '../env'; -import type { Directory } from '../../vitest-setup'; +import type { Env } from '../src/env'; +import type { Directory } from '../vitest-setup'; +import { ReadDirectoryResult } from '../src/providers/provider'; async function populateR2BucketDirectory(directory: Directory): Promise { const promises: Array> = []; @@ -27,6 +28,39 @@ async function populateR2BucketDirectory(directory: Directory): Promise { await Promise.all(promises); } +function 
populateDirectoryCache(directory: Directory): Array> { + let hasIndexHtmlFile = false; + const cachedDirectory: ReadDirectoryResult = { + subdirectories: Object.keys(directory.subdirectories), + files: Object.keys(directory.files).map(name => { + const file = directory.files[name]; + + if (!hasIndexHtmlFile && name.match(/index.htm(?:l)$/)) { + hasIndexHtmlFile = true; + } + + return { + name, + lastModified: new Date(file.lastModified), + size: file.size, + }; + }), + hasIndexHtmlFile: false, + lastModified: new Date(), + }; + cachedDirectory.hasIndexHtmlFile = hasIndexHtmlFile; + + const promises: Array> = [ + env.DIRECTORY_CACHE.put( + `${directory.name}/`, + JSON.stringify(cachedDirectory) + ), + ...Object.values(directory.subdirectories).map(populateDirectoryCache), + ]; + + return promises; +} + /** * Writes the contents of the dev bucket into the R2 bucket given in {@link env} */ @@ -38,6 +72,14 @@ export async function populateR2WithDevBucket(): Promise { await populateR2BucketDirectory(devBucket); } +export async function populateDirectoryCacheWithDevBucket(): Promise { + // Grab the contents of the dev bucket + const devBucket = inject('devBucket'); + + // Write it to KV + await Promise.all(populateDirectoryCache(devBucket)); +} + declare module 'cloudflare:test' { // eslint-disable-next-line @typescript-eslint/no-empty-object-type interface ProvidedEnv extends Env {} diff --git a/package-lock.json b/package-lock.json index 7747556b..b5164128 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23,6 +23,7 @@ "@types/node": "^25.0.3", "@typescript-eslint/eslint-plugin": "^8.52.0", "@typescript-eslint/parser": "^8.32.1", + "cloudflare": "^5.2.0", "eslint": "^9.39.2", "eslint-config-prettier": "^10.1.8", "eslint-plugin-prettier": "^5.5.4", @@ -988,7 +989,6 @@ "integrity": "sha512-Wj7/AMtE9MRnAXa6Su3Lk0LNCfqDYgfwVjwRFVum9U7wsto1imuHqk4kTm7Jni+5A0Hn7dttL6O/zjvUvoo+8A==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "defu": "^6.1.4", 
"exsolve": "^1.0.7", @@ -3623,11 +3623,21 @@ "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "undici-types": "~7.16.0" } }, + "node_modules/@types/node-fetch": { + "version": "2.6.13", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.13.tgz", + "integrity": "sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.4" + } + }, "node_modules/@types/uuid": { "version": "9.0.8", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", @@ -3678,7 +3688,6 @@ "integrity": "sha512-iIACsx8pxRnguSYhHiMn2PvhvfpopO9FXHyn1mG5txZIsAaB6F0KwbFnUQN3KCiG3Jcuad/Cao2FAs1Wp7vAyg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.52.0", "@typescript-eslint/types": "8.52.0", @@ -3966,7 +3975,6 @@ "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/utils": "3.2.4", "pathe": "^2.0.3", @@ -3982,7 +3990,6 @@ "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", @@ -4020,12 +4027,24 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dev": true, + "license": "MIT", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + 
}, "node_modules/acorn": { "version": "8.14.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", "dev": true, - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -4065,6 +4084,19 @@ "node": ">= 6.0.0" } }, + "node_modules/agentkeepalive": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.6.0.tgz", + "integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -4124,6 +4156,13 @@ "node": ">=12" } }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true, + "license": "MIT" + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -4178,6 +4217,20 @@ "node": ">=8" } }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -4261,6 +4314,39 @@ "node": ">= 10.0" } }, + "node_modules/cloudflare": { + "version": "5.2.0", + 
"resolved": "https://registry.npmjs.org/cloudflare/-/cloudflare-5.2.0.tgz", + "integrity": "sha512-dVzqDpPFYR9ApEC9e+JJshFJZXcw4HzM8W+3DHzO5oy9+8rLC53G7x6fEf9A7/gSuSCxuvndzui5qJKftfIM9A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/node": "^18.11.18", + "@types/node-fetch": "^2.6.4", + "abort-controller": "^3.0.0", + "agentkeepalive": "^4.2.1", + "form-data-encoder": "1.7.2", + "formdata-node": "^4.3.2", + "node-fetch": "^2.6.7" + } + }, + "node_modules/cloudflare/node_modules/@types/node": { + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/cloudflare/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true, + "license": "MIT" + }, "node_modules/color": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", @@ -4304,6 +4390,19 @@ "simple-swizzle": "^0.2.2" } }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/commander": { "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", @@ -4381,6 +4480,16 @@ "dev": true, "license": "MIT" }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/detect-libc": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz", @@ -4409,6 +4518,21 @@ "tslib": "^2.0.3" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", @@ -4444,6 +4568,26 @@ "url": "https://github.com/sponsors/antfu" } }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-module-lexer": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", @@ -4451,6 +4595,35 @@ "dev": true, "license": "MIT" }, + "node_modules/es-object-atoms": { + "version": 
"1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/esbuild": { "version": "0.25.4", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.4.tgz", @@ -4510,7 +4683,6 @@ "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -4571,7 +4743,6 @@ "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", "dev": true, "license": "MIT", - "peer": true, "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -4747,6 +4918,16 @@ "node": ">=0.10.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/exit-hook": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-2.2.1.tgz", @@ -4882,6 +5063,44 @@ "url": 
"https://github.com/sponsors/isaacs" } }, + "node_modules/form-data": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==", + "dev": true, + "license": "MIT" + }, + "node_modules/formdata-node": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz", + "integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.3" + }, + "engines": { + "node": ">= 12.20" + } + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -4896,6 +5115,55 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": 
"sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-tsconfig": { "version": "4.10.0", "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.0.tgz", @@ -4981,6 +5249,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -4990,6 +5271,48 @@ "node": ">=8" } }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/html-minifier-terser": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz", @@ -5036,6 +5359,16 @@ "node": ">= 6" } }, + "node_modules/humanize-ms": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.0.0" + } + }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -5262,6 +5595,16 @@ "@jridgewell/sourcemap-codec": "^1.5.5" } }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/mime": { "version": "3.0.0", 
"resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", @@ -5275,6 +5618,29 @@ "node": ">=10.0.0" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/miniflare": { "version": "4.20251011.1", "resolved": "https://registry.npmjs.org/miniflare/-/miniflare-4.20251011.1.tgz", @@ -5375,6 +5741,27 @@ "tslib": "^2.0.3" } }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "deprecated": "Use your platform's native DOMException instead", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "engines": { + "node": ">=10.5.0" + } + }, "node_modules/node-fetch": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", @@ -5612,7 +5999,6 @@ "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", "dev": true, "license": "MIT", - "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -6144,7 +6530,6 @@ "integrity": 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -6225,7 +6610,6 @@ "integrity": "sha512-ytQKuwgmrrkDTFP4LjR0ToE2nqgy886GpvRSpU0JAnrdBYppuY5rLkRUYPU1yCryb24SsKBTL/hlDQAEFVwtZg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "esbuild": "~0.25.0", "get-tsconfig": "^4.7.5" @@ -6267,7 +6651,6 @@ "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -6306,7 +6689,6 @@ "integrity": "sha512-i7qRCmY42zmCwnYlh9H2SvLEypEFGye5iRmEMKjcGi7zk9UquigRjFtTLz0TYqr0ZGLZhaMHl/foy1bZR+Cwlw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "pathe": "^2.0.3" } @@ -6335,7 +6717,6 @@ "integrity": "sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", @@ -6452,7 +6833,6 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -6466,7 +6846,6 @@ "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@types/chai": "^5.2.2", "@vitest/expect": "3.2.4", @@ -6547,6 +6926,16 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz", + "integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==", + "dev": true, + "license": "MIT", + 
"engines": { + "node": ">= 14" + } + }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", @@ -6604,7 +6993,6 @@ "dev": true, "hasInstallScript": true, "license": "Apache-2.0", - "peer": true, "bin": { "workerd": "bin/workerd" }, @@ -6789,7 +7177,6 @@ "dev": true, "hasInstallScript": true, "license": "Apache-2.0", - "peer": true, "bin": { "workerd": "bin/workerd" }, diff --git a/package.json b/package.json index a6d512b2..c395782e 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ "@types/node": "^25.0.3", "@typescript-eslint/eslint-plugin": "^8.52.0", "@typescript-eslint/parser": "^8.32.1", + "cloudflare": "^5.2.0", "eslint": "^9.39.2", "eslint-config-prettier": "^10.1.8", "eslint-plugin-prettier": "^5.5.4", diff --git a/scripts/build-directory-cache.mjs b/scripts/build-directory-cache.mjs new file mode 100755 index 00000000..0fd05fe0 --- /dev/null +++ b/scripts/build-directory-cache.mjs @@ -0,0 +1,87 @@ +#!/usr/bin/env node + +/** + * This is probably not the script you want to run. + * + * This script builds the directory cache from scratch, with an optional + * ability to purge the contents of the cache before doing so. + * + * This script should only be ran when there isn't already a directory cache or + * the current directory cache is corrupted in some way. For other situations, + * try `./update-directory-cache.mjs`. + * + * Usage: build-directory-cache.mjs [--purge] + * --purge: Use with caution. It will delete all elements in the cache and + * then rebuild it. 
+ */ + +import { + DIRECTORY_CACHE_NAMESPACE_ID, + PROD_BUCKET, + RELEASE_DIR, +} from './constants.mjs'; +import { addSymlinksToDirectoryCache } from './utils/addSymlinksToDirectoryCache.mjs'; +import { getLatestVersionMapping } from './utils/getLatestVersionMapping.mjs'; +import { + createCloudflareClient, + deleteKvNamespaceKeys, + listKvNamespace, + writeKeysToKv, +} from './utils/kv.mjs'; +import { createS3Client, listR2DirectoryRecursive } from './utils/r2.mjs'; + +// Ensure all necessary environment variables are set +for (const envVar of [ + 'CLOUDFLARE_API_TOKEN', + 'S3_ACCESS_KEY_ID', + 'S3_ACCESS_KEY_SECRET', +]) { + if (!process.env[envVar]) { + throw new TypeError(`${envVar} missing from process.env`); + } +} + +// Create the clients we need to access the data +const s3Client = createS3Client( + process.env.S3_ACCESS_KEY_ID, + process.env.S3_ACCESS_KEY_SECRET +); + +const cfClient = createCloudflareClient(); + +// List the entire R2 bucket +const directories = await listR2DirectoryRecursive(s3Client, PROD_BUCKET); +console.log(`Listed ${directories.size} directories from ${PROD_BUCKET}`); + +if (!directories.get(RELEASE_DIR)) { + throw new TypeError( + `release directory ${RELEASE_DIR} missing in ${PROD_BUCKET}` + ); +} + +const latestVersions = await getLatestVersionMapping( + s3Client, + directories.get(RELEASE_DIR), + directories +); + +await addSymlinksToDirectoryCache(s3Client, directories, latestVersions); + +// Purge directory cache if wanted +if (process.argv[2] === '--purge') { + const cachedDirectoryKeys = await listKvNamespace( + cfClient, + DIRECTORY_CACHE_NAMESPACE_ID + ); + console.log(`Purging ${cachedDirectoryKeys.size} keys`); + + await deleteKvNamespaceKeys( + cfClient, + DIRECTORY_CACHE_NAMESPACE_ID, + Array.from(cachedDirectoryKeys) + ); +} + +// Update keys in the directory cache +console.log(`Writing ${directories.size} keys to directory cache...`); +await writeKeysToKv(cfClient, DIRECTORY_CACHE_NAMESPACE_ID, directories); diff 
--git a/scripts/build-r2-symlinks.mjs b/scripts/build-r2-symlinks.mjs deleted file mode 100644 index 0c9783d0..00000000 --- a/scripts/build-r2-symlinks.mjs +++ /dev/null @@ -1,320 +0,0 @@ -#!/usr/bin/env node -'use strict'; - -import { basename, dirname, join } from 'node:path'; -import { readFile, writeFile } from 'node:fs/promises'; -import { - HeadObjectCommand, - ListObjectsV2Command, - S3Client, -} from '@aws-sdk/client-s3'; -import { Linker } from 'nodejs-latest-linker/common.js'; -import { DOCS_DIR, ENDPOINT, PROD_BUCKET, RELEASE_DIR } from './constants.mjs'; - -const DOCS_DIRECTORY_OUT = join( - import.meta.dirname, - '..', - 'src', - 'constants', - 'docsDirectory.json' -); - -const LATEST_VERSIONS_OUT = join( - import.meta.dirname, - '..', - 'src', - 'constants', - 'latestVersions.json' -); - -const CACHED_DIRECTORIES_OUT = join( - import.meta.dirname, - '..', - 'src', - 'constants', - 'cachedDirectories.json' -); - -const FILE_SYMLINKS = join( - import.meta.dirname, - '..', - 'src', - 'constants', - 'fileSymlinks.json' -); - -if (!process.env.CF_ACCESS_KEY_ID) { - throw new TypeError('CF_ACCESS_KEY_ID missing'); -} - -if (!process.env.CF_SECRET_ACCESS_KEY) { - throw new TypeError('CF_SECRET_ACCESS_KEY missing'); -} - -const client = new S3Client({ - endpoint: ENDPOINT, - region: 'auto', - credentials: { - accessKeyId: process.env.CF_ACCESS_KEY_ID, - secretAccessKey: process.env.CF_SECRET_ACCESS_KEY, - }, -}); - -// Cache the contents of `nodejs/docs/` so we can reference it in the worker -await writeDocsDirectoryFile(client); - -// Grab all of the files & directories in `nodejs/release/` -const releases = await listDirectory(client, RELEASE_DIR); - -// Create the latest version mapping with the contents of `nodejs/release/` -const latestVersions = await getLatestVersionMapping(client, releases); - -// Write it so we can use it in the worker -await writeFile(LATEST_VERSIONS_OUT, JSON.stringify(latestVersions)); - -// Filter the latest version map so we 
only have the `latest-*` directories -const latestVersionDirectories = Object.keys(latestVersions) - .filter(version => version !== 'node-latest.tar.gz') - .map(version => `${version}/`); - -// Create the complete listing of `nodejs/release/` by adding what R2 returned -// and the latest version directories (which are the symlinks) -const releaseDirectorySubdirectories = releases.subdirectories - .concat(latestVersionDirectories) - .sort(); - -// This is the path in R2 for the latest tar archive of Node. -const nodeLatestPath = `nodejs/release/${latestVersions['node-latest.tar.gz'].replaceAll('latest', latestVersions['latest'])}`; - -// Stat the file that `node-latest.tar.gz` points to so we can have accurate -// size & last modified info for the directory listing -const nodeLatest = await headFile(client, nodeLatestPath); -if (!nodeLatest) { - throw new TypeError( - `node-latest.tar.gz points to ${latestVersions['node-latest.tar.gz']} which doesn't exist in the prod bucket` - ); -} - -/** - * Preprocess these directories since they have symlinks in them that don't - * actually exist in R2 but need to be present when we give a directory listing - * result - * @type {Record} - */ -const cachedDirectories = { - 'nodejs/release/': { - subdirectories: releaseDirectorySubdirectories, - hasIndexHtmlFile: false, - files: [ - ...releases.files, - { - name: 'node-latest.tar.gz', - lastModified: nodeLatest.lastModified, - size: nodeLatest.size, - }, - ], - lastModified: releases.lastModified, - }, - 'nodejs/docs/': { - // We reuse the releases listing result here instead of listing the docs - // directory since it's more complete. The docs folder does have some actual - // directories in it, but most of it is symlinks and aren't present in R2. 
- subdirectories: releaseDirectorySubdirectories, - hasIndexHtmlFile: false, - files: [], - lastModified: releases.lastModified, - }, -}; - -// Some older versions of Node exist in `nodejs/release/` and have folders -// with symlinks to them. For example, node-v0.1.100.tar.gz lives under -// `nodejs/release/`, but there's also `nodejs/release/v0.1.100/node-v0.1.100.tar.gz` -// which is just a symlink to it. -// Let's add these to our cached directories. -const fileSymlinks = JSON.parse(await readFile(FILE_SYMLINKS, 'utf8')); - -// Delete this for now, we'll add it back again later -delete fileSymlinks['node-config-schema.json']; - -for (const file of Object.keys(fileSymlinks)) { - // Stat the actual file so we can get it's size, last modified - const actualFile = await headFile(client, fileSymlinks[file]); - - const directory = `${dirname(file)}/`; - - if (directory in cachedDirectories) { - // Directory was already cached, let's just append the file to the result - cachedDirectories[directory].files.push({ - ...actualFile, - name: basename(file), - }); - } else { - // List the directory that the symlink is in so we can append the symlink to - // what's actually there - const contents = await listDirectory(client, directory); - contents.files.push({ - ...actualFile, - name: basename(file), - }); - - cachedDirectories[directory] = contents; - } -} - -await writeFile(CACHED_DIRECTORIES_OUT, JSON.stringify(cachedDirectories)); - -// Update the node-config-schema.json file symlink to point to the latest -// version -fileSymlinks['node-config-schema.json'] = - `${RELEASE_DIR}${latestVersions['latest']}/docs/node-config-schema.json`; - -await writeFile(FILE_SYMLINKS, JSON.stringify(fileSymlinks)); - -/** - * @param {S3Client} client - * @param {string} directory - * @returns {Promise} - */ -async function listDirectory(client, directory) { - /** - * @type {Array} - */ - const subdirectories = []; - - /** - * @type {Array} - */ - const files = []; - - let hasIndexHtmlFile 
= false; - - let truncated = true; - let continuationToken; - let lastModified = new Date(0); - while (truncated) { - const data = await client.send( - new ListObjectsV2Command({ - Bucket: PROD_BUCKET, - Delimiter: '/', - Prefix: directory, - ContinuationToken: continuationToken, - }) - ); - - if (data.CommonPrefixes) { - data.CommonPrefixes.forEach(value => { - if (value.Prefix) { - subdirectories.push(value.Prefix.substring(directory.length)); - } - }); - } - - if (data.Contents) { - data.Contents.forEach(value => { - if (value.Key) { - if (value.Key.match(/index.htm(?:l)$/)) { - hasIndexHtmlFile = true; - } - - files.push({ - name: value.Key.substring(directory.length), - lastModified: value.LastModified, - size: value.Size, - }); - - if (value.LastModified > lastModified) { - lastModified = value.LastModified; - } - } - }); - } - - truncated = data.IsTruncated; - continuationToken = data.NextContinuationToken; - } - - return { subdirectories, hasIndexHtmlFile, files, lastModified }; -} - -/** - * @param {S3Client} client - * @param {string} path - * @returns {Promise} - */ -async function headFile(client, path) { - const data = await client.send( - new HeadObjectCommand({ - Bucket: PROD_BUCKET, - Key: path, - }) - ); - - if (!data.LastModified || !data.ContentLength) { - return undefined; - } - - return { - name: path, - lastModified: data.LastModified, - size: data.ContentLength, - }; -} - -/** - * @param {S3Client} client - */ -async function writeDocsDirectoryFile(client) { - // Grab all of the directories in `nodejs/docs/` - const docs = await listDirectory(client, DOCS_DIR); - - // Cache the contents of `nodejs/docs/` so we can refer to it in the worker w/o - // making a call to R2. 
- await writeFile( - DOCS_DIRECTORY_OUT, - JSON.stringify( - docs.subdirectories.map(subdirectory => - subdirectory.substring(0, subdirectory.length - 1) - ) - ) - ); -} - -/** - * @param {S3Client} client - * @param {import('../src/providers/provider.js').ReadDirectoryResult} releases - * @returns {Promise>} - */ -async function getLatestVersionMapping(client, releases) { - const linker = new Linker({ baseDir: RELEASE_DIR, docs: DOCS_DIR }); - - /** - * Creates mappings to the latest versions of Node - * @type {Map} - * @example { 'nodejs/release/latest-v20.x': 'nodejs/release/v20.x.x' } - */ - const links = await linker.getLinks( - [...releases.subdirectories, ...releases.files.map(file => file.name)], - async directory => { - const { subdirectories, files } = await listDirectory( - client, - `${directory}/` - ); - return [...subdirectories, ...files.map(file => file.name)]; - } - ); - - /** - * @type {Record} - * @example {'latest-v20.x': 'v20.x.x'} - */ - const latestVersions = {}; - - for (const [key, value] of links) { - const trimmedKey = key.substring(RELEASE_DIR.length); - const trimmedValue = value.substring(RELEASE_DIR.length); - - latestVersions[trimmedKey] = trimmedValue; - } - - return latestVersions; -} diff --git a/scripts/constants.mjs b/scripts/constants.mjs index 7c5eb4fe..b2759cd1 100644 --- a/scripts/constants.mjs +++ b/scripts/constants.mjs @@ -1,20 +1,59 @@ -'use strict'; - import { dirname, join } from 'node:path'; import { readdir, readFile, stat } from 'node:fs/promises'; -export const ENDPOINT = - process.env.ENDPOINT ?? - 'https://07be8d2fbc940503ca1be344714cb0d1.r2.cloudflarestorage.com'; +export const CLOUDFLARE_ACCOUNT_ID = + process.env.CLOUDFLARE_ACCOUNT_ID ?? '07be8d2fbc940503ca1be344714cb0d1'; -export const PROD_BUCKET = process.env.PROD_BUCKET ?? 'dist-prod'; +export const R2_ENDPOINT = + process.env.R2_ENDPOINT ?? 
+ `https://${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`; -export const STAGING_BUCKET = process.env.STAGING_BUCKET ?? 'dist-staging'; +/** + * The id of the KV namespace used for caching directories + */ +export const DIRECTORY_CACHE_NAMESPACE_ID = + process.env.DIRECTORY_CACHE_NAMESPACE_ID ?? 'TODO'; -export const R2_RETRY_COUNT = 3; +export const PROD_BUCKET = process.env.PROD_BUCKET ?? 'dist-prod'; +export const STAGING_BUCKET = process.env.STAGING_BUCKET ?? 'dist-staging'; export const RELEASE_DIR = 'nodejs/release/'; - export const DOCS_DIR = 'nodejs/docs/'; +export const NODE_LATEST_FILE_NAME = 'node-latest.tar.gz'; + export const DEV_BUCKET_PATH = join(import.meta.dirname, '..', 'dev-bucket'); +export const CACHED_DIRECTORIES_PATH = join( + import.meta.dirname, + '..', + 'src', + 'constants', + 'cachedDirectories.json' +); +export const DOCS_DIRECTORY_PATH = join( + import.meta.dirname, + '..', + 'src', + 'constants', + 'docsDirectory.json' +); +export const LATEST_VERSIONS_MAPPING_PATH = join( + import.meta.dirname, + '..', + 'src', + 'constants', + 'latestVersions.json' +); +export const STATIC_FILE_SYMLINKS_PATH = join( + import.meta.dirname, + '..', + 'src', + 'constants', + 'fileSymlinks.json' +); + +/** + * Paths in the R2 bucket that we should always be updating whenever a new + * version releases. + */ +export const ALWAYS_UPDATED_PATHS = [RELEASE_DIR, DOCS_DIR]; diff --git a/scripts/update-directory-cache.mjs b/scripts/update-directory-cache.mjs new file mode 100644 index 00000000..14c85116 --- /dev/null +++ b/scripts/update-directory-cache.mjs @@ -0,0 +1,90 @@ +#!/usr/bin/env node + +/** + * Update the most commonly updated paths in the directory cache for a Node.js + * release. 
+ * + * Usage: ./update-directory-cache.mjs + * Example: ./update-directory-cache.mjs v24.0.0 + */ + +import { listR2Directory } from '../common/listR2Directory.mjs'; +import { + ALWAYS_UPDATED_PATHS, + DIRECTORY_CACHE_NAMESPACE_ID, + PROD_BUCKET, + RELEASE_DIR, +} from './constants.mjs'; +import { addSymlinksToDirectoryCache } from './utils/addSymlinksToDirectoryCache.mjs'; +import { getLatestVersionMapping } from './utils/getLatestVersionMapping.mjs'; +import { createCloudflareClient, writeKeysToKv } from './utils/kv.mjs'; +import { createS3Client, listR2DirectoryRecursive } from './utils/r2.mjs'; + +const VERSION = process.argv[2].toLowerCase(); + +if (!VERSION) { + throw new TypeError('version missing from args'); +} + +if (!VERSION.startsWith('v')) { + throw new TypeError( + 'provided version not in correct format, expected vX.X.X' + ); +} + +// Ensure all necessary environment variables are set +for (const envVar of [ + 'CLOUDFLARE_API_TOKEN', + 'S3_ACCESS_KEY_ID', + 'S3_ACCESS_KEY_SECRET', +]) { + if (!process.env[envVar]) { + throw new TypeError(`${envVar} missing from process.env`); + } +} + +// Create the clients that we need for fetching the data +const s3Client = createS3Client( + process.env.S3_ACCESS_KEY_ID, + process.env.S3_ACCESS_KEY_SECRET +); + +const cfClient = createCloudflareClient(); + +// List the directory of the new release (and subdirectories) +const directories = await listR2DirectoryRecursive( + s3Client, + PROD_BUCKET, + `${RELEASE_DIR}${VERSION}/` +); + +// Non-recursively list the paths that we always want to update +await Promise.all( + ALWAYS_UPDATED_PATHS.map(path => + listR2Directory(s3Client, PROD_BUCKET, path) + .then(value => { + if (value === undefined) { + throw new TypeError( + `directory '${path}' not found in '${PROD_BUCKET}'` + ); + } + + directories.set(path, value); + }) + .catch(err => { + throw err; + }) + ) +); + +const latestVersions = await getLatestVersionMapping( + s3Client, + directories.get(RELEASE_DIR), + 
directories +); + +await addSymlinksToDirectoryCache(s3Client, directories, latestVersions); + +// Update keys in the directory cache +console.log(`Writing ${directories.size} keys to directory cache...`); +await writeKeysToKv(cfClient, DIRECTORY_CACHE_NAMESPACE_ID, directories); diff --git a/scripts/utils/addSymlinksToDirectoryCache.mjs b/scripts/utils/addSymlinksToDirectoryCache.mjs new file mode 100644 index 00000000..e79bde1f --- /dev/null +++ b/scripts/utils/addSymlinksToDirectoryCache.mjs @@ -0,0 +1,245 @@ +import { readFile, writeFile } from 'node:fs/promises'; +import { basename, dirname } from 'node:path'; +import { S3Client } from '@aws-sdk/client-s3'; +import { + DOCS_DIR, + NODE_LATEST_FILE_NAME, + PROD_BUCKET, + RELEASE_DIR, + STATIC_FILE_SYMLINKS_PATH, +} from '../constants.mjs'; +import { headR2File } from './r2.mjs'; +import { CACHED_DIRECTORIES_PATH, DOCS_DIRECTORY_PATH } from '../constants.mjs'; +import { listR2Directory } from '../../common/listR2Directory.mjs'; + +/** + * Adds the `nodejs/release/node-latest.tar.gz` symlink to the directory cache + * @param {S3Client} client + * @param {import('../../src/providers/provider').ReadDirectoryResult} releaseDirectory + * @param {import('./getLatestVersionMapping.mjs').LatestVersionMapping} latestVersionMap + * @param {string} latestVersion + */ +export async function addLatestTarSymlinkToCache( + client, + releaseDirectory, + latestVersionMap, + latestVersion +) { + const nodeLatestPath = `${RELEASE_DIR}${latestVersionMap[NODE_LATEST_FILE_NAME].replaceAll('latest', latestVersion)}`; + + // Stat the file that `node-latest.tar.gz` points to so we can have accurate + // size & last modified info for the directory listing. Also acts as a safety + // check so this doesn't point to an invalid path without us noticing. 
+ const nodeLatest = await headR2File(client, PROD_BUCKET, nodeLatestPath); + if (!nodeLatest) { + throw new TypeError( + `couldn't map node-latest.tar.gz: path '${nodeLatestPath}' doesn't exist in the '${PROD_BUCKET}'` + ); + } + + releaseDirectory.files.push({ + name: NODE_LATEST_FILE_NAME, + lastModified: nodeLatest.lastModified, + size: nodeLatest.size, + }); +} + +/** + * Adds the `latest-*` directory symlinks to the directory cache + * @param {import('../../src/providers/provider').ReadDirectoryResult} releaseDirectory + * @param {import('./getLatestVersionMapping.mjs').LatestVersionMapping} latestVersionMap + */ +export function addLatestDirectorySymlinksToCache( + releaseDirectory, + latestVersionMap +) { + releaseDirectory.subdirectories.push( + ...Object.keys(latestVersionMap) + // We only want directories, remove the only file in the map + .filter(version => version !== NODE_LATEST_FILE_NAME) + .map(version => `${version}/`) + ); + releaseDirectory.subdirectories.sort(); +} + +/** + * Manually override the contents of `nodejs/docs` to be a union of what's + * actually in that directory as well as the versions present in + * `nodejs/release/`. 
+ * @param {import('../../src/providers/provider').ReadDirectoryResult} releaseDirectory + * @param {import('../../src/providers/provider').ReadDirectoryResult | undefined} docsDirectory + */ +export async function addVersionSymlinksToCachedDocsDirectory( + releaseDirectory, + docsDirectory +) { + if (!docsDirectory) { + return; + } + + // Cache the current contents of the directory so we know which directories + // actually exist and which are symlinks + // TODO: look into a better/more general way of handling this + await writeFile( + DOCS_DIRECTORY_PATH, + JSON.stringify( + docsDirectory.subdirectories.map(subdirectory => + // Trim trailing `/` + subdirectory.substring(0, subdirectory.length - 1) + ), + null, + 2 + ) + ); + + // We only want the `vX.X.X/` and `latest-X/` directories + const versionDirectories = releaseDirectory.subdirectories.filter( + subdirectory => + subdirectory.startsWith('v') || subdirectory.startsWith('latest') + ); + + docsDirectory.subdirectories = Array.from( + new Set([...docsDirectory.subdirectories, ...versionDirectories]) + ).sort(); +} + +/** + * Add the static file symlinks to the directory cache + * @param {S3Client} client + * @param {import('./r2.mjs').DirectoryListMapping} cachedDirectories + * @param {string} latestVersion + */ +export async function addStaticFileSymlinksToCache( + client, + cachedDirectories, + latestVersion +) { + // Keep list of directories we modify here to include in cachedDirectories.json + // TODO: remove when KV is considered stable + const directoriesIncludedInFileCache = new Set([RELEASE_DIR, DOCS_DIR]); + + const fileSymlinksFile = await readFile(STATIC_FILE_SYMLINKS_PATH, 'utf8'); + + /** + * @type {Record} + */ + const fileSymlinks = JSON.parse(fileSymlinksFile); + + // This file is an exception for two reasons: + // 1. It's dynamic and points to the latest Node version + // 2. 
It's handled specially and is technically in the root directory, + // which we should never be listing thus don't need to add it to the + // directory cache. + // So, let's remove it then add it back once we're done handling the rest of + // the static file symlinks. + fileSymlinks['node-config-schema.json'] = undefined; + + // Add the symlinks to the directory cache + for (const symlink of Object.keys(fileSymlinks)) { + const sourceFilePath = fileSymlinks[symlink]; + + if (!sourceFilePath) { + // Hit a file we don't want to stat, ignore it + continue; + } + + // Stat the source file so we can get its size, last modified + const sourceFile = await headR2File( + client, + PROD_BUCKET, + fileSymlinks[symlink], + cachedDirectories + ); + + if (!sourceFile) { + throw new TypeError( + `symlink '${symlink}' points to invalid file '${sourceFile}'` + ); + } + + const directoryPath = `${dirname(symlink)}/`; + directoriesIncludedInFileCache.add(directoryPath); + + let directory = cachedDirectories.get(directoryPath); + if (!directory) { + directory = await listR2Directory(client, PROD_BUCKET, directoryPath); + cachedDirectories.set(directoryPath, directory); + } + + directory.files.push({ + ...sourceFile, + name: basename(symlink), + }); + } + + // Update the node-config-schema.json file symlink to point to the latest + // version + fileSymlinks['node-config-schema.json'] = + `${RELEASE_DIR}${latestVersion}/docs/node-config-schema.json`; + + // Update file so it can be used in the worker + await writeFile( + STATIC_FILE_SYMLINKS_PATH, + JSON.stringify(fileSymlinks, null, 2) + ); + + // Update the cached directories file + // TODO: remove when KV is considered stable + const fileContents = {}; + directoriesIncludedInFileCache.forEach(directory => { + fileContents[directory] = cachedDirectories.get(directory); + }); + + await writeFile( + CACHED_DIRECTORIES_PATH, + JSON.stringify(fileContents, null, 2) + ); +} + +/** + * R2 doesn't have symlinks, but we make heavy use of them. 
So, we need to add + * them into the cache ourselves. + * + * We use symlinks to point to both files and directories. + * + * The file symlinks are more or less static. There are only two that are + * constantly updated with each release: + * - `nodejs/release/node-latest.tar.gz` + * - `node-config-schema.json` + * + * The directory symlinks are dynamic with a few exceptions for a select number + * of directories in `nodejs/docs/` + * + * @param {S3Client} client + * @param {import('./r2.mjs').DirectoryListMapping} cachedDirectories + * @param {import('./getLatestVersionMapping.mjs').LatestVersionMapping} latestVersionMap + */ +export async function addSymlinksToDirectoryCache( + client, + cachedDirectories, + latestVersionMap +) { + const releaseDirectory = cachedDirectories.get(RELEASE_DIR); + if (!releaseDirectory) { + console.warn(`Release directory not found at path ${RELEASE_DIR}`); + return; + } + + const latestVersion = latestVersionMap['latest']; + + await addLatestTarSymlinkToCache( + client, + releaseDirectory, + latestVersionMap, + latestVersion + ); + + addLatestDirectorySymlinksToCache(releaseDirectory, latestVersionMap); + + await addVersionSymlinksToCachedDocsDirectory( + releaseDirectory, + cachedDirectories.get(DOCS_DIR) + ); + + await addStaticFileSymlinksToCache(client, cachedDirectories, latestVersion); +} diff --git a/scripts/utils/addSymlinksToDirectoryCache.test.ts b/scripts/utils/addSymlinksToDirectoryCache.test.ts new file mode 100644 index 00000000..a816fa6a --- /dev/null +++ b/scripts/utils/addSymlinksToDirectoryCache.test.ts @@ -0,0 +1,180 @@ +import { describe, expect, test, vi } from 'vitest'; +import { type HeadObjectCommand } from '@aws-sdk/client-s3'; +import { + DOCS_DIRECTORY_PATH, + PROD_BUCKET, + RELEASE_DIR, +} from '../constants.mjs'; +import { ReadDirectoryResult } from '../../src/providers/provider'; +import { + addLatestDirectorySymlinksToCache, + addLatestTarSymlinkToCache, + addVersionSymlinksToCachedDocsDirectory, +} 
from './addSymlinksToDirectoryCache.mjs'; +import { LatestVersionMapping } from './getLatestVersionMapping.mjs'; + +describe('addLatestTarSymlinkToCache', () => { + test('adds `node-latest.tar.gz` to release directory', async () => { + const now = new Date(); + const fileSize = 67; + + const client = { + async send(cmd: HeadObjectCommand) { + expect(cmd.input.Bucket).toStrictEqual(PROD_BUCKET); + expect(cmd.input.Key).toStrictEqual( + `${RELEASE_DIR}v20.0.0/node-v20.0.0.tar.gz` + ); + + return { + LastModified: now, + ContentLength: fileSize, + }; + }, + }; + + const releaseDirectory: ReadDirectoryResult = { + subdirectories: [], + hasIndexHtmlFile: false, + files: [], + lastModified: new Date(), + }; + + await addLatestTarSymlinkToCache( + // @ts-expect-error don't need full s3 client here + client, + releaseDirectory, + { + 'node-latest.tar.gz': 'latest/node-v20.0.0.tar.gz', + }, + 'v20.0.0' + ); + + expect(releaseDirectory.files).toContainEqual({ + name: 'node-latest.tar.gz', + lastModified: now, + size: fileSize, + }); + }); + + test("throws error if latest node tar doesn't exist", async () => { + const client = { + async send() { + return {}; + }, + }; + + const releaseDirectory: ReadDirectoryResult = { + subdirectories: [], + hasIndexHtmlFile: false, + files: [], + lastModified: new Date(), + }; + + const result = addLatestTarSymlinkToCache( + // @ts-expect-error don't need full s3 client here + client, + releaseDirectory, + { + 'node-latest.tar.gz': 'latest/node-v20.0.0.tar.gz', + }, + 'v20.0.0' + ); + + await expect(result).rejects.toThrowError(); + }); +}); + +test('addLatestDirectorySymlinksToCache', () => { + const now = new Date(); + + const releaseDirectory: ReadDirectoryResult = { + subdirectories: ['v20.1.2/', 'v22.2.1/', 'v19.0.0/'], + hasIndexHtmlFile: false, + files: [], + lastModified: now, + }; + + const latestVersionMap: LatestVersionMapping = { + 'latest-v20.x': 'v20.1.2', + 'latest-v22.x': 'v22.2.1', + 'node-latest.tar.gz': 
'latest/node-v22.2.1', + }; + + addLatestDirectorySymlinksToCache(releaseDirectory, latestVersionMap); + + expect(releaseDirectory).toStrictEqual({ + subdirectories: [ + 'latest-v20.x/', + 'latest-v22.x/', + 'v19.0.0/', + 'v20.1.2/', + 'v22.2.1/', + ], + hasIndexHtmlFile: false, + files: [], + lastModified: now, + }); +}); + +test('addVersionSymlinksToCachedDocsDirectory', async () => { + const now = new Date(); + + const releaseDirectory: ReadDirectoryResult = { + subdirectories: [ + 'latest-v20.x/', + 'latest-v22.x/', + 'v19.0.0/', + 'v20.1.2/', + 'v22.2.1/', + 'npm/', + ], + hasIndexHtmlFile: false, + files: [ + { + name: 'index.json', + lastModified: now, + size: 1, + }, + ], + lastModified: now, + }; + + const docsDirectory: ReadDirectoryResult = { + subdirectories: ['v0.0.1/'], + hasIndexHtmlFile: false, + files: [], + lastModified: now, + }; + + vi.mock('node:fs/promises', async importOriginal => { + const original = await importOriginal(); + + return { + // @ts-expect-error + ...original, + writeFile: async (file: string, data: string) => { + expect(file).toStrictEqual(DOCS_DIRECTORY_PATH); + expect(JSON.parse(data)).toStrictEqual(['v0.0.1']); + }, + }; + }); + + await addVersionSymlinksToCachedDocsDirectory( + releaseDirectory, + docsDirectory + ); + + expect(docsDirectory).toStrictEqual({ + subdirectories: [ + 'latest-v20.x/', + 'latest-v22.x/', + 'v0.0.1/', + 'v19.0.0/', + 'v20.1.2/', + 'v22.2.1/', + ], + hasIndexHtmlFile: false, + files: [], + lastModified: now, + }); +}); diff --git a/scripts/utils/getLatestVersionMapping.mjs b/scripts/utils/getLatestVersionMapping.mjs new file mode 100644 index 00000000..b7c1a2e0 --- /dev/null +++ b/scripts/utils/getLatestVersionMapping.mjs @@ -0,0 +1,68 @@ +import { S3Client } from '@aws-sdk/client-s3'; +import { Linker } from 'nodejs-latest-linker/common.js'; +import { + DOCS_DIR, + LATEST_VERSIONS_MAPPING_PATH, + PROD_BUCKET, + RELEASE_DIR, +} from '../constants.mjs'; +import { writeFile } from 'node:fs/promises'; 
+import { listR2Directory } from '../../common/listR2Directory.mjs'; + +/** + * @typedef {Record} LatestVersionMapping + * + * @param {S3Client} client + * @param {import('../../src/providers/provider.js').ReadDirectoryResult} releases + * @param {import('./r2.mjs').DirectoryListMapping} cachedDirectories + * @returns {Promise} + */ +export async function getLatestVersionMapping( + client, + releases, + cachedDirectories +) { + const linker = new Linker({ baseDir: RELEASE_DIR, docs: DOCS_DIR }); + + /** + * Creates mappings to the latest versions of Node + * @type {Map} + * @example { 'nodejs/release/latest-v20.x': 'nodejs/release/v20.x.x' } + */ + const links = await linker.getLinks( + [...releases.subdirectories, ...releases.files.map(file => file.name)], + async directory => { + const r2Path = `${directory}/`; + + const listing = + cachedDirectories.get(r2Path) ?? + (await listR2Directory(client, PROD_BUCKET, r2Path)); + + return [ + ...listing.subdirectories, + ...listing.files.map(file => file.name), + ]; + } + ); + + /** + * @type {Record} + * @example {'latest-v20.x': 'v20.x.x'} + */ + const latestVersions = {}; + + for (const [key, value] of links) { + const trimmedKey = key.substring(RELEASE_DIR.length); + const trimmedValue = value.substring(RELEASE_DIR.length); + + latestVersions[trimmedKey] = trimmedValue; + } + + // Update the constants file + await writeFile( + LATEST_VERSIONS_MAPPING_PATH, + JSON.stringify(latestVersions, null, 2) + ); + + return latestVersions; +} diff --git a/scripts/utils/kv.mjs b/scripts/utils/kv.mjs new file mode 100644 index 00000000..618b4739 --- /dev/null +++ b/scripts/utils/kv.mjs @@ -0,0 +1,69 @@ +import Cloudflare from 'cloudflare'; +import { CLOUDFLARE_ACCOUNT_ID } from '../constants.mjs'; +import { KV_RETRY_LIMIT } from '../../common/limits.mjs'; + +export function createCloudflareClient() { + return new Cloudflare({ + maxRetries: KV_RETRY_LIMIT, + }); +} + +/** + * @param {import('cloudflare').Cloudflare} client + * 
@param {string} namespace + * @returns {Promise>} + */ +export async function listKvNamespace(client, namespace) { + /** + * @type {Set} + */ + const keys = new Set(); + + for await (const key of client.kv.namespaces.keys.list(namespace, { + account_id: CLOUDFLARE_ACCOUNT_ID, + })) { + keys.add(key.name); + } + + return keys; +} + +/** + * @param {import('cloudflare').Cloudflare} client + * @param {string} namespace + * @param {Map} values + */ +export async function writeKeysToKv(client, namespace, values) { + const keys = values.keys().toArray(); + + while (keys.length) { + // Can only write 10,000 keys at once + const batch = keys.splice(0, 10_000); + + await client.kv.namespaces.bulkUpdate(namespace, { + account_id: CLOUDFLARE_ACCOUNT_ID, + body: batch.map(key => ({ + key, + value: JSON.stringify(values.get(key)), + })), + }); + } +} + +/** + * Delete keys in a KV namespace + * @param {import('cloudflare').Cloudflare} client + * @param {string} namespace + * @param {Array} keys + */ +export async function deleteKvNamespaceKeys(client, namespace, keys) { + while (keys.length) { + // Can only delete 10,000 keys at once + const batch = keys.splice(0, 10_000); + + await client.kv.namespaces.bulkDelete(namespace, { + account_id: CLOUDFLARE_ACCOUNT_ID, + body: batch, + }); + } +} diff --git a/scripts/utils/r2.mjs b/scripts/utils/r2.mjs new file mode 100644 index 00000000..6bd90fae --- /dev/null +++ b/scripts/utils/r2.mjs @@ -0,0 +1,97 @@ +import { dirname } from 'node:path'; +import { HeadObjectCommand, S3Client } from '@aws-sdk/client-s3'; +import { R2_ENDPOINT } from '../constants.mjs'; +import { listR2Directory } from '../../common/listR2Directory.mjs'; + +/** + * @param {string} accessKeyId + * @param {string} secretAccessKey + * @returns {S3Client} + */ +export function createS3Client(accessKeyId, secretAccessKey) { + return new S3Client({ + endpoint: R2_ENDPOINT, + region: 'auto', + credentials: { + accessKeyId: accessKeyId, + secretAccessKey: secretAccessKey, 
+ * Recursively list the contents of an R2 bucket under a given directory.
+ *
+ * @typedef {Map<string, import('../../src/providers/provider.js').ReadDirectoryResult>} DirectoryListMapping
+ *
+ * @param {S3Client} client
+ * @param {string} bucket
+ * @param {string | undefined} [directory]
+ * @param {DirectoryListMapping} [listings]
+ * @returns {Promise<DirectoryListMapping>}
[cachedDirectories.json](./cachedDirectories.json) - Directories that have their listing result cached because they have symlinks in them. These are updated by the [build-r2-symlinks](../scripts/build-r2-symlinks.mjs) script. -- [docsDirectory.json](./docsDirectory.json) - The contents of `nodejs/docs/` in the `dist-prod` bucket. This is updated by the [build-r2-symlinks](../scripts/build-r2-symlinks.mjs) script. +- [cachedDirectories.json](./cachedDirectories.json) - Directories that have their listing result cached because they have symlinks in them. This file is updated automatically with each Node release. +- [docsDirectory.json](./docsDirectory.json) - The contents of `nodejs/docs/` in the `dist-prod` bucket. This file is updated automatically with each Node release. - [files.ts](./files.ts) - Constants related to files the worker serves. - [fileSymlinks.ts](./fileSymlinks.json) - Manually updated mapping of file symlinks that exist in the `dist-prod` bucket. -- [latestVersions.json] - Map of `latest-*` directories to their actual latest versions. This is updated by the [build-r2-symlinks](../scripts/build-r2-symlinks.mjs) script. -- [limits.ts](./limits.ts) - Hardcap limits that the worker shouldn't exceed. +- [latestVersions.json] - Map of `latest-*` directories to their actual latest versions. This file is updated automatically with each Node release. 
diff --git a/src/constants/contentTypeOverrides.json b/src/constants/contentTypeOverrides.json deleted file mode 100644 index 178c5980..00000000 --- a/src/constants/contentTypeOverrides.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "tab": "text/plain" -} diff --git a/src/constants/contentTypeOverrides.ts b/src/constants/contentTypeOverrides.ts new file mode 100644 index 00000000..f79d4591 --- /dev/null +++ b/src/constants/contentTypeOverrides.ts @@ -0,0 +1,3 @@ +export const CONTENT_TYPE_OVERRIDES = { + tab: 'text/plain', +} as const; diff --git a/src/env.ts b/src/env.ts index a929e029..9d5400dc 100644 --- a/src/env.ts +++ b/src/env.ts @@ -16,6 +16,16 @@ export interface Env { */ R2_BUCKET: R2Bucket; + /** + * KV namespace with cached directory listings + */ + DIRECTORY_CACHE: KVNamespace; + + /** + * Temp flag for whether or not to use KV instead of S3 + */ + USE_KV: boolean; + /** * Endpoint to hit when using the S3 api. */ diff --git a/src/providers/kvProvider.test.ts b/src/providers/kvProvider.test.ts new file mode 100644 index 00000000..ea493ffd --- /dev/null +++ b/src/providers/kvProvider.test.ts @@ -0,0 +1,90 @@ +import { describe, expect, test } from 'vitest'; +import { KvProvider } from './kvProvider'; + +describe('readDirectory', () => { + test('returns expected response when a directory exists', async () => { + const kvProvider = new KvProvider({ + ctx: { + env: { + DIRECTORY_CACHE: { + // @ts-expect-error don't need actual type info here + async get(path: string): Promise { + expect(path).toStrictEqual('some/file/path.json'); + + return { + subdirectories: [ + 'subdirectory1/', + 'subdirectory2/', + 'subdirectory3/', + ], + hasIndexHtmlFile: false, + files: [ + { + name: 'file1.txt', + lastModified: '2024-11-04T16:21:30.218Z', + size: 123, + }, + { + name: 'file2.txt', + lastModified: '2024-11-04T16:21:30.218Z', + size: 321, + }, + { + name: 'file3.txt', + lastModified: '2024-11-04T16:21:30.218Z', + size: 456, + }, + ], + lastModified: 
'2024-11-04T16:21:30.218Z', + }; + }, + }, + }, + }, + }); + + expect(await kvProvider.readDirectory('some/file/path.json')).toStrictEqual( + { + subdirectories: ['subdirectory1/', 'subdirectory2/', 'subdirectory3/'], + hasIndexHtmlFile: false, + files: [ + { + name: 'file1.txt', + lastModified: new Date('2024-11-04T16:21:30.218Z'), + size: 123, + }, + { + name: 'file2.txt', + lastModified: new Date('2024-11-04T16:21:30.218Z'), + size: 321, + }, + { + name: 'file3.txt', + lastModified: new Date('2024-11-04T16:21:30.218Z'), + size: 456, + }, + ], + lastModified: new Date('2024-11-04T16:21:30.218Z'), + } + ); + }); + + test('returns `undefined` for unknown directory', async () => { + const kvProvider = new KvProvider({ + ctx: { + env: { + DIRECTORY_CACHE: { + // @ts-expect-error don't need actual type info here + async get(): Promise { + return null; + }, + }, + }, + }, + }); + + expect( + await kvProvider.readDirectory('some/file/path.json') + ).toBeUndefined(); + }); +}); diff --git a/src/providers/kvProvider.ts b/src/providers/kvProvider.ts new file mode 100644 index 00000000..0cbfecec --- /dev/null +++ b/src/providers/kvProvider.ts @@ -0,0 +1,56 @@ +import { KV_RETRY_LIMIT } from '../../common/limits.mjs'; +import type { Context } from '../context'; +import { retryWrapper } from '../utils/provider'; +import type { + GetFileOptions, + GetFileResult, + HeadFileResult, + Provider, + ReadDirectoryResult, +} from './provider'; + +type KvProviderCtorOptions = { + ctx: Context; +}; + +export class KvProvider implements Provider { + #ctx: Context; + + constructor({ ctx }: KvProviderCtorOptions) { + this.#ctx = ctx; + } + + headFile(_: string): Promise { + throw new Error('Method not implemented.'); + } + + getFile(_: string, _2?: GetFileOptions): Promise { + throw new Error('Method not implemented.'); + } + + async readDirectory(path: string): Promise { + const result = await retryWrapper( + async () => { + return this.#ctx.env.DIRECTORY_CACHE.get( + path, + 'json' + ); 
+ }, + KV_RETRY_LIMIT, + this.#ctx.sentry + ); + + if (result === null) { + return undefined; + } + + // Convert last modified from a string to Date object + result.lastModified = new Date(result.lastModified); + + for (const file of result.files) { + file.lastModified = new Date(file.lastModified); + } + + return result; + } +} diff --git a/src/providers/provider.ts b/src/providers/provider.ts index 76529536..7f36d0da 100644 --- a/src/providers/provider.ts +++ b/src/providers/provider.ts @@ -12,10 +12,7 @@ export interface Provider { options?: GetFileOptions ): Promise; - readDirectory( - path: string, - options?: ReadDirectoryOptions - ): Promise; + readDirectory(path: string): Promise; } /** @@ -68,10 +65,6 @@ export type File = { size: number; }; -export type ReadDirectoryOptions = { - listFiles: boolean; -}; - export type ReadDirectoryResult = { subdirectories: string[]; hasIndexHtmlFile: boolean; diff --git a/src/providers/r2Provider.ts b/src/providers/r2Provider.ts index 226178ac..e86311c4 100644 --- a/src/providers/r2Provider.ts +++ b/src/providers/r2Provider.ts @@ -1,7 +1,7 @@ import { CACHE_HEADERS } from '../constants/cache'; -import { R2_RETRY_LIMIT } from '../constants/limits'; +import { R2_RETRY_LIMIT } from '../../common/limits.mjs'; import CACHED_DIRECTORIES from '../constants/cachedDirectories.json' assert { type: 'json' }; -import contentTypeOverrides from '../constants/contentTypeOverrides.json' assert { type: 'json' }; +import { CONTENT_TYPE_OVERRIDES } from '../constants/contentTypeOverrides'; import fileSymlinks from '../constants/fileSymlinks.json' assert { type: 'json' }; import type { Context } from '../context'; import { objectHasBody } from '../utils/object'; @@ -12,10 +12,10 @@ import type { HeadFileResult, HttpResponseHeaders, Provider, - ReadDirectoryOptions, ReadDirectoryResult, } from './provider'; import { S3Provider } from './s3Provider'; +import { KvProvider } from './kvProvider'; type CachedFile = { name: string; @@ -107,10 
+107,15 @@ export class R2Provider implements Provider { }; } - readDirectory( - path: string, - options?: ReadDirectoryOptions - ): Promise { + readDirectory(path: string): Promise { + if (this.ctx.env.USE_KV) { + const kvProvider = new KvProvider({ + ctx: this.ctx, + }); + + return kvProvider.readDirectory(path); + } + if (path in CACHED_DIRECTORIES) { const result: CachedDirectory = CACHED_DIRECTORIES[path as keyof typeof CACHED_DIRECTORIES]; @@ -137,7 +142,7 @@ export class R2Provider implements Provider { ctx: this.ctx, }); - return s3Provider.readDirectory(path, options); + return s3Provider.readDirectory(path); } } @@ -150,7 +155,9 @@ function r2MetadataToHeaders( const fileExtension = object.key.substring(object.key.lastIndexOf('.') + 1); const contentType = - contentTypeOverrides[fileExtension as keyof typeof contentTypeOverrides] ?? + CONTENT_TYPE_OVERRIDES[ + fileExtension as keyof typeof CONTENT_TYPE_OVERRIDES + ] ?? object.httpMetadata?.contentType ?? 'application/octet-stream'; diff --git a/src/providers/s3Provider.ts b/src/providers/s3Provider.ts index c4ba7636..a5e260af 100644 --- a/src/providers/s3Provider.ts +++ b/src/providers/s3Provider.ts @@ -1,18 +1,14 @@ -import { ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3'; +import { S3Client } from '@aws-sdk/client-s3'; import { FetchHttpHandler } from '@smithy/fetch-http-handler'; import type { Context } from '../context'; import type { - File, GetFileOptions, GetFileResult, HeadFileResult, Provider, - ReadDirectoryOptions, ReadDirectoryResult, } from './provider'; -import { retryWrapper } from '../utils/provider'; -import { R2_RETRY_LIMIT, S3_MAX_KEYS } from '../constants/limits'; -import { INDEX_HTML_REGEX } from '../constants/files'; +import { listR2Directory } from '../../common/listR2Directory.mjs'; type S3ProviderCtorOptions = { ctx: Context; @@ -26,13 +22,13 @@ type S3ProviderCtorOptions = { * directories. Using the S3 api was the recommended fix from the R2 team. 
*/ export class S3Provider implements Provider { - private ctx: Context; - private client: S3Client; + #ctx: Context; + #client: S3Client; constructor({ ctx }: S3ProviderCtorOptions) { - this.ctx = ctx; + this.#ctx = ctx; - this.client = new S3Client({ + this.#client = new S3Client({ region: 'auto', endpoint: ctx.env.S3_ENDPOINT, credentials: { @@ -54,77 +50,13 @@ export class S3Provider implements Provider { throw new Error('Method not implemented.'); } - async readDirectory( - path: string, - options?: ReadDirectoryOptions - ): Promise { - const directories = new Set(); - const files: File[] = []; + async readDirectory(path: string): Promise { + const result = await listR2Directory( + this.#client, + this.#ctx.env.BUCKET_NAME, + path + ); - // If false, don't include any files we find in the result. Defaults to - // true since there are more cases we want the files than not - const listFiles = options !== undefined ? options.listFiles : true; - - let hasIndexHtmlFile = false; - - let isTruncated = true; - let cursor: string | undefined; - let lastModified: Date = new Date(0); - while (isTruncated) { - const result = await retryWrapper( - async () => { - return this.client.send( - new ListObjectsV2Command({ - Bucket: this.ctx.env.BUCKET_NAME, - Prefix: path, - Delimiter: '/', - MaxKeys: S3_MAX_KEYS, - ContinuationToken: cursor, - }) - ); - }, - R2_RETRY_LIMIT, - this.ctx.sentry - ); - - result.CommonPrefixes?.forEach(directory => { - directories.add(directory.Prefix!.substring(path.length)); - }); - - if (listFiles) { - result.Contents?.forEach(object => { - if (INDEX_HTML_REGEX.test(object.Key!)) { - hasIndexHtmlFile = true; - } - - files.push({ - name: object.Key!.substring(path.length), - size: object.Size!, - lastModified: object.LastModified!, - }); - - if ( - lastModified === undefined || - object.LastModified! > lastModified - ) { - lastModified = object.LastModified!; - } - }); - } - - isTruncated = result.IsTruncated ?? 
false; - cursor = result.NextContinuationToken; - } - - if (directories.size === 0 && files.length === 0) { - return undefined; - } - - return { - subdirectories: Array.from(directories), - hasIndexHtmlFile, - files, - lastModified, - }; + return result; } } diff --git a/vitest-setup.ts b/vitest-setup.ts index b13c8493..22631200 100644 --- a/vitest-setup.ts +++ b/vitest-setup.ts @@ -1,7 +1,7 @@ // This file is used to setup things before we switch into a wranglerd isolate. // It is ran in Node.js and has access to Node's apis. -import { dirname, join } from 'node:path'; +import { join, relative } from 'node:path'; import { readdir, readFile, stat } from 'node:fs/promises'; import type { TestProject } from 'vitest/node'; @@ -33,7 +33,7 @@ export interface Directory { async function listDirectory(directoryPath: string): Promise { const directory: Directory = { - name: dirname(directoryPath), + name: relative(DEV_BUCKET_PATH, directoryPath), subdirectories: {}, files: {}, }; diff --git a/vitest.config.ts b/vitest.config.ts index dc2e3046..8c6cda84 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -10,6 +10,7 @@ export default defineWorkersConfig({ }, miniflare: { r2Buckets: ['R2_BUCKET'], + kvNamespaces: ['DIRECTORY_CACHE'], }, }, }, diff --git a/wrangler.jsonc b/wrangler.jsonc index ac12dda9..90d4fd7d 100644 --- a/wrangler.jsonc +++ b/wrangler.jsonc @@ -28,6 +28,7 @@ "workers_dev": true, "ENVIRONMENT": "staging", "CACHING": true, + "USE_KV": true, "LOG_ERRORS": false, "S3_ENDPOINT": "https://07be8d2fbc940503ca1be344714cb0d1.r2.cloudflarestorage.com", "BUCKET_NAME": "dist-prod", @@ -40,6 +41,12 @@ "bucket_name": "dist-prod", }, ], + "kv_namespaces": [ + { + "binding": "DIRECTORY_CACHE", + "id": "TODO", + }, + ], }, "prod": { "account_id": "07be8d2fbc940503ca1be344714cb0d1", @@ -64,6 +71,12 @@ "bucket_name": "dist-prod", }, ], + "kv_namespaces": [ + { + "binding": "DIRECTORY_CACHE", + "id": "TODO", + }, + ], }, }, }