diff --git a/.dockerignore b/.dockerignore
index 03faf034..6c9d86bd 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,11 +1,12 @@
 *
 
-# Project files
-!/bun.lock
-!/bunfig.toml
+!/lib/**
+!/src/**
+!/.npmrc
+!/deno.json
+!/deno.lock
 !/LICENSE
+!/mise.toml
 !/package.json
-!/tsconfig.json
-
-# SRC
-!/src/**
\ No newline at end of file
+!/rolldown.config.ts
+!/tsconfig.json
\ No newline at end of file
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..1d10a882
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,22 @@
+# https://spec.editorconfig.org/#supported-pairs
+
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_size = 2
+indent_style = space
+insert_final_newline = true
+max_line_length = 120
+tab_width = 2
+trim_trailing_whitespace = true
+
+[{*.json,*.jsonc}]
+insert_final_newline = false
+
+[{*.yaml,*.yml}]
+insert_final_newline = false
+
+[*.html]
+insert_final_newline = false
diff --git a/.env.example b/.env.example
index 7ca898c3..e45eff73 100644
--- a/.env.example
+++ b/.env.example
@@ -1,4 +1,5 @@
-#? Rename or copy this file to ".env" and set the variables there.
+#?
+#? Rename this file to ".env" and edit the values as needed.
 #?
 #?####################
 #? VARIABLE STRUCTURE:
@@ -6,35 +7,65 @@
 #? [ default ] : type < min - max >
 #?      ^         ^         ^
 #?      |         |         |
-#?      |         |         +---- RANGE between two values (these included)
+#?      |         |         +---- RANGE between two values (inclusive)
 #?      |         +-------------- TYPE of the variable
-#?      +------------------------ DEFAULT value applied if not set
+#?      +------------------------ DEFAULT value if not set
 #?
 #?###################
 #? COMMENT STRUCTURE:
 #?###################
-#? "#?#..." or "###..." are used to comment a section line.
-#? "#?" is used to comment a help line.
-#? "##" is used to comment a description line.
-#? "#" is used to comment a variable line.
+#? "#?#...", "###..." for section headers
+#? "#?" for help
+#? "##" for description
+#? "#" for variable definitions
 #?
-#? You should remove the comment on variable lines only if you want to set the variable.
 
 ##########
 ## SERVER:
 ##########
-## Set log verbosity [3]:integer
-#? (0=none <- 1=error <- 2=warn <- 3=info <- 4=debug)
-#LOGLEVEL=3
+## Log level: [3]:integer<0-4>
+#? 0=none, 1=error, 2=warn, 3=info, 4=debug
+#JSPB_LOG_VERBOSITY=3
+
+## Include timestamps in logs?: [true]:boolean
+#JSPB_LOG_TIME=true
 
-## Port for the server [4000]:integer
-#PORT=4000
+## Hostname to bind: [::]:string
+#JSPB_HOSTNAME=::
 
-## Is website served over HTTPS? [true]:boolean
-#TLS=true
+## Port to bind: [4000]:integer<0-65535>
+#JSPB_PORT=4000
 
 ############
 ## DOCUMENT:
 ############
-## Maximum document size in kilobytes [1024]:integer
-#DOCUMENT_MAXSIZE=1024
\ No newline at end of file
+## Maximum size per document: [1mb]:string
+#? 0=disabled, units: b/k(i)b/m(i)b/g(i)b/t(i)b
+#JSPB_DOCUMENT_SIZE=1mb
+
+## Compress document?: [true]:boolean
+#? It doesn't apply retroactively to existing documents.
+#JSPB_DOCUMENT_COMPRESSION=true
+
+## Delete documents older than: [0]:string
+#? 0=disabled, units: s/m/h/d/w/M/y
+#JSPB_DOCUMENT_AGE=0
+
+## Delete anonymous documents older than: [7d]:string
+#? 0=disabled, units: s/m/h/d/w/M/y
+#JSPB_DOCUMENT_ANONYMOUS_AGE=7d
+
+########
+## USER:
+########
+## Allow user registration?: [true]:boolean
+#? Root user can always create new users.
+#JSPB_USER_REGISTER=true
+
+## Restore the root user?: [false]:boolean
+#? Make sure to disable this again after successful recovery.
+#JSPB_USER_ROOT_RECOVERY=false + +######## +## TASK: +######## +## Cleanup task cron schedule: [0 1 * * *]:string +#? https://crontab.guru/#0_1_*_*_* +#JSPB_TASK_SWEEPER=0 1 * * * diff --git a/.github/renovate.json b/.github/renovate.json index ba56533d..95ed7709 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -1,14 +1,14 @@ { - "$schema": "https://docs.renovatebot.com/renovate-schema.json", - "extends": ["config:recommended"], - "lockFileMaintenance": { - "enabled": true, - "automerge": true - }, - "packageRules": [ - { - "matchUpdateTypes": ["patch"], - "automerge": true - } - ] + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": ["config:recommended", "customManagers:biomeVersions"], + "lockFileMaintenance": { + "enabled": true, + "automerge": true + }, + "packageRules": [ + { + "matchUpdateTypes": ["patch"], + "automerge": true + } + ] } diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index a20ae755..47e5babc 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -23,7 +23,7 @@ on: - build-release concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + group: ${{ github.workflow }} cancel-in-progress: false permissions: @@ -41,18 +41,18 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 + uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0 with: - egress-policy: audit + egress-policy: "audit" - - name: Setup Bun - uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2.0.1 + - name: Setup mise-en-place + uses: jdx/mise-action@146a28175021df8ca24f8ee1828cc2a60f980bd5 # v3.5.1 - name: Save context id: ctx env: - CTX_BRANCH: ${{ github.head_ref || github.ref_name }} - CTX_SHA: ${{ github.event.pull_request.head.sha || github.sha }} + CTX_BRANCH: "${{ github.head_ref || github.ref_name }}" + CTX_SHA: "${{ github.event.pull_request.head.sha || github.sha }}" run: | echo "branch=${CTX_BRANCH}" >>"$GITHUB_OUTPUT" echo "sha=${CTX_SHA}" >>"$GITHUB_OUTPUT" @@ -61,8 +61,8 @@ jobs: - name: Save tags id: tags env: - BRANCH: ${{ steps.ctx.outputs.branch }} - SHA_SHORT: ${{ steps.ctx.outputs.sha_short }} + BRANCH: "${{ steps.ctx.outputs.branch }}" + SHA_SHORT: "${{ steps.ctx.outputs.sha_short }}" run: | TIMESTAMP="$(date +%Y.%m.%d)" @@ -76,61 +76,46 @@ jobs: echo "extended=${TIMESTAMP}-${SHA_SHORT}" >>"$GITHUB_OUTPUT" - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: - persist-credentials: false - - - name: Install deps - run: bun install --frozen-lockfile + persist-credentials: "false" - name: Build artifact run: | - bun run build:server - - bun run build:standalone:darwin-arm64 - chmod 755 ./dist/server - tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ server | xz -z -6 >./dist/backend_${{ steps.tags.outputs.tag }}_darwin-arm64.tar.xz - tar -tJf ./dist/backend_${{ steps.tags.outputs.tag }}_darwin-arm64.tar.xz >/dev/null - - bun run build:standalone:linux-amd64-glibc - chmod 755 ./dist/server - tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ server | xz -z -6 >./dist/backend_${{ steps.tags.outputs.tag }}_linux-amd64-glibc.tar.xz - tar -tJf ./dist/backend_${{ steps.tags.outputs.tag }}_linux-amd64-glibc.tar.xz >/dev/null - - bun run build:standalone:linux-amd64-musl - chmod 755 ./dist/server - 
tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ server | xz -z -6 >./dist/backend_${{ steps.tags.outputs.tag }}_linux-amd64-musl.tar.xz - tar -tJf ./dist/backend_${{ steps.tags.outputs.tag }}_linux-amd64-musl.tar.xz >/dev/null - - bun run build:standalone:linux-arm64-glibc - chmod 755 ./dist/server - tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ server | xz -z -6 >./dist/backend_${{ steps.tags.outputs.tag }}_linux-arm64-glibc.tar.xz - tar -tJf ./dist/backend_${{ steps.tags.outputs.tag }}_linux-arm64-glibc.tar.xz >/dev/null - - bun run build:standalone:linux-arm64-musl - chmod 755 ./dist/server - tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ server | xz -z -6 >./dist/backend_${{ steps.tags.outputs.tag }}_linux-arm64-musl.tar.xz - tar -tJf ./dist/backend_${{ steps.tags.outputs.tag }}_linux-arm64-musl.tar.xz >/dev/null - - bun run build:standalone:windows-amd64 - chmod 755 ./dist/server.exe - zip -j -X -9 -l -o ./dist/backend_${{ steps.tags.outputs.tag }}_windows-amd64.zip .env.example LICENSE README.md ./dist/server.exe - zip -T ./dist/backend_${{ steps.tags.outputs.tag }}_windows-amd64.zip + mise run build:standalone:darwin-arm64 + chmod 755 ./dist/backend.darwin-arm64 + tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ backend.darwin-arm64 | xz -z -6 >./dist/backend-${{ steps.tags.outputs.tag }}_darwin-arm64.tar.xz + tar -tJf ./dist/backend-${{ steps.tags.outputs.tag }}_darwin-arm64.tar.xz >/dev/null + + mise run build:standalone:linux-amd64 + chmod 755 ./dist/backend.linux-amd64 + tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ backend.linux-amd64 | xz -z -6 >./dist/backend-${{ steps.tags.outputs.tag }}_linux-amd64.tar.xz + tar -tJf ./dist/backend-${{ steps.tags.outputs.tag }}_linux-amd64.tar.xz >/dev/null + + mise run build:standalone:linux-arm64 + chmod 755 ./dist/backend.linux-arm64 + tar -c --owner=0 --group=0 --mtime='now' --utc .env.example LICENSE README.md -C ./dist/ backend.linux-arm64 | xz -z -6 >./dist/backend-${{ steps.tags.outputs.tag }}_linux-arm64.tar.xz + tar -tJf ./dist/backend-${{ steps.tags.outputs.tag }}_linux-arm64.tar.xz >/dev/null + + mise run build:standalone:windows-amd64 + chmod 755 ./dist/backend.windows-amd64.exe + zip -j -X -9 -l -o ./dist/backend-${{ steps.tags.outputs.tag }}_windows-amd64.zip .env.example LICENSE README.md ./dist/backend.windows-amd64.exe + zip -T ./dist/backend-${{ steps.tags.outputs.tag }}_windows-amd64.zip - if: inputs.artifact-action == 'build-release' name: Release artifact - uses: ncipollo/release-action@440c8c1cb0ed28b9f43e4d1d670870f059653174 # v1.16.0 + uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1.20.0 with: - name: ${{ steps.tags.outputs.extended }} - tag: ${{ steps.tags.outputs.extended }} - artifacts: dist/*.tar.xz,dist/*.zip - makeLatest: true - prerelease: ${{ steps.ctx.outputs.branch != 'stable' }} - generateReleaseNotes: ${{ steps.ctx.outputs.branch == 'stable' }} + name: "${{ steps.tags.outputs.extended }}" + tag: "${{ steps.tags.outputs.extended }}" + artifacts: "dist/*.tar.xz,dist/*.zip" + makeLatest: "true" + prerelease: "${{ steps.ctx.outputs.branch != 'stable' }}" + generateReleaseNotes: "${{ steps.ctx.outputs.branch == 'stable' }}" - if: inputs.artifact-action == 'build-release' name: Attest artifact - uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # 
v2.2.3 + uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0 with: subject-path: | dist/*.tar.xz @@ -150,15 +135,15 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 + uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0 with: - egress-policy: audit + egress-policy: "audit" - name: Save context id: ctx env: - CTX_BRANCH: ${{ github.head_ref || github.ref_name }} - CTX_SHA: ${{ github.event.pull_request.head.sha || github.sha }} + CTX_BRANCH: "${{ github.head_ref || github.ref_name }}" + CTX_SHA: "${{ github.event.pull_request.head.sha || github.sha }}" run: | echo "branch=${CTX_BRANCH}" >>"$GITHUB_OUTPUT" echo "sha=${CTX_SHA}" >>"$GITHUB_OUTPUT" @@ -167,9 +152,9 @@ jobs: - name: Save tags id: tags env: - BRANCH: ${{ steps.ctx.outputs.branch }} - SHA: ${{ steps.ctx.outputs.sha }} - SHA_SHORT: ${{ steps.ctx.outputs.sha_short }} + BRANCH: "${{ steps.ctx.outputs.branch }}" + SHA: "${{ steps.ctx.outputs.sha }}" + SHA_SHORT: "${{ steps.ctx.outputs.sha_short }}" run: | TIMESTAMP="$(date +%Y.%m.%d)" TIMESTAMP_ISO="$(date -u +%Y-%m-%dT%H:%M:%SZ)" @@ -189,20 +174,20 @@ jobs: echo "list=${TAGS[*]}" >>"$GITHUB_OUTPUT" - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: - persist-credentials: false + persist-credentials: "false" - name: Build image id: build-image uses: redhat-actions/buildah-build@7a95fa7ee0f02d552a32753e7414641a04307056 # v2.13 with: - containerfiles: Dockerfile - platforms: linux/amd64,linux/arm64 - image: ${{ github.repository }} - layers: true - oci: true - tags: ${{ steps.tags.outputs.list }} + containerfiles: "Dockerfile" + platforms: "linux/amd64,linux/arm64" + image: "${{ github.repository }}" + layers: "true" + oci: "true" + tags: "${{ steps.tags.outputs.list }}" extra-args: | --squash --identity-label=false @@ -214,23 +199,23 @@ jobs: name: Login to GHCR uses: redhat-actions/podman-login@4934294ad0449894bcd1e9f191899d7292469603 # v1.7 with: - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - registry: ${{ env.REGISTRY }} + username: "${{ github.repository_owner }}" + password: "${{ secrets.GITHUB_TOKEN }}" + registry: "${{ env.REGISTRY }}" - if: inputs.image-action == 'build-release' name: Push to GHCR id: push-image uses: redhat-actions/push-to-registry@5ed88d269cf581ea9ef6dd6806d01562096bee9c # v2.8 with: - image: ${{ steps.build-image.outputs.image }} - tags: ${{ steps.build-image.outputs.tags }} - registry: ${{ env.REGISTRY }} + image: "${{ steps.build-image.outputs.image }}" + tags: "${{ steps.build-image.outputs.tags }}" + registry: "${{ env.REGISTRY }}" - if: inputs.image-action == 'build-release' name: Attest image - uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3 + uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0 with: subject-name: "${{ env.REGISTRY }}/${{ steps.build-image.outputs.image }}" - subject-digest: ${{ steps.push-image.outputs.digest }} - push-to-registry: false + subject-digest: "${{ steps.push-image.outputs.digest }}" + push-to-registry: "false" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8b2b17cf..7b91f662 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ on: - '*.md' concurrency: - group: ${{ github.workflow }}-${{ 
github.ref }} + group: ${{ github.workflow }} cancel-in-progress: false permissions: @@ -23,42 +23,43 @@ jobs: branch: ${{ steps.ctx.outputs.branch }} sha: ${{ steps.ctx.outputs.sha }} sha_short: ${{ steps.ctx.outputs.sha_short }} + steps: - name: Harden Runner - uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 + uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0 with: - egress-policy: audit + egress-policy: "audit" - name: Save context information id: ctx env: - CTX_BRANCH: ${{ github.head_ref || github.ref_name }} - CTX_SHA: ${{ github.event.pull_request.head.sha || github.sha }} + CTX_BRANCH: "${{ github.head_ref || github.ref_name }}" + CTX_SHA: "${{ github.event.pull_request.head.sha || github.sha }}" run: | echo "branch=${CTX_BRANCH}" >>"$GITHUB_OUTPUT" echo "sha=${CTX_SHA}" >>"$GITHUB_OUTPUT" echo "sha_short=${CTX_SHA::7}" >>"$GITHUB_OUTPUT" - - name: Setup Bun - uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2.0.1 - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: - persist-credentials: false + persist-credentials: "false" - - name: Install deps - run: bun install --frozen-lockfile + - name: Setup mise-en-place + uses: jdx/mise-action@146a28175021df8ca24f8ee1828cc2a60f980bd5 # v3.5.1 - name: Run lint - run: bun run lint + run: mise run lint + + - name: Run tests + run: mise run test - name: Build server - run: bun run build:server + run: mise run build - - name: Test run server + - name: Run server run: | - bun run start:server & + mise run start:server & SERVER_PID=$! - sleep 10 + sleep 5 kill $SERVER_PID diff --git a/.gitignore b/.gitignore index 931c7046..e1a4075e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,30 +1,28 @@ * -# Project files +!/.github/ +!/.github/renovate.json +!/.github/workflows/*.yml +!/.zed/ +!/.zed/settings.json +!/lib/ +!/lib/** +!/src/ +!/src/** +!/.dockerignore +!/.editorconfig !/.env.example +!/.gitattributes +!/.gitignore +!/.npmrc !/biome.json -!/bun.lock -!/bunfig.toml !/CONTRIBUTING.md -!/lefthook.json +!/deno.json +!/deno.lock +!/Dockerfile !/LICENSE +!/mise.toml !/package.json !/README.md -!/tsconfig.json - -# SRC -!/src/ -!/src/** - -# GIT -!/.gitattributes -!/.gitignore - -# GitHub -!/.github/ -!/.github/** -!/.github/workflows/*.yml - -# Docker -!/.dockerignore -!/Dockerfile \ No newline at end of file +!/rolldown.config.ts +!/tsconfig.json \ No newline at end of file diff --git a/.zed/settings.json b/.zed/settings.json new file mode 100644 index 00000000..f87bbd2e --- /dev/null +++ b/.zed/settings.json @@ -0,0 +1,53 @@ +// -*- mode: jsonc -*- + +{ + // lsp + "lsp": { + "deno": { + "settings": { + "deno": { + "enable": true + } + } + }, + "biome": { + "settings": { + "require_config_file": true, + "config_path": "./biome.json" + } + }, + "yaml-language-server": { + "settings": { + "yaml": { + "keyOrdering": false, + "format": { + "singleQuote": false + } + } + } + } + }, + + // language specific (overrides global) + "languages": { + "JavaScript": { + "formatter": "language_server", + "language_servers": ["biome", "deno"] + }, + "TypeScript": { + "formatter": "language_server", + "language_servers": ["biome", "deno"] + }, + "JSON": { + "language_servers": ["biome", "json-language-server"], + "formatter": "language_server" + }, + "JSONC": { + "language_servers": ["biome", "json-language-server"], + "formatter": "language_server" + }, + 
"YAML": { + "format_on_save": "off" + } + } +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a4969752..282485a8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,25 +1,97 @@ -### API +## Dependencies + +Everything is managed by..: + +- [mise-en-place](https://mise.jdx.dev/installing-mise.html) + +After install you need to trust the project..: + +```shell +mise trust +``` + +## Scripts + +The project uses `mise` to manage scripts. To list all available scripts..: + +```shell +mise run +``` + +Scripts are grouped, meaning that a script such as `mise run build` will run +other scripts under its name to fulfil its function, in this case building the +backend and compiling the server. + +This may not be desired in every case, so it is recommended that scripts be run +in a more granular way..: + +```shell +# Build and start +mise run build:server start:server + +# Better, we can run the development server +mise run start:dev +``` + +All scripts will run from any location within the project as if you were in the +main directory, no fear. + +## Build + +Building the Backend is very straightforward..: + +```shell +mise run build +``` + +It will prepare the server bundle ready to be run in `dist/backend.js`. + +You can also build a standalone binary for different platforms..: + +```shell +# Build the server bundle +mise run build:server + +# Build standalone binary for current platform (requires server bundle) +mise run build:standalone + +# ...or other platforms (requires server bundle) +mise run build:standalone:linux-amd64 +mise run build:standalone:linux-arm64 +mise run build:standalone:darwin-arm64 +mise run build:standalone:windows-amd64 +``` + +## API The API is documented under OpenAPI specification and can be found at the following path: ```shell -/:apipath/oas.json +/api/oas.json ``` -There are several ways to interact with the API, we will cover its use with [Scalar](https://scalar.com). +You can get a quick overview with: +- [Swagger Editor](https://editor.swagger.io/?url=https://jspaste.eu/api/oas.json) +- [Scalar Client](https://client.scalar.com/?url=https://jspaste.eu/api/oas.json) -We recommend using the desktop application, however, -you can also use the [web-based environment](https://client.scalar.com). (you may need to disable the CORS Proxy) +If using Scalar Client, disable the CORS proxy and follow these steps to import the +instance `oas.json`..: -Follow these steps to import the instance's `oas.json` to Scalar..: +![](https://static.x.inetol.net/jspaste/backend/scalar-t1.webp) -![](https://static.x.inetol.net/jspaste/backend/scalar-t1.gif) +## Maintenance -### Maintenance +If you want to clear the entire project of dependencies and build remnants..: -Over time, local repositories can become messy with untracked files, registered hooks, and temporary files in the .git -folder. To clean up the repository (and possibly all your uncommitted work), run the following command..: +```shell +mise run clean +``` + +Over time, local repositories can become messy with untracked files, registered +hooks, and temporary files in the .git folder. To clean up the repository (and +possibly all your uncommitted work), run the following command..: ```shell -bun run clean:git:all -``` \ No newline at end of file +# Careful with this one! 
+mise run clean:git
+```
diff --git a/Dockerfile b/Dockerfile
index aa22e5e6..a52d8042 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,44 +1,58 @@
-FROM --platform=$BUILDPLATFORM docker.io/oven/bun:1-alpine AS builder-standalone
+FROM --platform=$BUILDPLATFORM cgr.dev/chainguard/glibc-dynamic:latest-dev AS builder
+USER root
+
+RUN set -euxo pipefail; \
+    wget -qO- https://mise.run | sh; \
+    ln -s $HOME/.local/bin/mise /usr/bin/mise
 
 WORKDIR /build/
 COPY . ./
 
-RUN bun install --frozen-lockfile \
-    && bun run build:server
+RUN set -euxo pipefail; \
+    mise trust; \
+    GITHUB_ACTIONS=true mise run build:server
 
-RUN addgroup jspaste \
-    && adduser -G jspaste -u 7777 -s /bin/false -D jspaste \
-    && grep jspaste /etc/passwd > /tmp/.backend.passwd
+RUN echo "root:x:0:root" >/tmp/.group \
+    && echo "root:x:0:0:root:/backend:/bin/ash" >/tmp/.passwd \
+    && echo "jspaste:x:7777:jspaste" >>/tmp/.group \
+    && echo "jspaste:x:7777:7777:jspaste:/backend:/bin/ash" >>/tmp/.passwd
 
 ARG TARGETOS
 ARG TARGETARCH
 
-RUN bun run build:standalone
-
-FROM --platform=$BUILDPLATFORM docker.io/library/alpine:3.21
+RUN set -euxo pipefail; \
+    mise run build:standalone
 
-RUN apk add --no-cache libstdc++
+FROM --platform=$BUILDPLATFORM scratch AS dist
 
-COPY --from=builder-standalone /tmp/.backend.passwd /etc/passwd
-COPY --from=builder-standalone /etc/group /etc/group
+COPY --from=builder /tmp/.passwd /etc/passwd
+COPY --from=builder /tmp/.group /etc/group
+COPY --chown=root:root --from=cgr.dev/chainguard/wolfi-base:latest / /
+COPY --chown=root:root --from=builder /tmp/.passwd /etc/passwd
+COPY --chown=root:root --from=builder /tmp/.group /etc/group
+RUN rm -rf /home/
 
-WORKDIR /backend/
-COPY --chown=jspaste:jspaste --from=builder-standalone /build/dist/server ./
-COPY --chown=jspaste:jspaste --from=builder-standalone /build/LICENSE ./
+COPY --chown=7777:7777 --from=builder /build/dist/backend /backend/server
+COPY --chown=7777:7777 --from=builder /build/LICENSE /backend/
 
 LABEL org.opencontainers.image.created="0001-01-01T00:00:00Z" \
-    org.opencontainers.image.description="JSPaste Backend" \
-    org.opencontainers.image.licenses="EUPL-1.2" \
-    org.opencontainers.image.revision="unspecified" \
-    org.opencontainers.image.source="https://github.com/jspaste/backend" \
-    org.opencontainers.image.title="jspaste-backend" \
-    org.opencontainers.image.url="https://github.com/jspaste/backend" \
-    org.opencontainers.image.version="unspecified"
+      org.opencontainers.image.description="JSPaste Backend" \
+      org.opencontainers.image.licenses="EUPL-1.2" \
+      org.opencontainers.image.revision="unspecified" \
+      org.opencontainers.image.source="https://github.com/jspaste/backend" \
+      org.opencontainers.image.title="jspaste-backend" \
+      org.opencontainers.image.url="https://github.com/jspaste/backend" \
+      org.opencontainers.image.version="unspecified"
+
+ENV PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" \
+    SSL_CERT_DIR="/etc/ssl/certs" \
+    SSL_CERT_FILE="/etc/ssl/certs/ca-certificates.crt" \
+    HISTFILE="/dev/null" \
+    STORAGE_PATH="/backend/storage/"
 
 EXPOSE 4000
 
-VOLUME /backend/storage/
+VOLUME $STORAGE_PATH
 
-USER jspaste:jspaste
-
-ENTRYPOINT ["/backend/server"]
\ No newline at end of file
+WORKDIR /backend/
+ENTRYPOINT ["/backend/server"]
diff --git a/README.md b/README.md
index 7183c5a4..0251763e 100644
--- a/README.md
+++ b/README.md
@@ -11,13 +11,13 @@
 Linux & macOS:
 
 ```shell
-./server
+./backend.-
 ```
 
 Windows:
 
 ```powershell
-powershell -c ".\server.exe"
+powershell -c ".\backend.windows-.exe"
 ```
 
 ### Container
@@ 
-33,11 +33,11 @@ docker run --env-file=.env -d -p 127.0.0.1:4000:4000 \ ## Validate > [!IMPORTANT] -> All artifacts and images originate from GitHub `JSPaste/Backend` repository, no other artifacts or -> images built and distributed outside that repository are considered secure nor trusted by the JSPaste team. +> All artifacts and images originate from GitHub `JSPaste/Backend` repository, no other artifacts or images built and +> distributed outside that repository are considered secure nor trusted by the JSPaste team. -You can verify the integrity and origin of an artifact and/or image using the GitHub CLI or manually -at [JSPaste Attestations](https://github.com/jspaste/backend/attestations). +You can verify the integrity and origin of an artifact and/or image using the GitHub CLI or manually at +[JSPaste Attestations](https://github.com/jspaste/backend/attestations). Artifacts are attested and can be verified using the following command: diff --git a/biome.json b/biome.json index d6ca3679..f7f2d7ff 100644 --- a/biome.json +++ b/biome.json @@ -1,56 +1,165 @@ { - "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", - "files": { - "ignore": ["./dist/**", "./storage/**", "*.spec.ts"], - "ignoreUnknown": true - }, - "formatter": { - "enabled": true, - "formatWithErrors": false, - "indentStyle": "tab", - "indentWidth": 4, - "lineEnding": "lf", - "lineWidth": 120 - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true, - "complexity": { - "noStaticOnlyClass": "off" - }, - "style": { - "noParameterAssign": "off" - }, - "suspicious": { - "noConsoleLog": "error" - } - } - }, - "css": { - "formatter": { - "enabled": true - } - }, - "javascript": { - "formatter": { - "arrowParentheses": "always", - "bracketSameLine": false, - "bracketSpacing": true, - "enabled": true, - "jsxQuoteStyle": "single", - "quoteProperties": "asNeeded", - "quoteStyle": "single", - "semicolons": "always", - "trailingCommas": "none" - } - }, - "json": { - "formatter": { - "enabled": true - } - }, - "organizeImports": { - "enabled": true - } + "$schema": "https://biomejs.dev/schemas/2.3.11/schema.json", + "files": { + "ignoreUnknown": true, + "includes": ["**", "!!dist/*", "!node_modules/*", "!!storage/*"] + }, + "assist": { + "enabled": true, + "actions": { + "recommended": true, + "source": { + "useSortedAttributes": "on", + "useSortedProperties": "on" + } + } + }, + "formatter": { + "enabled": true, + "bracketSameLine": false, + "bracketSpacing": true, + "formatWithErrors": false, + "indentStyle": "space", + "indentWidth": 2, + "lineEnding": "lf", + "lineWidth": 120 + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true, + "complexity": { + "noForEach": "error", + "noImplicitCoercions": "error", + "useSimplifiedLogicExpression": "error" + }, + "correctness": { + "noGlobalDirnameFilename": "error", + "useImportExtensions": "error", + "useJsonImportAttributes": "error", + "useSingleJsDocAsterisk": "error" + }, + "nursery": { + "noContinue": "warn", + "noDeprecatedImports": "warn", + "noEqualsToNull": "warn", + "noFloatingPromises": "warn", + "noForIn": "warn", + "noImportCycles": "warn", + "noIncrementDecrement": "warn", + "noMisusedPromises": "warn", + "noMultiAssign": "warn", + "noMultiStr": "warn", + "noParametersOnlyUsedInRecursion": "warn", + "noReturnAssign": "warn", + "noUselessCatchBinding": "warn", + "noUselessUndefined": "warn", + "useAwaitThenable": "off", + "useDestructuring": "warn", + "useExhaustiveSwitchCases": "warn", + "useExplicitType": "off", + "useFind": "warn", + 
"useRegexpExec": "warn" + }, + "performance": { + "noAwaitInLoops": "error", + "noBarrelFile": "error", + "noDelete": "error", + "noNamespaceImport": "error", + "noReExportAll": "error", + "useTopLevelRegex": "error" + }, + "style": { + "noCommonJs": "error", + "noEnum": "error", + "noImplicitBoolean": "error", + "noInferrableTypes": "error", + "noNamespace": "error", + "noNegationElse": "error", + "noNestedTernary": "error", + "noParameterAssign": "error", + "noParameterProperties": "error", + "noRestrictedImports": { + "level": "error", + "options": { + "paths": { + "@hono/hono": { + "message": "Use `@hono/hono/tiny` instead", + "allowImportNames": ["Context"] + } + } + } + }, + "noSubstr": "error", + "noUnusedTemplateLiteral": "error", + "noUselessElse": "error", + "noYodaExpression": "error", + "useAsConstAssertion": "error", + "useAtIndex": "error", + "useCollapsedElseIf": "error", + "useCollapsedIf": "error", + "useConsistentArrayType": { + "level": "error", + "options": { + "syntax": "shorthand" + } + }, + "useConsistentBuiltinInstantiation": "error", + "useConsistentMemberAccessibility": { + "level": "error", + "options": { + "accessibility": "explicit" + } + }, + "useConsistentObjectDefinitions": { + "level": "error", + "options": { + "syntax": "explicit" + } + }, + "useConsistentTypeDefinitions": { + "level": "error", + "options": { + "style": "type" + } + }, + "useDefaultSwitchClause": "error", + "useExplicitLengthCheck": "error", + "useForOf": "error", + "useGroupedAccessorPairs": "error", + "useNumberNamespace": "error", + "useNumericSeparators": "error", + "useObjectSpread": "error", + "useReadonlyClassProperties": "error", + "useSelfClosingElements": "error", + "useShorthandAssign": "error", + "useSingleVarDeclarator": "error", + "useThrowNewError": "error", + "useThrowOnlyError": "error", + "useTrimStartEnd": "error", + "useUnifiedTypeSignatures": "error" + }, + "suspicious": { + "noConsole": "error", + "noAlert": "error", + "noConstantBinaryExpressions": "error", + "noEmptyBlockStatements": "error", + "noEvolvingTypes": "error", + "noUnassignedVariables": "error", + "noVar": "error", + "useNumberToFixedDigitsArgument": "error", + "useStaticResponseMethods": "error" + } + } + }, + "javascript": { + "formatter": { + "arrowParentheses": "always", + "jsxQuoteStyle": "double", + "quoteProperties": "asNeeded", + "quoteStyle": "double", + "semicolons": "always", + "trailingCommas": "none" + } + } } diff --git a/bun.lock b/bun.lock deleted file mode 100644 index 2b18e6fc..00000000 --- a/bun.lock +++ /dev/null @@ -1,116 +0,0 @@ -{ - "lockfileVersion": 1, - "workspaces": { - "": { - "dependencies": { - "@hono/zod-openapi": "~0.19.0", - "env-var": "~7.5.0", - "hono": "~4.7.0", - }, - "devDependencies": { - "@biomejs/biome": "~1.9.0", - "@types/bun": "^1.2.0", - "lefthook": "~1.11.0", - "sort-package-json": "^3.0.0", - }, - "peerDependencies": { - "typescript": "~5.8.0", - }, - }, - }, - "trustedDependencies": [ - "@biomejs/biome", - "lefthook", - ], - "packages": { - "@asteasolutions/zod-to-openapi": ["@asteasolutions/zod-to-openapi@7.3.0", "", { "dependencies": { "openapi3-ts": "^4.1.2" }, "peerDependencies": { "zod": "^3.20.2" } }, "sha512-7tE/r1gXwMIvGnXVUdIqUhCU1RevEFC4Jk6Bussa0fk1ecbnnINkZzj1EOAJyE/M3AI25DnHT/zKQL1/FPFi8Q=="], - - "@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", 
"@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="], - - "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="], - - "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="], - - "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="], - - "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="], - - "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="], - - "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="], - - "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="], - - "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="], - - "@hono/zod-openapi": ["@hono/zod-openapi@0.19.2", "", { "dependencies": { "@asteasolutions/zod-to-openapi": "^7.1.0", "@hono/zod-validator": "^0.4.1" }, "peerDependencies": { "hono": ">=4.3.6", "zod": "3.*" } }, "sha512-lkFa6wdQVgY7d7/m++Ixr3hvKCF5Y+zjTIPM37fex5ylCfX53A/W28gZRDuFZx3aR+noKob7lHfwdk9dURLzxw=="], - - "@hono/zod-validator": ["@hono/zod-validator@0.4.3", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-xIgMYXDyJ4Hj6ekm9T9Y27s080Nl9NXHcJkOvkXPhubOLj8hZkOL8pDnnXfvCf5xEE8Q4oMFenQUZZREUY2gqQ=="], - - "@types/bun": ["@types/bun@1.2.5", "", { "dependencies": { "bun-types": "1.2.5" } }, "sha512-w2OZTzrZTVtbnJew1pdFmgV99H0/L+Pvw+z1P67HaR18MHOzYnTYOi6qzErhK8HyT+DB782ADVPPE92Xu2/Opg=="], - - "@types/node": ["@types/node@22.13.10", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw=="], - - "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], - - "bun-types": ["bun-types@1.2.5", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-3oO6LVGGRRKI4kHINx5PIdIgnLRb7l/SprhzqXapmoYkFl5m4j6EvALvbDVuuBFaamB46Ap6HCUxIXNLCGy+tg=="], - - "detect-indent": ["detect-indent@7.0.1", "", {}, "sha512-Mc7QhQ8s+cLrnUfU/Ji94vG/r8M26m8f++vyres4ZoojaRDpZ1eSIh/EpzLNwlWuvzSZ3UbDFspjFvTDXe6e/g=="], - - "detect-newline": ["detect-newline@4.0.1", "", {}, 
"sha512-qE3Veg1YXzGHQhlA6jzebZN2qVf6NX+A7m7qlhCGG30dJixrAQhYOsJjsnBjJkCSmuOPpCk30145fr8FV0bzog=="], - - "env-var": ["env-var@7.5.0", "", {}, "sha512-mKZOzLRN0ETzau2W2QXefbFjo5EF4yWq28OyKb9ICdeNhHJlOE/pHHnz4hdYJ9cNZXcJHo5xN4OT4pzuSHSNvA=="], - - "fdir": ["fdir@6.4.3", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw=="], - - "get-stdin": ["get-stdin@9.0.0", "", {}, "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA=="], - - "git-hooks-list": ["git-hooks-list@3.2.0", "", {}, "sha512-ZHG9a1gEhUMX1TvGrLdyWb9kDopCBbTnI8z4JgRMYxsijWipgjSEYoPWqBuIB0DnRnvqlQSEeVmzpeuPm7NdFQ=="], - - "hono": ["hono@4.7.4", "", {}, "sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg=="], - - "is-plain-obj": ["is-plain-obj@4.1.0", "", {}, "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg=="], - - "lefthook": ["lefthook@1.11.3", "", { "optionalDependencies": { "lefthook-darwin-arm64": "1.11.3", "lefthook-darwin-x64": "1.11.3", "lefthook-freebsd-arm64": "1.11.3", "lefthook-freebsd-x64": "1.11.3", "lefthook-linux-arm64": "1.11.3", "lefthook-linux-x64": "1.11.3", "lefthook-openbsd-arm64": "1.11.3", "lefthook-openbsd-x64": "1.11.3", "lefthook-windows-arm64": "1.11.3", "lefthook-windows-x64": "1.11.3" }, "bin": { "lefthook": "bin/index.js" } }, "sha512-HJp37y62j3j8qzAOODWuUJl4ysLwsDvCTBV6odr3jIRHR/a5e+tI14VQGIBcpK9ysqC3pGWyW5Rp9Jv1YDubyw=="], - - "lefthook-darwin-arm64": ["lefthook-darwin-arm64@1.11.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-IYzAOf8Qwqk7q+LoRyy7kSk9vzpUZ5wb/vLzEAH/F86Vay9AUaWe1f2pzeLwFg18qEc1QNklT69h9p/uLQMojA=="], - - "lefthook-darwin-x64": ["lefthook-darwin-x64@1.11.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-z/Wp7UMjE1Vyl+x9sjN3NvN6qKdwgHl+cDf98MKKDg/WyPE5XnzqLm9rLLJgImjyClfH7ptTfZxEyhTG3M3XvQ=="], - - "lefthook-freebsd-arm64": ["lefthook-freebsd-arm64@1.11.3", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-QevwQ7lrv5wBCkk7LLTzT5KR3Bk/5nttSxT1UH2o0EsgirS/c2K5xSgQmV6m3CiZYuCe2Pja4BSIwN3zt17SMw=="], - - "lefthook-freebsd-x64": ["lefthook-freebsd-x64@1.11.3", "", { "os": "freebsd", "cpu": "x64" }, "sha512-PYbcyNgdJJ4J2pEO9Ss4oYo5yq4vmQGTKm3RTYbRx4viSWR65hvKCP0C4LnIqspMvmR05SJi2bqe7UBP2t60EA=="], - - "lefthook-linux-arm64": ["lefthook-linux-arm64@1.11.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-0pBMBAoafOAEg345eOPozsmRjWR0zCr6k+m5ZxwRBZbZx1bQFDqBakQ3TpFCphhcykmgFyaa1KeZJZUOrEsezA=="], - - "lefthook-linux-x64": ["lefthook-linux-x64@1.11.3", "", { "os": "linux", "cpu": "x64" }, "sha512-eiezheZ/bisBCMB2Ur0mctug/RDFyu39B5wzoE8y4z0W1yw6jHGrWMJ4Y8+5qKZ7fmdZg+7YPuMHZ2eFxOnhQA=="], - - "lefthook-openbsd-arm64": ["lefthook-openbsd-arm64@1.11.3", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-DRLTzXdtCj/TizpLcGSqXcnrqvgxeXgn/6nqzclIGqNdKCsNXDzpI0D3sP13Vwwmyoqv2etoTak2IHqZiXZDqg=="], - - "lefthook-openbsd-x64": ["lefthook-openbsd-x64@1.11.3", "", { "os": "openbsd", "cpu": "x64" }, "sha512-l7om+ZjWpYrVZyDuElwnucZhEqa7YfwlRaKBenkBxEh2zMje8O6Zodeuma1KmyDbSFvnvEjARo/Ejiot4gLXEw=="], - - "lefthook-windows-arm64": ["lefthook-windows-arm64@1.11.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-X0iTrql2gfPAkU2dzRwuHWgW5RcqCPbzJtKQ41X6Y/F7iQacRknmuYUGyC81funSvzGAsvlusMVLUvaFjIKnbA=="], - - "lefthook-windows-x64": ["lefthook-windows-x64@1.11.3", "", { "os": "win32", "cpu": "x64" }, 
"sha512-F+ORMn6YJXoS0EXU5LtN1FgV4QX9rC9LucZEkRmK6sKmS7hcb9IHpyb7siRGytArYzJvXVjPbxPBNSBdN4egZQ=="], - - "openapi3-ts": ["openapi3-ts@4.4.0", "", { "dependencies": { "yaml": "^2.5.0" } }, "sha512-9asTNB9IkKEzWMcHmVZE7Ts3kC9G7AFHfs8i7caD8HbI76gEjdkId4z/AkP83xdZsH7PLAnnbl47qZkXuxpArw=="], - - "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], - - "semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="], - - "sort-object-keys": ["sort-object-keys@1.1.3", "", {}, "sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg=="], - - "sort-package-json": ["sort-package-json@3.0.0", "", { "dependencies": { "detect-indent": "^7.0.1", "detect-newline": "^4.0.1", "get-stdin": "^9.0.0", "git-hooks-list": "^3.0.0", "is-plain-obj": "^4.1.0", "semver": "^7.7.1", "sort-object-keys": "^1.1.3", "tinyglobby": "^0.2.12" }, "bin": { "sort-package-json": "cli.js" } }, "sha512-vfZWx4DnFNB8R9Vg4Dnx21s20auNzWH15ZaCBfADAiyrCwemRmhWstTgvLjMek1DW3+MHcNaqkp86giCF24rMA=="], - - "tinyglobby": ["tinyglobby@0.2.12", "", { "dependencies": { "fdir": "^6.4.3", "picomatch": "^4.0.2" } }, "sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww=="], - - "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - - "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], - - "yaml": ["yaml@2.7.0", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA=="], - - "zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], - } -} diff --git a/bunfig.toml b/bunfig.toml deleted file mode 100644 index 4f06890a..00000000 --- a/bunfig.toml +++ /dev/null @@ -1,9 +0,0 @@ -telemetry = false - -[install] -auto = "disable" -saveTextLockfile = true - -[run] -bun = true -silent = true \ No newline at end of file diff --git a/deno.json b/deno.json new file mode 100644 index 00000000..0687cade --- /dev/null +++ b/deno.json @@ -0,0 +1,47 @@ +{ + "$schema": "https://raw.githubusercontent.com/denoland/deno/main/cli/schemas/config-file.v1.json", + "license": "EUPL-1.2", + "lock": true, + "nodeModulesDir": "manual", + "unstable": ["cron", "temporal", "raw-imports"], + "allowScripts": [], + "imports": { + "#/": "./src/", + "#db/": "./src/database/", + "#document/": "./src/document/", + "#endpoint/": "./src/endpoints/", + "#http/": "./src/http/", + "#task/": "./src/tasks/", + "#util/": "./src/utils/", + "@deno/loader": "jsr:@deno/loader@~0.3.11", + "@hono/hono": "jsr:@hono/hono@^4.11.4", + "@hono/openapi": "npm:hono-openapi@^1.1.2", + "@hono/standard-validator": "jsr:@hono/standard-validator@~0.2.2", + "@std/assert": "jsr:@std/assert@^1.0.16", + "@std/async": "jsr:@std/async@^1.0.16", + "@std/cache": "jsr:@std/cache@~0.2.1", + "@std/collections": "jsr:@std/collections@^1.1.3", + "@std/dotenv": "jsr:@std/dotenv@~0.225.6", + "@std/encoding": "jsr:@std/encoding@^1.0.10", + "@std/fmt": "jsr:@std/fmt@^1.0.8", + "@std/fs": "jsr:@std/fs@^1.0.21", + "@std/path": "jsr:@std/path@^1.1.4", + "@std/streams": 
"jsr:@std/streams@^1.0.16", + "@std/ulid": "jsr:@std/ulid@^1.0.0", + "@types/node": "npm:@types/node@^25.0.8", + "arkenv": "npm:arkenv@~0.8.3", + "arktype": "npm:arktype@^2.1.29", + "biome": "npm:@biomejs/biome@2.3.11", + "hash-wasm": "npm:hash-wasm@^4.12.0", + "nanoid": "jsr:@sitnik/nanoid@^5.1.5", + "rolldown": "npm:rolldown@1.0.0-beta.60", + "vite-bundle-analyzer": "npm:vite-bundle-analyzer@^1.3.2" + }, + "fmt": { + "exclude": ["**"] + }, + "lint": { + "exclude": ["**"] + }, + "exclude": ["./dist/", "./node_modules/", "./storage/"] +} diff --git a/deno.lock b/deno.lock new file mode 100644 index 00000000..dc73d2ab --- /dev/null +++ b/deno.lock @@ -0,0 +1,410 @@ +{ + "version": "5", + "specifiers": { + "jsr:@deno/loader@~0.3.11": "0.3.11", + "jsr:@hono/hono@^4.11.4": "4.11.4", + "jsr:@hono/hono@^4.8.3": "4.11.4", + "jsr:@hono/standard-validator@~0.2.2": "0.2.2", + "jsr:@sitnik/nanoid@^5.1.5": "5.1.5", + "jsr:@standard-schema/spec@1": "1.1.0", + "jsr:@std/assert@^1.0.16": "1.0.16", + "jsr:@std/async@^1.0.16": "1.0.16", + "jsr:@std/bytes@^1.0.6": "1.0.6", + "jsr:@std/cache@~0.2.1": "0.2.1", + "jsr:@std/collections@^1.1.3": "1.1.3", + "jsr:@std/dotenv@~0.225.6": "0.225.6", + "jsr:@std/encoding@^1.0.10": "1.0.10", + "jsr:@std/fmt@^1.0.8": "1.0.8", + "jsr:@std/fs@^1.0.21": "1.0.21", + "jsr:@std/internal@^1.0.12": "1.0.12", + "jsr:@std/path@^1.1.4": "1.1.4", + "jsr:@std/streams@^1.0.16": "1.0.16", + "jsr:@std/ulid@1": "1.0.0", + "npm:@biomejs/biome@2.3.11": "2.3.11", + "npm:@types/node@^25.0.8": "25.0.8", + "npm:arkenv@~0.8.3": "0.8.3_arktype@2.1.29", + "npm:arktype@^2.1.29": "2.1.29", + "npm:hash-wasm@^4.12.0": "4.12.0", + "npm:hono-openapi@^1.1.2": "1.1.2_@standard-community+standard-json@0.3.5__@standard-schema+spec@1.1.0__@types+json-schema@7.0.15__arktype@2.1.29__quansync@0.2.11_@standard-community+standard-openapi@0.2.9__@standard-community+standard-json@0.3.5___@standard-schema+spec@1.1.0___@types+json-schema@7.0.15___arktype@2.1.29___quansync@0.2.11__@standard-schema+spec@1.1.0__arktype@2.1.29__openapi-types@12.1.3__@types+json-schema@7.0.15_@types+json-schema@7.0.15_openapi-types@12.1.3_arktype@2.1.29", + "npm:rolldown@1.0.0-beta.60": "1.0.0-beta.60", + "npm:vite-bundle-analyzer@^1.3.2": "1.3.2" + }, + "jsr": { + "@deno/loader@0.3.11": { + "integrity": "7c62f4f09cdfc34e66ba25b5a775a1830cbb5266b3e39f67b0f620c75484df8d" + }, + "@hono/hono@4.11.4": { + "integrity": "aaf7b9d5a6b2422b0778c091b712ee1f018bc7e82138067d21eb27d7c2e1f5be" + }, + "@hono/standard-validator@0.2.2": { + "integrity": "bc94e1ab41d677a571cb6dd5012823f1162b9856ca24dfd60233734824bb0b0c", + "dependencies": [ + "jsr:@hono/hono@^4.8.3", + "jsr:@standard-schema/spec" + ] + }, + "@sitnik/nanoid@5.1.5": { + "integrity": "55bd5f57087d67b1dcb7c1f4a07efdfe77a3ac57ca0af90f162c1f676ebf8f4b" + }, + "@standard-schema/spec@1.1.0": { + "integrity": "2ccd54513cd9c960bd155ab569b1a901bc99c6f9ad29559d3f38a28c91c1822d" + }, + "@std/assert@1.0.16": { + "integrity": "6a7272ed1eaa77defe76e5ff63ca705d9c495077e2d5fd0126d2b53fc5bd6532", + "dependencies": [ + "jsr:@std/internal" + ] + }, + "@std/async@1.0.16": { + "integrity": "6c9e43035313b67b5de43e2b3ee3eadb39a488a0a0a3143097f112e025d3ee9a" + }, + "@std/bytes@1.0.6": { + "integrity": "f6ac6adbd8ccd99314045f5703e23af0a68d7f7e58364b47d2c7f408aeb5820a" + }, + "@std/cache@0.2.1": { + "integrity": "b6f1abfd118d35b1c4ca90f2b3f4c709a2014ae368f244bdc7533bf1c169d759" + }, + "@std/collections@1.1.3": { + "integrity": "bf8b0818886df6a32b64c7d3b037a425111f28278d69fd0995aeb62777c986b0" + }, + 
"@std/dotenv@0.225.6": { + "integrity": "1d6f9db72f565bd26790fa034c26e45ecb260b5245417be76c2279e5734c421b" + }, + "@std/encoding@1.0.10": { + "integrity": "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1" + }, + "@std/fmt@1.0.8": { + "integrity": "71e1fc498787e4434d213647a6e43e794af4fd393ef8f52062246e06f7e372b7" + }, + "@std/fs@1.0.21": { + "integrity": "d720fe1056d78d43065a4d6e0eeb2b19f34adb8a0bc7caf3a4dbf1d4178252cd", + "dependencies": [ + "jsr:@std/internal", + "jsr:@std/path" + ] + }, + "@std/internal@1.0.12": { + "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027" + }, + "@std/path@1.1.4": { + "integrity": "1d2d43f39efb1b42f0b1882a25486647cb851481862dc7313390b2bb044314b5", + "dependencies": [ + "jsr:@std/internal" + ] + }, + "@std/streams@1.0.16": { + "integrity": "85030627befb1767c60d4f65cb30fa2f94af1d6ee6e5b2515b76157a542e89c4", + "dependencies": [ + "jsr:@std/bytes" + ] + }, + "@std/ulid@1.0.0": { + "integrity": "d41c3d27a907714413649fee864b7cde8d42ee68437d22b79d5de4f81d808780" + } + }, + "npm": { + "@ark/schema@0.56.0": { + "integrity": "sha512-ECg3hox/6Z/nLajxXqNhgPtNdHWC9zNsDyskwO28WinoFEnWow4IsERNz9AnXRhTZJnYIlAJ4uGn3nlLk65vZA==", + "dependencies": [ + "@ark/util" + ] + }, + "@ark/util@0.56.0": { + "integrity": "sha512-BghfRC8b9pNs3vBoDJhcta0/c1J1rsoS1+HgVUreMFPdhz/CRAKReAu57YEllNaSy98rWAdY1gE+gFup7OXpgA==" + }, + "@biomejs/biome@2.3.11": { + "integrity": "sha512-/zt+6qazBWguPG6+eWmiELqO+9jRsMZ/DBU3lfuU2ngtIQYzymocHhKiZRyrbra4aCOoyTg/BmY+6WH5mv9xmQ==", + "optionalDependencies": [ + "@biomejs/cli-darwin-arm64", + "@biomejs/cli-darwin-x64", + "@biomejs/cli-linux-arm64", + "@biomejs/cli-linux-arm64-musl", + "@biomejs/cli-linux-x64", + "@biomejs/cli-linux-x64-musl", + "@biomejs/cli-win32-arm64", + "@biomejs/cli-win32-x64" + ], + "bin": true + }, + "@biomejs/cli-darwin-arm64@2.3.11": { + "integrity": "sha512-/uXXkBcPKVQY7rc9Ys2CrlirBJYbpESEDme7RKiBD6MmqR2w3j0+ZZXRIL2xiaNPsIMMNhP1YnA+jRRxoOAFrA==", + "os": ["darwin"], + "cpu": ["arm64"] + }, + "@biomejs/cli-darwin-x64@2.3.11": { + "integrity": "sha512-fh7nnvbweDPm2xEmFjfmq7zSUiox88plgdHF9OIW4i99WnXrAC3o2P3ag9judoUMv8FCSUnlwJCM1B64nO5Fbg==", + "os": ["darwin"], + "cpu": ["x64"] + }, + "@biomejs/cli-linux-arm64-musl@2.3.11": { + "integrity": "sha512-XPSQ+XIPZMLaZ6zveQdwNjbX+QdROEd1zPgMwD47zvHV+tCGB88VH+aynyGxAHdzL+Tm/+DtKST5SECs4iwCLg==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@biomejs/cli-linux-arm64@2.3.11": { + "integrity": "sha512-l4xkGa9E7Uc0/05qU2lMYfN1H+fzzkHgaJoy98wO+b/7Gl78srbCRRgwYSW+BTLixTBrM6Ede5NSBwt7rd/i6g==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@biomejs/cli-linux-x64-musl@2.3.11": { + "integrity": "sha512-vU7a8wLs5C9yJ4CB8a44r12aXYb8yYgBn+WeyzbMjaCMklzCv1oXr8x+VEyWodgJt9bDmhiaW/I0RHbn7rsNmw==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@biomejs/cli-linux-x64@2.3.11": { + "integrity": "sha512-/1s9V/H3cSe0r0Mv/Z8JryF5x9ywRxywomqZVLHAoa/uN0eY7F8gEngWKNS5vbbN/BsfpCG5yeBT5ENh50Frxg==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@biomejs/cli-win32-arm64@2.3.11": { + "integrity": "sha512-PZQ6ElCOnkYapSsysiTy0+fYX+agXPlWugh6+eQ6uPKI3vKAqNp6TnMhoM3oY2NltSB89hz59o8xIfOdyhi9Iw==", + "os": ["win32"], + "cpu": ["arm64"] + }, + "@biomejs/cli-win32-x64@2.3.11": { + "integrity": "sha512-43VrG813EW+b5+YbDbz31uUsheX+qFKCpXeY9kfdAx+ww3naKxeVkTD9zLIWxUPfJquANMHrmW3wbe/037G0Qg==", + "os": ["win32"], + "cpu": ["x64"] + }, + "@emnapi/core@1.8.1": { + "integrity": "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==", + "dependencies": [ 
+ "@emnapi/wasi-threads", + "tslib" + ] + }, + "@emnapi/runtime@1.8.1": { + "integrity": "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==", + "dependencies": [ + "tslib" + ] + }, + "@emnapi/wasi-threads@1.1.0": { + "integrity": "sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==", + "dependencies": [ + "tslib" + ] + }, + "@napi-rs/wasm-runtime@1.1.1": { + "integrity": "sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==", + "dependencies": [ + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util" + ] + }, + "@oxc-project/types@0.108.0": { + "integrity": "sha512-7lf13b2IA/kZO6xgnIZA88sq3vwrxWk+2vxf6cc+omwYCRTiA5e63Beqf3fz/v8jEviChWWmFYBwzfSeyrsj7Q==" + }, + "@rolldown/binding-android-arm64@1.0.0-beta.60": { + "integrity": "sha512-hOW6iQXtpG4uCW1zGK56+KhEXGttSkTp2ykncW/nkOIF/jOKTqbM944Q73HVeMXP1mPRvE2cZwNp3xeLIeyIGQ==", + "os": ["android"], + "cpu": ["arm64"] + }, + "@rolldown/binding-darwin-arm64@1.0.0-beta.60": { + "integrity": "sha512-vyDA4HXY2mP8PPtl5UE17uGPxUNG4m1wkfa3kAkR8JWrFbarV97UmLq22IWrNhtBPa89xqerzLK8KoVmz5JqCQ==", + "os": ["darwin"], + "cpu": ["arm64"] + }, + "@rolldown/binding-darwin-x64@1.0.0-beta.60": { + "integrity": "sha512-WnxyqxAKP2BsxouwGY/RCF5UFw/LA4QOHhJ7VEl+UCelHokiwqNHRbryLAyRy3TE1FZ5eae+vAFcaetAu/kWLw==", + "os": ["darwin"], + "cpu": ["x64"] + }, + "@rolldown/binding-freebsd-x64@1.0.0-beta.60": { + "integrity": "sha512-JtyWJ+zXOHof5gOUYwdTWI2kL6b8q9eNwqB/oD4mfUFaC/COEB2+47JMhcq78dey9Ahmec3DZKRDZPRh9hNAMQ==", + "os": ["freebsd"], + "cpu": ["x64"] + }, + "@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.60": { + "integrity": "sha512-LrMoKqpHx+kCaNSk84iSBd4yVOymLIbxJQtvFjDN2CjQraownR+IXcwYDblFcj9ivmS54T3vCboXBbm3s1zbPQ==", + "os": ["linux"], + "cpu": ["arm"] + }, + "@rolldown/binding-linux-arm64-gnu@1.0.0-beta.60": { + "integrity": "sha512-sqI+Vdx1gmXJMsXN3Fsewm3wlt7RHvRs1uysSp//NLsCoh9ZFEUr4ZzGhWKOg6Rvf+njNu/vCsz96x7wssLejQ==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@rolldown/binding-linux-arm64-musl@1.0.0-beta.60": { + "integrity": "sha512-8xlqGLDtTP8sBfYwneTDu8+PRm5reNEHAuI/+6WPy9y350ls0KTFd3EJCOWEXWGW0F35ko9Fn9azmurBTjqOrQ==", + "os": ["linux"], + "cpu": ["arm64"] + }, + "@rolldown/binding-linux-x64-gnu@1.0.0-beta.60": { + "integrity": "sha512-iR4nhVouVZK1CiGGGyz+prF5Lw9Lmz30Rl36Hajex+dFVFiegka604zBwzTp5Tl0BZnr50ztnVJ30tGrBhDr8Q==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@rolldown/binding-linux-x64-musl@1.0.0-beta.60": { + "integrity": "sha512-HbfNcqNeqxFjSMf1Kpe8itr2e2lr0Bm6HltD2qXtfU91bSSikVs9EWsa1ThshQ1v2ZvxXckGjlVLtah6IoslPg==", + "os": ["linux"], + "cpu": ["x64"] + }, + "@rolldown/binding-openharmony-arm64@1.0.0-beta.60": { + "integrity": "sha512-BiiamFcgTJ+ZFOUIMO9AHXUo9WXvHVwGfSrJ+Sv0AsTd2w3VN7dJGiH3WRcxKFetljJHWvGbM4fdpY5lf6RIvw==", + "os": ["openharmony"], + "cpu": ["arm64"] + }, + "@rolldown/binding-wasm32-wasi@1.0.0-beta.60": { + "integrity": "sha512-6roXGbHMdR2ucnxXuwbmQvk8tuYl3VGu0yv13KxspyKBxxBd4RS6iykzLD6mX2gMUHhfX8SVWz7n/62gfyKHow==", + "dependencies": [ + "@napi-rs/wasm-runtime" + ], + "cpu": ["wasm32"] + }, + "@rolldown/binding-win32-arm64-msvc@1.0.0-beta.60": { + "integrity": "sha512-JBOm8/DC/CKnHyMHoJFdvzVHxUixid4dGkiTqGflxOxO43uSJMpl77pSPXvzwZ/VXwqblU2V0/PanyCBcRLowQ==", + "os": ["win32"], + "cpu": ["arm64"] + }, + "@rolldown/binding-win32-x64-msvc@1.0.0-beta.60": { + "integrity": "sha512-MKF0B823Efp+Ot8KsbwIuGhKH58pf+2rSM6VcqyNMlNBHheOM0Gf7JmEu+toc1jgN6fqjH7Et+8hAzsLVkIGfA==", + "os": 
["win32"], + "cpu": ["x64"] + }, + "@rolldown/pluginutils@1.0.0-beta.60": { + "integrity": "sha512-Jz4aqXRPVtqkH1E3jRDzLO5cgN5JwW+WG0wXGE4NiJd25nougv/AHzxmKCzmVQUYnxLmTM0M4wrZp+LlC2FKLg==" + }, + "@standard-community/standard-json@0.3.5_@standard-schema+spec@1.1.0_@types+json-schema@7.0.15_arktype@2.1.29_quansync@0.2.11": { + "integrity": "sha512-4+ZPorwDRt47i+O7RjyuaxHRK/37QY/LmgxlGrRrSTLYoFatEOzvqIc85GTlM18SFZ5E91C+v0o/M37wZPpUHA==", + "dependencies": [ + "@standard-schema/spec", + "@types/json-schema", + "arktype", + "quansync" + ], + "optionalPeers": [ + "arktype" + ] + }, + "@standard-community/standard-openapi@0.2.9_@standard-community+standard-json@0.3.5__@standard-schema+spec@1.1.0__@types+json-schema@7.0.15__arktype@2.1.29__quansync@0.2.11_@standard-schema+spec@1.1.0_arktype@2.1.29_openapi-types@12.1.3_@types+json-schema@7.0.15": { + "integrity": "sha512-htj+yldvN1XncyZi4rehbf9kLbu8os2Ke/rfqoZHCMHuw34kiF3LP/yQPdA0tQ940y8nDq3Iou8R3wG+AGGyvg==", + "dependencies": [ + "@standard-community/standard-json", + "@standard-schema/spec", + "arktype", + "openapi-types" + ], + "optionalPeers": [ + "arktype" + ] + }, + "@standard-schema/spec@1.1.0": { + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==" + }, + "@tybys/wasm-util@0.10.1": { + "integrity": "sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==", + "dependencies": [ + "tslib" + ] + }, + "@types/json-schema@7.0.15": { + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==" + }, + "@types/node@25.0.8": { + "integrity": "sha512-powIePYMmC3ibL0UJ2i2s0WIbq6cg6UyVFQxSCpaPxxzAaziRfimGivjdF943sSGV6RADVbk0Nvlm5P/FB44Zg==", + "dependencies": [ + "undici-types" + ] + }, + "arkenv@0.8.3_arktype@2.1.29": { + "integrity": "sha512-fndPYpIZ/EvARTXabWG5H+gKxlJEbPgTRvXH8htimmCbdBfEXZsSOgObwdiCCCcBz33tJAYk88goDtj0Ao99NA==", + "dependencies": [ + "arktype" + ] + }, + "arkregex@0.0.5": { + "integrity": "sha512-ncYjBdLlh5/QnVsAA8De16Tc9EqmYM7y/WU9j+236KcyYNUXogpz3sC4ATIZYzzLxwI+0sEOaQLEmLmRleaEXw==", + "dependencies": [ + "@ark/util" + ] + }, + "arktype@2.1.29": { + "integrity": "sha512-jyfKk4xIOzvYNayqnD8ZJQqOwcrTOUbIU4293yrzAjA3O1dWh61j71ArMQ6tS/u4pD7vabSPe7nG3RCyoXW6RQ==", + "dependencies": [ + "@ark/schema", + "@ark/util", + "arkregex" + ] + }, + "hash-wasm@4.12.0": { + "integrity": "sha512-+/2B2rYLb48I/evdOIhP+K/DD2ca2fgBjp6O+GBEnCDk2e4rpeXIK8GvIyRPjTezgmWn9gmKwkQjjx6BtqDHVQ==" + }, + "hono-openapi@1.1.2_@standard-community+standard-json@0.3.5__@standard-schema+spec@1.1.0__@types+json-schema@7.0.15__arktype@2.1.29__quansync@0.2.11_@standard-community+standard-openapi@0.2.9__@standard-community+standard-json@0.3.5___@standard-schema+spec@1.1.0___@types+json-schema@7.0.15___arktype@2.1.29___quansync@0.2.11__@standard-schema+spec@1.1.0__arktype@2.1.29__openapi-types@12.1.3__@types+json-schema@7.0.15_@types+json-schema@7.0.15_openapi-types@12.1.3_arktype@2.1.29": { + "integrity": "sha512-toUcO60MftRBxqcVyxsHNYs2m4vf4xkQaiARAucQx3TiBPDtMNNkoh+C4I1vAretQZiGyaLOZNWn1YxfSyUA5g==", + "dependencies": [ + "@standard-community/standard-json", + "@standard-community/standard-openapi", + "@types/json-schema", + "openapi-types" + ] + }, + "openapi-types@12.1.3": { + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "quansync@0.2.11": { + "integrity": 
"sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==" + }, + "rolldown@1.0.0-beta.60": { + "integrity": "sha512-YYgpv7MiTp9LdLj1fzGzCtij8Yi2OKEc3HQtfbIxW4yuSgpQz9518I69U72T5ErPA/ATOXqlcisiLrWy+5V9YA==", + "dependencies": [ + "@oxc-project/types", + "@rolldown/pluginutils" + ], + "optionalDependencies": [ + "@rolldown/binding-android-arm64", + "@rolldown/binding-darwin-arm64", + "@rolldown/binding-darwin-x64", + "@rolldown/binding-freebsd-x64", + "@rolldown/binding-linux-arm-gnueabihf", + "@rolldown/binding-linux-arm64-gnu", + "@rolldown/binding-linux-arm64-musl", + "@rolldown/binding-linux-x64-gnu", + "@rolldown/binding-linux-x64-musl", + "@rolldown/binding-openharmony-arm64", + "@rolldown/binding-wasm32-wasi", + "@rolldown/binding-win32-arm64-msvc", + "@rolldown/binding-win32-x64-msvc" + ], + "bin": true + }, + "tslib@2.8.1": { + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, + "undici-types@7.16.0": { + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==" + }, + "vite-bundle-analyzer@1.3.2": { + "integrity": "sha512-Od4ILUKRvBV3LuO/E+S+c1XULlxdkRZPSf6Vzzu+UAXG0D3hZYUu9imZIkSj/PU4e1FB14yB+av8g3KiljH8zQ==", + "bin": true + } + }, + "workspace": { + "dependencies": [ + "jsr:@deno/loader@~0.3.11", + "jsr:@hono/hono@^4.11.4", + "jsr:@hono/standard-validator@~0.2.2", + "jsr:@sitnik/nanoid@^5.1.5", + "jsr:@std/assert@^1.0.16", + "jsr:@std/async@^1.0.16", + "jsr:@std/cache@~0.2.1", + "jsr:@std/collections@^1.1.3", + "jsr:@std/dotenv@~0.225.6", + "jsr:@std/encoding@^1.0.10", + "jsr:@std/fmt@^1.0.8", + "jsr:@std/fs@^1.0.21", + "jsr:@std/path@^1.1.4", + "jsr:@std/streams@^1.0.16", + "jsr:@std/ulid@1", + "npm:@biomejs/biome@2.3.11", + "npm:@types/node@^25.0.8", + "npm:arkenv@~0.8.3", + "npm:arktype@^2.1.29", + "npm:hash-wasm@^4.12.0", + "npm:hono-openapi@^1.1.2", + "npm:rolldown@1.0.0-beta.60", + "npm:vite-bundle-analyzer@^1.3.2" + ] + } +} diff --git a/lefthook.json b/lefthook.json deleted file mode 100644 index 2e8038ab..00000000 --- a/lefthook.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "$schema": "https://json.schemastore.org/lefthook.json", - "pre-commit": { - "commands": { - "fix": { - "env": { - "PATH": "$PATH:$HOME/.bun/bin" - }, - "run": "bun run fix && git update-index --again" - } - } - } -} diff --git a/lib/deno-rolldown/LICENSE b/lib/deno-rolldown/LICENSE new file mode 100644 index 00000000..24295035 --- /dev/null +++ b/lib/deno-rolldown/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018-2025 the Deno authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/lib/deno-rolldown/mod.ts b/lib/deno-rolldown/mod.ts new file mode 100644 index 00000000..abd0256f --- /dev/null +++ b/lib/deno-rolldown/mod.ts @@ -0,0 +1,240 @@ +// biome-ignore-all lint: vibecode +// based on: https://github.com/denoland/deno-rolldown-plugin + +import { + type Loader, + type LoadResponse, + MediaType, + RequestedModuleType, + ResolutionMode, + Workspace, + type WorkspaceOptions +} from "@deno/loader"; +import { fromFileUrl } from "@std/path"; + +const MARegex = /.*/; + +type Module = { + specifier: string; + code: string; +}; + +/** Options for creating the Deno plugin. */ +export interface DenoPluginOptions extends WorkspaceOptions { + /** Entry points for the build (optional, can be provided in buildStart) */ + entrypoints?: string[]; + /** + * Patterns to treat as external when Deno loader can't resolve them. + * Useful for npm packages that should remain external. + */ + externalPatterns?: (string | RegExp)[]; +} + +export type BuildStartOptions = { + input?: string | string[] | Record; +}; + +export type ResolveIdOptions = { + kind: "import-statement" | "dynamic-import" | "require-call"; +}; + +export interface DenoPlugin extends Disposable { + name: string; + buildStart(options?: BuildStartOptions): Promise; + resolveId: { + filter: { id: RegExp }; + handler( + source: string, + importer: string | undefined, + options: ResolveIdOptions + ): Promise; + }; + load: { + filter: { id: RegExp }; + handler(id: string): string | undefined; + }; +} + +/** + * Creates a deno plugin for use with rolldown. + * @returns The plugin. + */ +export default function denoPlugin(pluginOptions: DenoPluginOptions = {}): DenoPlugin { + let loader: Loader | undefined; + const loads = new Map>(); + const modules = new Map(); + + return { + name: "deno-plugin", + + [Symbol.dispose]: () => { + if (loader && typeof loader[Symbol.dispose] === "function") { + loader[Symbol.dispose](); + } + }, + + buildStart: async (options) => { + let inputs: string[] = []; + + if (options?.input != null) { + const { input } = options; + if (Array.isArray(input)) { + inputs = input; + } else if (typeof input === "object") { + inputs = Object.values(input); + } else if (typeof input === "string") { + inputs = [input]; + } + } else if (pluginOptions.entrypoints?.length) { + inputs = pluginOptions.entrypoints; + } + + if (inputs.length === 0) return; + + const workspace = new Workspace({ ...pluginOptions }); + loader = await workspace.createLoader(); + await loader.addEntrypoints(inputs); + }, + + resolveId: { + filter: { id: MARegex }, + + handler: async (source, importer, options) => { + if (!loader) { + throw new Error("Deno loader not initialized. Make sure buildStart was called."); + } + + const resolutionMode = resolveKindToResolutionMode(options.kind); + const normalizedImporter = importer != null ? (modules.get(importer)?.specifier ?? 
importer) : undefined; + + let resolvedSpecifier: string; + try { + resolvedSpecifier = await loader.resolve(source, normalizedImporter, resolutionMode); + } catch (error: unknown) { + if ((error as { code?: string })?.code === "ERR_MODULE_NOT_FOUND") { + if (pluginOptions.externalPatterns) { + for (const pattern of pluginOptions.externalPatterns) { + if (typeof pattern === "string") { + if (source === pattern || source.startsWith(`${pattern}/`)) { + return { id: source, external: true }; + } + } else if (pattern.test(source)) { + return { id: source, external: true }; + } + } + } + + if ( + !( + source.startsWith(".") || + source.startsWith("/") || + source.startsWith("file:") || + source.startsWith("http") + ) + ) { + return { id: source, external: true }; + } + + return; + } + throw error; + } + + let loadPromise = loads.get(resolvedSpecifier); + if (!loadPromise) { + loadPromise = loader.load(resolvedSpecifier, RequestedModuleType.Default); + loads.set(resolvedSpecifier, loadPromise); + } + + const result = await loadPromise; + + if (!result) { + modules.set(resolvedSpecifier, undefined); + return resolvedSpecifier; + } + + if (result.kind === "external") { + return { id: result.specifier, external: true }; + } + + const ext = mediaTypeToExtension(result.mediaType); + let { specifier } = result; + + if (!specifier.endsWith(ext)) { + specifier += `.rolldown${ext}`; + } + + if (specifier.startsWith("file:///")) { + specifier = fromFileUrl(specifier); + } + + modules.set(specifier, { + specifier: result.specifier, + code: new TextDecoder().decode(result.code) + }); + + return specifier; + } + }, + + load: { + filter: { id: MARegex }, + + handler: (id) => { + return modules.get(id)?.code; + } + } + }; +} + +function mediaTypeToExtension(mediaType: MediaType): string { + switch (mediaType) { + case MediaType.JavaScript: + return ".js"; + case MediaType.Mjs: + return ".mjs"; + case MediaType.Cjs: + return ".cjs"; + case MediaType.Jsx: + return ".jsx"; + case MediaType.TypeScript: + case MediaType.Mts: + return ".ts"; + case MediaType.Cts: + return ".cts"; + case MediaType.Dts: + return ".d.ts"; + case MediaType.Dmts: + return ".d.mts"; + case MediaType.Dcts: + return ".d.cts"; + case MediaType.Tsx: + return ".tsx"; + case MediaType.Css: + return ".css"; + case MediaType.Json: + return ".json"; + case MediaType.Html: + return ".html"; + case MediaType.Sql: + return ".sql"; + case MediaType.Wasm: + return ".wasm"; + case MediaType.SourceMap: + return ".map"; + default: + return ""; + } +} + +function resolveKindToResolutionMode(kind: string): ResolutionMode { + switch (kind) { + case "import-statement": + case "dynamic-import": + return ResolutionMode.Import; + case "require-call": + return ResolutionMode.Require; + default: + throw new Error(`not implemented: ${kind}`); + } +} diff --git a/mise.toml b/mise.toml new file mode 100644 index 00000000..1456c634 --- /dev/null +++ b/mise.toml @@ -0,0 +1,205 @@ +[tools] +deno = "latest" + +[tasks."install"] +description = "Install all dependencies" +run = [ + { task = "install:deno" } +] + +[tasks."install:deno"] +description = "Install Deno dependencies" +run = [ + "mise exec -- deno install" +] + +[tasks."clean"] +description = "Clean project environment" +run = [ + { task = "clean:devel" } +] + +[tasks."clean:deno"] +description = "Clean deno artifacts" +run = [ + "rm -rf ./node_modules/ ./deno.lock" +] + +[tasks."clean:devel"] +description = "Clean development artifacts" +run = [ + "rm -rf ./dist/" +] + +[tasks."clean:storage"] +description = 
"Clean storage (careful)" +run = [ + "rm -rf ./storage/" +] + +[tasks."clean:git"] +description = "Clean git (careful)" +run = [ + { task = "clean:git:untracked" }, + { task = "clean:git:gc" }, + { task = "clean:git:hooks" }, +] + +[tasks."clean:git:gc"] +run = [ + "git gc --aggressive --prune" +] + +[tasks."clean:git:hooks"] +run = [ + "rm -rf ./.git/hooks/" +] + +[tasks."clean:git:untracked"] +run = [ + "git clean -d -x -i" +] + +[tasks."build"] +description = "Build (server)" +run = [ + { task = "build:server" }, +] + +[tasks."build:server"] +description = "Build server" +sources = ['src/**/*.ts', "deno.lock", 'rolldown.config.ts'] +outputs = ['dist/backend.js', 'dist/backend.js.map'] +run = [ + { task = "install" }, + "mise exec -- deno x -A rolldown -c rolldown.config.ts" +] + +[tasks."build:standalone_"] +hide = true +run = [ + { task = "build:server" }, + "mise exec -- deno compile --no-check --allow-all --exclude=./node_modules/ --exclude=./package.json --include=./dist/backend.js.map --output=${STANDALONE_OUTPUT:-./dist/backend} ${STANDALONE_TARGET:+--target=${STANDALONE_TARGET}} ./dist/backend.js" +] + +[tasks."build:standalone"] +description = "Build standalone binary (current os/arch)" +run = [ + { task = "build:standalone_" } +] + +[tasks."build:standalone:darwin-arm64"] +description = "Build standalone binary" +env = { STANDALONE_OUTPUT = "./dist/backend.darwin-arm64", STANDALONE_TARGET = "aarch64-apple-darwin" } +run = [ + { task = "build:standalone_" } +] + +[tasks."build:standalone:linux-amd64"] +description = "Build standalone binary" +env = { STANDALONE_OUTPUT = "./dist/backend.linux-amd64", STANDALONE_TARGET = "x86_64-unknown-linux-gnu" } +run = [ + { task = "build:standalone_" } +] + +[tasks."build:standalone:linux-arm64"] +description = "Build standalone binary" +env = { STANDALONE_OUTPUT = "./dist/backend.linux-arm64", STANDALONE_TARGET = "aarch64-unknown-linux-gnu" } +run = [ + { task = "build:standalone_" } +] + +[tasks."build:standalone:windows-amd64"] +description = "Build standalone binary" +env = { STANDALONE_OUTPUT = "./dist/backend.windows-amd64.exe", STANDALONE_TARGET = "x86_64-pc-windows-msvc" } +run = [ + { task = "build:standalone_" } +] + +[tasks."test"] +description = "Run all tests" +run = [ + "mise exec -- deno test -A" +] + +[tasks."fix"] +description = "Run all formatters" +run = [ + { task = "fix:biome" }, +] + +[tasks."fix:biome"] +description = "Run Biome formater" +run = [ + { task = "install" }, + "mise exec -- deno x -A biome check --write" +] + +[tasks."lint"] +description = "Run all linters" +run = [ + { task = "lint:deno" }, + { task = "lint:biome" }, +] + +[tasks."lint:deno"] +description = "Run Deno linter" +run = [ + { task = "install" }, + "mise exec -- deno check --quiet" +] + +[tasks."lint:biome"] +description = "Run Biome linter" +run = [ + { task = "install" }, + "mise exec -- deno x -A biome check" +] + +[tasks."tidy"] +description = "Tidy all" +run = [ + { task = "tidy:deno" }, +] + +[tasks."tidy:deno"] +description = "Tidy Deno dependencies" +run = [ + { task = "clean:deno" }, + { task = "install:deno" }, +] + +[tasks."start"] +description = "Start backend" +run = [ + { task = "start:server" }, +] + +[tasks."start:dev"] +alias = "dev" +description = "Start devel server" +run = [ + { task = "install" }, + "JSPB_DEBUG_DATABASE_EPHEMERAL=true JSPB_LOG_VERBOSITY=4 mise exec -- deno run -A ./src/index.ts" +] + +[tasks."start:build"] +description = "Start dedicated server" +run = [ + { task = "build:server" }, + { task = "start:server" 
}, +] + +[tasks."start:server"] +description = "Start dedicated server (requires built backend)" +dir = "{{ config_root }}/dist/" +run = [ + "JSPB_LOG_VERBOSITY=4 mise exec -- deno run -A ./backend.js" +] + +[tasks."start:server:inspector"] +description = "Start dedicated server (requires built backend)" +dir = "{{ config_root }}/dist/" +run = [ + "JSPB_LOG_VERBOSITY=4 mise exec -- deno run -A --inspect-brk ./backend.js" +] diff --git a/package.json b/package.json index 4e5d72d6..e018ade4 100644 --- a/package.json +++ b/package.json @@ -1,53 +1,4 @@ { - "$schema": "https://json.schemastore.org/package.json", - "private": true, - "license": "EUPL-1.2", - "type": "module", - "scripts": { - "build": "bun run build:server", - "build:all": "bun run build:server && bun run build:standalone", - "build:server": "bun build ./src/server.ts --target=bun --minify --sourcemap=inline --outfile=./dist/server.js", - "build:standalone": "bun build ./dist/server.js --compile --minify --sourcemap=inline --outfile=./dist/server", - "build:standalone:darwin-arm64": "bun run build:standalone -- --target=bun-darwin-arm64", - "build:standalone:linux-amd64-glibc": "bun run build:standalone -- --target=bun-linux-x64-modern", - "build:standalone:linux-amd64-musl": "bun run build:standalone -- --target=bun-linux-x64-modern-musl", - "build:standalone:linux-arm64-glibc": "bun run build:standalone -- --target=bun-linux-arm64", - "build:standalone:linux-arm64-musl": "bun run build:standalone -- --target=bun-linux-arm64-musl", - "build:standalone:windows-amd64": "bun run build:standalone -- --target=bun-windows-x64-modern", - "clean:git:all": "bun run clean:git:untracked && bun run clean:git:gc && bun run clean:git:hooks", - "clean:git:all:force": "bun run clean:git:untracked:force && bun run clean:git:gc && bun run clean:git:hooks", - "clean:git:gc": "git gc --aggressive --prune", - "clean:git:hooks": "rm -rf ./.git/hooks/ && bun install -f", - "clean:git:untracked": "git clean -d -x -i", - "clean:git:untracked:force": "git clean -d -x -f", - "dev": "bun run start:dev", - "fix": "bun run fix:biome; bun run fix:package", - "fix:biome": "bun biome check --write", - "fix:package": "bun sort-package-json --quiet", - "lint": "bun run lint:biome && bun run lint:tsc", - "lint:biome": "bun biome lint", - "lint:tsc": "bun tsc --noEmit", - "start": "bun run start:server", - "start:dev": "mkdir -p ./dist/ && LOGLEVEL=4 bun run --cwd=./dist/ ../src/server.ts", - "start:rebuild": "bun run build:server && bun run start:server", - "start:server": "mkdir -p ./dist/ && bun run --cwd=./dist/ ./server.js" - }, - "dependencies": { - "@hono/zod-openapi": "~0.19.0", - "env-var": "~7.5.0", - "hono": "~4.7.0" - }, - "devDependencies": { - "@biomejs/biome": "~1.9.0", - "@types/bun": "^1.2.0", - "lefthook": "~1.11.0", - "sort-package-json": "^3.0.0" - }, - "peerDependencies": { - "typescript": "~5.8.0" - }, - "trustedDependencies": [ - "@biomejs/biome", - "lefthook" - ] + "license": "EUPL-1.2", + "type": "module" } diff --git a/rolldown.config.ts b/rolldown.config.ts new file mode 100644 index 00000000..3578f3b3 --- /dev/null +++ b/rolldown.config.ts @@ -0,0 +1,39 @@ +import type { RolldownOptions } from "rolldown"; +import { analyzer, unstableRolldownAdapter } from "vite-bundle-analyzer"; +import deno from "./lib/deno-rolldown/mod.ts"; + +const analyze = false; + +export default { + input: "./src/index.ts", + output: { + file: "./dist/backend.js", + format: "es", + inlineDynamicImports: true, + legalComments: "none", + minify: true, + sourcemap: 
true + }, + resolve: { + conditionNames: ["import", "default"], + mainFields: ["module", "main"] + }, + platform: "neutral", + external: [/^(node:)/], + moduleTypes: { + ".sql": "text" + }, + optimization: { + inlineConst: true + }, + plugins: [ + deno(), + unstableRolldownAdapter( + analyzer({ + enabled: analyze, + analyzerPort: "auto", + summary: true + }) + ) + ] +} satisfies RolldownOptions; diff --git a/src/config.ts b/src/config.ts deleted file mode 100644 index 9e76dc66..00000000 --- a/src/config.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { env } from '#util/env.ts'; - -export const config = { - protocol: env.tls ? 'https://' : 'http://', - apiPath: '/api', - storagePath: 'storage/', - documentNameLengthMin: 2, - documentNameLengthMax: 32, - documentNameLengthDefault: 8 -} as const; diff --git a/src/database/index.ts b/src/database/index.ts new file mode 100644 index 00000000..3d1503ae --- /dev/null +++ b/src/database/index.ts @@ -0,0 +1,141 @@ +import { DatabaseSync, type StatementSync } from "node:sqlite"; +import { monotonicUlid, ulid } from "@std/ulid"; +import { Logger } from "#util/console.ts"; +import { generateHash } from "#util/crypto.ts"; +import { generateToken } from "#util/user.ts"; +import { constantPathDatabaseFile, constantStoreStatements } from "../global.ts"; +import { env } from "../utils/env.ts"; +import { migrations } from "./migration.ts"; +import { DocumentQuery, UserQuery } from "./query.ts"; + +const log: Logger = new Logger("database"); + +type Options = { + ephemeral?: boolean; +}; + +export class Database { + public readonly document = new DocumentQuery(this); + public readonly user = new UserQuery(this); + + private readonly database: DatabaseSync; + + public constructor(options: Options = {}) { + options.ephemeral ??= env.JSPB_DEBUG_DATABASE_EPHEMERAL; + + this.database = new DatabaseSync(options.ephemeral ? ":memory:" : constantPathDatabaseFile); + + if (options.ephemeral) { + log.warn("Using ephemeral. No changes will persist."); + return; + } + + this.exec(`PRAGMA journal_mode = WAL; + PRAGMA wal_autocheckpoint = 1024;`); + } + + public async migration(): Promise { + const query = this.prepare("PRAGMA user_version;", false).get(); + if (typeof query?.user_version !== "number") { + throw new Deno.errors.InvalidData("Failed to get version."); + } + + if (query.user_version !== migrations.length) { + if (query.user_version > migrations.length) { + throw new Deno.errors.InvalidData("Version is higher than available migrations. 
Update your JSPaste instance."); + } + + for (const [delta, migration] of migrations.slice(query.user_version).entries()) { + try { + // biome-ignore lint/performance/noAwaitInLoops: serialized + await this.transaction(async () => { + await migration.preMigration?.(this); + this.exec(migration.sql); + await migration.postMigration?.(this); + this.exec(`PRAGMA user_version = ${(query.user_version as number) + delta + 1};`); + }); + } catch (error) { + log.error(`Error while running migration "${migration.id}"..:`); + throw error; + } + + log.info(`Migration "${migration.id}" ran successfully.`); + } + } else { + log.debug("Already up to date."); + } + + try { + const rootId = this.user.getRoot()?.id; + + if (env.JSPB_USER_ROOT_RECOVERY && rootId) { + const token = generateToken(rootId); + const hash = generateHash(token); + + this.user.update("id", rootId, "token", hash.combo); + + log.warn("+-- The root user token was regenerated.", "|", `+--> "${token}"`); + } else if (!rootId?.startsWith("0000000001")) { + const token = this.user.create(ulid(1)); + + log.warn("+-- Note the root user token as it won't be shown again.", "|", `+--> "${token}"`); + } + } catch (error) { + log.error("Failed to handle the root user..:"); + throw error; + } + } + + public exec(sql: string): void { + this.database.exec(sql); + } + + public prepare(sql: string, cache = true): StatementSync { + if (!cache) { + return this.database.prepare(sql); + } + + let statement = constantStoreStatements.get(sql); + if (!statement) { + statement = this.database.prepare(sql); + constantStoreStatements.set(sql, statement); + } + + return statement; + } + + public transaction(callback: () => T): T { + if (this.database.isTransaction) { + const name = `_${monotonicUlid()}`; + + this.exec(`SAVEPOINT ${name};`); + try { + return callback(); + } catch (error) { + this.exec(`ROLLBACK TO ${name};`); + + throw error; + } finally { + this.exec(`RELEASE ${name};`); + } + } + + this.exec("BEGIN IMMEDIATE;"); + try { + const result = callback(); + + this.exec("COMMIT;"); + + return result; + } catch (error) { + this.exec("ROLLBACK;"); + + throw error; + } + } + + public [Symbol.dispose](): void { + constantStoreStatements.clear(); + this.database.close(); + } +} diff --git a/src/database/migration.ts b/src/database/migration.ts new file mode 100644 index 00000000..ae7f5233 --- /dev/null +++ b/src/database/migration.ts @@ -0,0 +1,91 @@ +import { mapNotNullish } from "@std/collections"; +import { ulid } from "@std/ulid"; +import type { Database } from "#db/index.ts"; +import { Logger } from "#util/console.ts"; +import { generateHash } from "#util/crypto.ts"; +import { mutable } from "../global.ts"; + +const log: Logger = new Logger("database::migration"); + +type Migration = { + id: string; + preMigration?: (database: Database) => Promise | void; + sql: string; + postMigration?: (database: Database) => Promise | void; +}; + +export const migrations: Migration[] = [ + /** + * @description + * Base schema. + * + * @date 2025-12-27 + */ + { + id: "0001.base", + sql: (await import("./migrations/0001.sql", { with: { type: "text" } })).default + }, + + /** + * @description + * Hash everything, all future sensitive columns are now hashed. + * This first stage hashes all documents passwords and moves the root user to a compatible id. 
+ *
+ * @date 2026-01-06
+ */
+  {
+    id: "0002.hashingStage1",
+    preMigration: (database: Database) => {
+      // migrate document passwords
+      const documentsHashed = mapNotNullish(database.document.getAll(["id", "password"]), ({ id, password }) => {
+        if (!password) return;
+
+        const hash = generateHash(password);
+        database.document.update("id", id, "password", hash.combo);
+
+        return id;
+      });
+
+      if (documentsHashed.length > 0) {
+        log.debug(`Hashed ${documentsHashed.length} document passwords.`);
+      }
+
+      // migrate user root id
+      const userRootIdOld = "0000000000FFFF000000000000";
+      const userRootToken = database.user.get("id", userRootIdOld)?.token;
+      if (userRootToken) {
+        const id = ulid(1);
+        database.user.create(id);
+
+        for (const document of database.user.getDocuments(userRootIdOld)) {
+          database.document.update("id", document.id, "user_id", id);
+        }
+
+        database.user.delete("id", userRootIdOld);
+
+        const userRootId = mutable.database.user.getRoot()?.id;
+        if (userRootId) {
+          database.user.update("id", userRootId, "token", userRootToken);
+        }
+      }
+
+      const userTokens = database.user.getAll(["token"]);
+
+      let userTokenUnhashed = false;
+      for (const entry of userTokens) {
+        // combo separator
+        if (!entry.token.includes(" ")) {
+          userTokenUnhashed = true;
+          break;
+        }
+      }
+
+      if (userTokenUnhashed) {
+        log.warn(
+          "Users with plain tokens found!",
+          "New users in the instance will have their token hashed.",
+          "In the future we will enforce that every user token is hashed."
+        );
+      }
+    },
+    sql: (await import("./migrations/0002.sql", { with: { type: "text" } })).default
+  }
+] as const;
diff --git a/src/database/migrations/0001.sql b/src/database/migrations/0001.sql
new file mode 100644
index 00000000..bfa6be50
--- /dev/null
+++ b/src/database/migrations/0001.sql
@@ -0,0 +1,18 @@
+CREATE TABLE user
+(
+    id    TEXT NOT NULL PRIMARY KEY,
+    token TEXT NOT NULL
+) STRICT;
+
+CREATE UNIQUE INDEX idx_user_token ON USER (token);
+
+CREATE TABLE document
+(
+    id       TEXT NOT NULL PRIMARY KEY,
+    user_id  TEXT REFERENCES USER (id) ON DELETE CASCADE,
+    version  INTEGER NOT NULL,
+    name     TEXT NOT NULL,
+    password TEXT
+) STRICT;
+
+CREATE UNIQUE INDEX idx_document_name ON DOCUMENT (name)
diff --git a/src/database/migrations/0002.sql b/src/database/migrations/0002.sql
new file mode 100644
index 00000000..ec5070fe
--- /dev/null
+++ b/src/database/migrations/0002.sql
@@ -0,0 +1 @@
+DROP INDEX idx_user_token;
diff --git a/src/database/query.ts b/src/database/query.ts
new file mode 100644
index 00000000..ecb38589
--- /dev/null
+++ b/src/database/query.ts
@@ -0,0 +1,166 @@
+import type { SQLInputValue } from "node:sqlite";
+import { chunk } from "@std/collections";
+import { monotonicUlid } from "@std/ulid";
+import type { Database } from "#db/index.ts";
+import { generateHash } from "#util/crypto.ts";
+import { generateToken } from "#util/user.ts";
+import { constantDatabaseMaxElements } from "../global.ts";
+import type { DocumentVersionType } from "../utils/document.ts";
+
+export type Document = {
+  id: string;
+  user_id: string | null;
+  version: DocumentVersionType;
+  name: string;
+  password: string | null;
+};
+export type DocumentIndex = Pick;
+
+export type User = {
+  id: string;
+  token: string;
+};
+export type UserIndex = Pick;
+
+abstract class Query<Table extends Record<string, SQLInputValue>> {
+  protected readonly database: Database;
+  private readonly table: string;
+
+  protected constructor(database: Database, table: string) {
+    this.database = database;
+    this.table = table;
+  }
+
+  protected deleteByColumn<K extends keyof Table & string>(column: K, values: Table[K] | Iterable<Table[K]>): void {
+    let defaultValues: Iterable<Table[K]>;
+    if (typeof values !== "object") {
+      defaultValues = [values];
+    } else {
+      defaultValues = values;
+    }
+
+    this.database.transaction(() => {
+      for (const batch of chunk(defaultValues, constantDatabaseMaxElements)) {
+        this.database
+          .prepare(
+            `DELETE
+             FROM ${this.table}
+             WHERE ${column} IN (${batch.map(() => "?").join(", ")})`,
+            false
+          )
+          .run(...batch);
+      }
+    });
+  }
+
+  protected updateByColumn<WK extends keyof Table & string, SK extends keyof Table & string>(
+    whereColumn: WK,
+    whereValue: Table[WK],
+    setColumn: SK,
+    setValue: Table[SK]
+  ): void {
+    this.database
+      .prepare(`UPDATE ${this.table}
+                SET ${setColumn} = :setValue
+                WHERE ${whereColumn} = :whereValue`)
+      .run({
+        setValue: setValue,
+        whereValue: whereValue
+      });
+  }
+
+  protected selectByColumn<K extends keyof Table & string>(column: K, value: Table[K]): Table | undefined {
+    return this.database
+      .prepare(`SELECT *
+                FROM ${this.table}
+                WHERE ${column} = :value`)
+      .get({
+        value: value
+      }) as Table | undefined;
+  }
+
+  protected selectColumns<K extends keyof Table & string>(columns: K[]): Pick<Table, K>[] {
+    return this.database
+      .prepare(`SELECT ${columns.join(", ")}
+                FROM ${this.table}`)
+      .all() as Pick<Table, K>[];
+  }
+}
+
+export class DocumentQuery extends Query<Document> {
+  public constructor(database: Database) {
+    super(database, "document");
+  }
+
+  public create(params: Document): void {
+    this.database
+      .prepare(
+        `INSERT INTO document (id, user_id, version, name, password)
+         VALUES (:id, :user_id, :version, :name, :password)`
+      )
+      .run({
+        id: params.id,
+        user_id: params.user_id,
+        version: params.version,
+        name: params.name,
+        password: params.password
+      });
+  }
+
+  public delete = this.deleteByColumn;
+  public update = this.updateByColumn;
+  public get = this.selectByColumn;
+  public getAll = this.selectColumns;
+}
+
+export class UserQuery extends Query<User> {
+  public constructor(database: Database) {
+    super(database, "user");
+  }
+
+  public create(id: string = monotonicUlid()): string {
+    const token = generateToken(id);
+    const hash = generateHash(token);
+
+    this.database
+      .prepare(`INSERT INTO user (id, token)
+                VALUES (:id, :token)`)
+      .run({ id: id, token: hash.combo });
+
+    return token;
+  }
+
+  public delete = this.deleteByColumn;
+  public update = this.updateByColumn;
+  public get = this.selectByColumn;
+
+  public getRoot(): User | undefined {
+    return this.database
+      .prepare(`SELECT *
+                FROM user
+                WHERE user.id
+                LIKE '0000000001%'
+                LIMIT 1`)
+      .get() as User | undefined;
+  }
+
+  public getDocuments(id: string): Pick<Document, "id" | "name">[] {
+    return this.database
+      .prepare(`SELECT document.id, document.name
+                FROM document WHERE document.user_id = :id`)
+      .all({ id: id }) as Pick<Document, "id" | "name">[];
+  }
+
+  public getAll = this.selectColumns;
+
+  public getAllWithoutDocuments(): Pick<User, "id">[] {
+    return this.database
+      .prepare(`SELECT user.id
+                FROM user
+                WHERE NOT EXISTS (SELECT 1
+                                  FROM document
+                                  WHERE document.user_id = user.id
+                )`)
+      .all() as Pick<User, "id">[];
+  }
+}
diff --git a/src/document/compression.ts b/src/document/compression.ts
deleted file mode 100644
index 92a7e359..00000000
--- a/src/document/compression.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { type InputType, brotliCompressSync, brotliDecompressSync } from 'node:zlib';
-
-export const compression = {
-  encode: (data: InputType): Buffer => {
-    return brotliCompressSync(data);
-  },
-
-  decode: (data: InputType): Buffer => {
-    return brotliDecompressSync(data);
-  }
-} as const;
diff --git a/src/document/crypto.ts b/src/document/crypto.ts
deleted file mode 100644
index da680523..00000000
--- a/src/document/crypto.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-import { randomBytes } from
'node:crypto'; - -const hashAlgorithm = 'blake2b256'; -const saltLength = 16; - -export const crypto = { - hash: (password: string): Uint8Array => { - const salt = randomBytes(saltLength); - const hasher = new Bun.CryptoHasher(hashAlgorithm, salt).update(password); - - return Buffer.concat([salt, hasher.digest()]); - }, - - compare: (password: string, hash: Uint8Array): boolean => { - const salt = hash.subarray(0, saltLength); - const hasher = new Bun.CryptoHasher(hashAlgorithm, salt).update(password); - - const passwordHash = Buffer.concat([salt, hasher.digest()]); - - return hash.every((value, index) => value === passwordHash[index]); - } -} as const; diff --git a/src/document/storage.ts b/src/document/storage.ts deleted file mode 100644 index 4310dd55..00000000 --- a/src/document/storage.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { deserialize, serialize } from 'bun:jsc'; -import { validator } from '#document/validator.ts'; -import { errorHandler } from '#server/errorHandler.ts'; -import type { Document } from '#type/Document.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../config.ts'; - -export const storage = { - read: async (name: string): Promise => { - validator.validateName(name); - - const file = Bun.file(config.storagePath + name); - - if (!(await file.exists())) { - errorHandler.send(ErrorCode.documentNotFound); - } - - return deserialize(await file.arrayBuffer()); - }, - - write: async (name: string, document: Document): Promise => { - await Bun.write(config.storagePath + name, serialize(document)); - } -} as const; diff --git a/src/document/validator.ts b/src/document/validator.ts deleted file mode 100644 index 188d6138..00000000 --- a/src/document/validator.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { crypto } from '#document/crypto.ts'; -import { errorHandler } from '#server/errorHandler.ts'; -import type { Document } from '#type/Document.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { ValidatorUtils } from '#util/ValidatorUtils.ts'; -import { config } from '../config.ts'; - -export const validator = { - validateName: (name: string): void => { - if ( - !ValidatorUtils.isValidBase64URL(name) || - !ValidatorUtils.isLengthWithinRange( - Bun.stringWidth(name), - config.documentNameLengthMin, - config.documentNameLengthMax - ) - ) { - errorHandler.send(ErrorCode.documentInvalidName); - } - }, - - validateNameLength: (length: number | undefined): void => { - if ( - length && - !ValidatorUtils.isLengthWithinRange(length, config.documentNameLengthMin, config.documentNameLengthMax) - ) { - errorHandler.send(ErrorCode.documentInvalidNameLength); - } - }, - - validatePassword: (password: string, dataHash: Document['header']['passwordHash']): void => { - if (dataHash && !crypto.compare(password, dataHash)) { - errorHandler.send(ErrorCode.documentInvalidPassword); - } - }, - - validatePasswordLength: (password: string | undefined): void => { - if ( - password && - (ValidatorUtils.isEmptyString(password) || - !ValidatorUtils.isLengthWithinRange(Bun.stringWidth(password), 1, 255)) - ) { - errorHandler.send(ErrorCode.documentInvalidPasswordLength); - } - }, - - validateSecret: (secret: string, secretHash: Document['header']['secretHash']): void => { - if (!crypto.compare(secret, secretHash)) { - errorHandler.send(ErrorCode.documentInvalidSecret); - } - }, - - validateSecretLength: (secret: string): void => { - if (!ValidatorUtils.isLengthWithinRange(Bun.stringWidth(secret), 1, 255)) { - 
errorHandler.send(ErrorCode.documentInvalidSecretLength); - } - } -} as const; diff --git a/src/endpoints/document/v1/delete.ts b/src/endpoints/document/v1/delete.ts new file mode 100644 index 00000000..fb437636 --- /dev/null +++ b/src/endpoints/document/v1/delete.ts @@ -0,0 +1,59 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, validator } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { authMiddleware } from "#http/middleware/authorization.ts"; +import { isOwner } from "#util/document.ts"; +import { errorCodeDocumentNotFound, errorCodeUserInvalidToken, errorThrow, genericErrorResponse } from "#util/error.ts"; +import { fsDelete } from "#util/fs.ts"; +import { validatorDocumentName } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +export default new Hono().delete( + "/:name", + describeRoute({ + tags: ["DOCUMENT (v1)"], + summary: "Delete document", + description: "Deletes a published document in the instance", + security: [{}, { bearer: [] }], + responses: { + 200: { + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] }, + + // auth middleware + 401: { ...genericErrorResponse, description: constantHttpStatusCodes[401] } + } + }), + validator("param", schemaParam, validatorHandler), + authMiddleware, + (ctx) => { + const { + name + // @ts-expect-error upstream + } = ctx.req.valid("param") as typeof schemaParam.infer; + + const document = mutable.database.document.get("name", name); + if (!document?.id) { + return errorThrow(errorCodeDocumentNotFound); + } + + const userId = ctx.get("userId"); + const owner = isOwner(userId, document.user_id); + if (!owner) { + return errorThrow(errorCodeUserInvalidToken); + } + + mutable.database.document.delete("name", name); + void fsDelete(document); + + return ctx.body(null); + } +); diff --git a/src/endpoints/document/v1/get.ts b/src/endpoints/document/v1/get.ts new file mode 100644 index 00000000..127da649 --- /dev/null +++ b/src/endpoints/document/v1/get.ts @@ -0,0 +1,120 @@ +import { stream } from "@hono/hono/streaming"; +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { decodeTime } from "@std/ulid"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { verifyHash } from "#util/crypto.ts"; +import { + errorCodeDocumentInvalidPassword, + errorCodeDocumentNotFound, + errorCodeDocumentPasswordNeeded, + errorThrow, + genericErrorResponse +} from "#util/error.ts"; +import { fsRead } from "#util/fs.ts"; +import { + validatorDocumentDownload, + validatorDocumentName, + validatorDocumentPassword +} from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; +import { validatorCreationTimestamp } from "#util/validator/shared.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +const schemaQuery = type({ + "dl?": validatorDocumentDownload +}); + +const schemaHeader = type({ + "x-jspaste-password?": validatorDocumentPassword +}); + +const schemaBodyResponse = await resolver(type.unknown).toOpenAPISchema(); + +const 
schemaHeaderResponse = await resolver( + type({ + "x-jspaste-created": validatorCreationTimestamp + }) +).toOpenAPISchema(); + +export default new Hono().get( + "/:name", + describeRoute({ + tags: ["DOCUMENT (v1)"], + summary: "Get document", + description: `Get the content/metadata of a published document in the instance + +Note: If you only need to query the document metadata, you should use HEAD method instead`, + responses: { + 200: { + content: { + "text/plain": { + schema: schemaBodyResponse.schema + }, + "application/octet-stream": { + schema: schemaBodyResponse.schema + } + }, + headers: schemaHeaderResponse.components, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("param", schemaParam, validatorHandler), + validator("header", schemaHeader, validatorHandler), + validator("query", schemaQuery, validatorHandler), + async (ctx) => { + const { + name + // @ts-expect-error upstream + } = ctx.req.valid("param") as typeof schemaParam.infer; + const { + "x-jspaste-password": password + // @ts-expect-error upstream + } = ctx.req.valid("header") as typeof schemaHeader.infer; + const { + dl + // @ts-expect-error upstream + } = ctx.req.valid("query") as typeof schemaQuery.infer; + + const document = mutable.database.document.get("name", name); + if (!document?.id) { + return errorThrow(errorCodeDocumentNotFound); + } + if (document.password) { + if (!password) { + return errorThrow(errorCodeDocumentPasswordNeeded); + } + + if (!verifyHash(password, document.password)) { + return errorThrow(errorCodeDocumentInvalidPassword); + } + } + + ctx.res.headers.set( + "x-jspaste-created", + Temporal.Instant.fromEpochMilliseconds(decodeTime(document.id)).toString() + ); + + // https://github.com/honojs/hono/issues/1130 + if (ctx.req.method === "HEAD") { + return ctx.body(null); + } + + if (typeof dl !== "undefined") { + ctx.res.headers.set("content-disposition", `attachment; filename="jspaste_${name}"`); + } + + ctx.res.headers.set("content-type", "text/plain"); + ctx.res.headers.set("transfer-encoding", "chunked"); + + return stream(ctx, async (stream) => await stream.pipe(await fsRead(ctx, document))); + } +); diff --git a/src/endpoints/document/v1/index.ts b/src/endpoints/document/v1/index.ts new file mode 100644 index 00000000..0bb8d9a3 --- /dev/null +++ b/src/endpoints/document/v1/index.ts @@ -0,0 +1,15 @@ +import { Hono } from "@hono/hono/tiny"; +import type { Env } from "#http/handler.ts"; +import delete_ from "./delete.ts"; +import get from "./get.ts"; +import list from "./list.ts"; +import patch from "./patch.ts"; +import post from "./post.ts"; + +export const v1DocumentHandler = new Hono(); + +v1DocumentHandler.route("/", delete_); +v1DocumentHandler.route("/", get); +v1DocumentHandler.route("/", list); +v1DocumentHandler.route("/", patch); +v1DocumentHandler.route("/", post); diff --git a/src/endpoints/document/v1/list.ts b/src/endpoints/document/v1/list.ts new file mode 100644 index 00000000..b5576001 --- /dev/null +++ b/src/endpoints/document/v1/list.ts @@ -0,0 +1,56 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver } from "@hono/openapi"; +import { decodeTime } from "@std/ulid"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { authMiddleware } from "#http/middleware/authorization.ts"; +import { 
errorCodeUserInvalidToken, errorThrow, genericErrorResponse } from "#util/error.ts"; +import { validatorDocumentListObject } from "#util/validator/document.ts"; + +const schemaBodyResponse = await resolver(validatorDocumentListObject.array()).toOpenAPISchema(); + +export default new Hono().get( + "/", + describeRoute({ + tags: ["DOCUMENT (v1)"], + summary: "List documents", + description: "List all user documents in the instance", + security: [{ bearer: [] }], + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] }, + + // auth middleware + 401: { ...genericErrorResponse, description: constantHttpStatusCodes[401] } + } + }), + authMiddleware, + async (ctx) => { + const userId = ctx.get("userId"); + if (!userId) { + return errorThrow(errorCodeUserInvalidToken); + } + + // https://github.com/honojs/hono/issues/1130 + if (ctx.req.method === "HEAD") { + return ctx.body(null); + } + + const documents = mutable.database.user.getDocuments(userId).map((document) => { + return { + name: document.name, + created: Temporal.Instant.fromEpochMilliseconds(decodeTime(document.id)).toString() + }; + }); + + return ctx.json(documents); + } +); diff --git a/src/endpoints/document/v1/patch.ts b/src/endpoints/document/v1/patch.ts new file mode 100644 index 00000000..240fd7af --- /dev/null +++ b/src/endpoints/document/v1/patch.ts @@ -0,0 +1,134 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { authMiddleware } from "#http/middleware/authorization.ts"; +import { bodyStream } from "#http/middleware/bodyStream.ts"; +import { generateHash } from "#util/crypto.ts"; +import { isOwner } from "#util/document.ts"; +import { env } from "#util/env.ts"; +import { + errorCodeDocumentNameAlreadyExists, + errorCodeDocumentNotFound, + errorCodeUserInvalidToken, + errorThrow, + genericErrorResponse +} from "#util/error.ts"; +import { fsWrite } from "#util/fs.ts"; +import { + validatorDocumentName, + validatorDocumentPassword, + validatorDocumentPasswordEmpty +} from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaBody = await resolver( + type.unknown.configure({ + description: "Document content.", + examples: ["Hello, World!"] + }) +).toOpenAPISchema(); + +const schemaParam = type({ + actualName: validatorDocumentName +}); + +const schemaHeader = type({ + "x-jspaste-name?": validatorDocumentName, + "x-jspaste-password?": validatorDocumentPassword.or(validatorDocumentPasswordEmpty) +}); + +export default new Hono().patch( + "/:actualName", + describeRoute({ + tags: ["DOCUMENT (v1)"], + summary: "Alter document", + description: `Edit the content/metadata of a published document in the instance + +Note: You can't move the ownership of a document, duplicate the document instead + +Note: To remove (nullify) a value, send the header with an empty value`, + security: [{}, { bearer: [] }], + requestBody: { + content: { + "text/plain": { + schema: schemaBody.schema + }, + "application/octet-stream": { + schema: schemaBody.schema + } + } + }, + responses: { + 200: { + description: constantHttpStatusCodes[200] + 
}, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] }, + + // auth middleware + 401: { ...genericErrorResponse, description: constantHttpStatusCodes[401] }, + + // document name already exists + 409: { ...genericErrorResponse, description: constantHttpStatusCodes[409] }, + + // bodyLimit middleware + 413: { ...genericErrorResponse, description: constantHttpStatusCodes[413] } + } + }), + validator("param", schemaParam, validatorHandler), + validator("header", schemaHeader, validatorHandler), + authMiddleware, + bodyStream, + async (ctx) => { + let { + actualName + // @ts-expect-error upstream + } = ctx.req.valid("param") as typeof schemaParam.infer; + const { + "x-jspaste-password": newPassword, + "x-jspaste-name": newName + // @ts-expect-error upstream + } = ctx.req.valid("header") as typeof schemaHeader.infer; + + const document = mutable.database.document.get("name", actualName); + if (!document?.id) { + return errorThrow(errorCodeDocumentNotFound); + } + + const userId = ctx.get("userId"); + const owner = isOwner(userId, document.user_id); + if (!owner) { + return errorThrow(errorCodeUserInvalidToken); + } + + if (newPassword !== undefined) { + if (newPassword === "") { + mutable.database.document.update("name", actualName, "password", null); + } else { + const hash = generateHash(newPassword); + + mutable.database.document.update("name", actualName, "password", hash.combo); + } + } + + // keep newName last thing to alter in case of race conditions + if (newName) { + if (mutable.database.document.get("name", newName)?.name) { + return errorThrow(errorCodeDocumentNameAlreadyExists); + } + + mutable.database.document.update("name", actualName, "name", newName); + + actualName = newName; + } + + if (ctx.get("hasBody")) { + mutable.database.document.update("name", actualName, "version", env.JSPB_DOCUMENT_COMPRESSION); + await fsWrite(ctx, document); + } + + return ctx.body(null); + } +); diff --git a/src/endpoints/document/v1/post.ts b/src/endpoints/document/v1/post.ts new file mode 100644 index 00000000..e4c2f044 --- /dev/null +++ b/src/endpoints/document/v1/post.ts @@ -0,0 +1,124 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { monotonicUlid } from "@std/ulid"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { authMiddleware } from "#http/middleware/authorization.ts"; +import { bodyStream } from "#http/middleware/bodyStream.ts"; +import { generateHash } from "#util/crypto.ts"; +import { generateName } from "#util/document.ts"; +import { env } from "#util/env.ts"; +import { errorCodeDocumentNameAlreadyExists, errorThrow, genericErrorResponse } from "#util/error.ts"; +import { fsWrite } from "#util/fs.ts"; +import { + validatorDocumentName, + validatorDocumentNameLength, + validatorDocumentPassword +} from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaBody = await resolver( + type.unknown.configure({ + description: "Document content.", + examples: ["Hello, World!"] + }) +).toOpenAPISchema(); + +const schemaHeader = type({ + "x-jspaste-name-length?": validatorDocumentNameLength, + "x-jspaste-name?": validatorDocumentName, + "x-jspaste-password?": validatorDocumentPassword +}); + +// Object includes not allowed fields +const schemaBodyResponse = await 
resolver( + type({ + name: validatorDocumentName + }) +).toOpenAPISchema(); + +export default new Hono().post( + "/", + describeRoute({ + tags: ["DOCUMENT (v1)"], + summary: "Post document", + description: "Publish a document to the instance", + security: [{}, { bearer: [] }], + requestBody: { + content: { + "text/plain": { + schema: schemaBody.schema + }, + "application/octet-stream": { + schema: schemaBody.schema + } + } + }, + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] }, + + // auth middleware + 401: { ...genericErrorResponse, description: constantHttpStatusCodes[401] }, + + // document name already exists + 409: { ...genericErrorResponse, description: constantHttpStatusCodes[409] }, + + // bodyLimit middleware + 413: { ...genericErrorResponse, description: constantHttpStatusCodes[413] } + } + }), + validator("header", schemaHeader, validatorHandler), + authMiddleware, + bodyStream, + async (ctx) => { + const { + "x-jspaste-password": password, + "x-jspaste-name": name, + "x-jspaste-name-length": nameLength + // @ts-expect-error upstream + } = ctx.req.valid("header") as typeof schemaHeader.infer; + + let setName: string; + if (name) { + if (mutable.database.document.get("name", name)?.name) { + return errorThrow(errorCodeDocumentNameAlreadyExists); + } + + setName = name; + } else { + setName = generateName(nameLength); + } + + const setId = monotonicUlid(); + + let hashCombo: string | null; + if (password) { + hashCombo = generateHash(password).combo; + } else { + hashCombo = null; + } + + mutable.database.document.create({ + id: setId, + user_id: ctx.get("userId") ?? 
null, + version: env.JSPB_DOCUMENT_COMPRESSION, + name: setName, + password: hashCombo + }); + await fsWrite(ctx, { id: setId }); + + return ctx.json({ + name: setName + }); + } +); diff --git a/src/endpoints/legacy/v2/documents/access.route.ts b/src/endpoints/legacy/v2/documents/access.route.ts new file mode 100644 index 00000000..17aba3c9 --- /dev/null +++ b/src/endpoints/legacy/v2/documents/access.route.ts @@ -0,0 +1,103 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { toText } from "@std/streams"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { verifyHash } from "#util/crypto.ts"; +import { + errorCodeDocumentInvalidPassword, + errorCodeDocumentNotFound, + errorCodeDocumentPasswordNeeded, + errorThrow, + genericErrorResponse +} from "#util/error.ts"; +import { fsRead } from "#util/fs.ts"; +import { validatorDocumentName, validatorDocumentPassword } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +const schemaHeader = type({ + "password?": validatorDocumentPassword +}); + +const schemaBodyResponse = await resolver( + type({ + key: type.string.configure({ + description: "The document name (formerly key)", + examples: ["abc123"] + }), + data: type.string.configure({ + description: "The document data", + examples: ["Hello, World!"] + }), + url: type.string.configure({ + deprecated: true, + description: "The document URL", + examples: ["https://jspaste.eu/abc123"] + }), + expirationTimestamp: type.number.configure({ + deprecated: true, + description: "The document expiration timestamp (always will be 0)", + examples: [0] + }) + }) +).toOpenAPISchema(); + +export default new Hono().get( + "/:name", + describeRoute({ + deprecated: true, + tags: ["DOCUMENT (legacy)"], + summary: "Get document", + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("param", schemaParam, validatorHandler), + validator("header", schemaHeader, validatorHandler), + async (ctx) => { + // https://github.com/honojs/hono/issues/1130 + if (ctx.req.method === "HEAD") { + return ctx.body(null); + } + + // @ts-expect-error upstream + const param = ctx.req.valid("param") as typeof schemaParam.infer; + // @ts-expect-error upstream + const header = ctx.req.valid("header") as typeof schemaHeader.infer; + + const document = mutable.database.document.get("name", param.name); + if (!document?.id) { + return errorThrow(errorCodeDocumentNotFound); + } + if (document.password) { + if (!header.password) { + return errorThrow(errorCodeDocumentPasswordNeeded); + } + + if (!verifyHash(header.password, document.password)) { + return errorThrow(errorCodeDocumentInvalidPassword); + } + } + + return ctx.json({ + key: param.name, + data: await toText(await fsRead(ctx, document, true)), + url: new URL(ctx.req.url).host.concat("/", param.name), + expirationTimestamp: 0 + }); + } +); diff --git a/src/endpoints/legacy/v2/documents/accessRaw.route.ts b/src/endpoints/legacy/v2/documents/accessRaw.route.ts new file mode 100644 index 00000000..a01fdd21 --- /dev/null +++ 
b/src/endpoints/legacy/v2/documents/accessRaw.route.ts @@ -0,0 +1,93 @@ +import { stream } from "@hono/hono/streaming"; +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { verifyHash } from "#util/crypto.ts"; +import { + errorCodeDocumentInvalidPassword, + errorCodeDocumentNotFound, + errorCodeDocumentPasswordNeeded, + errorThrow, + genericErrorResponse +} from "#util/error.ts"; +import { fsRead } from "#util/fs.ts"; +import { validatorDocumentName, validatorDocumentPassword } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +const schemaHeader = type({ + "password?": validatorDocumentPassword +}); + +const schemaQuery = type({ + "p?": validatorDocumentPassword +}); + +const schemaBodyResponse = await resolver(type.unknown).toOpenAPISchema(); + +export default new Hono().get( + "/:name/raw", + describeRoute({ + deprecated: true, + tags: ["DOCUMENT (legacy)"], + summary: "Get document data", + responses: { + 200: { + content: { + "text/plain": { + schema: schemaBodyResponse.schema + }, + "application/octet-stream": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("param", schemaParam, validatorHandler), + validator("header", schemaHeader, validatorHandler), + validator("query", schemaQuery, validatorHandler), + async (ctx) => { + // https://github.com/honojs/hono/issues/1130 + if (ctx.req.method === "HEAD") { + return ctx.body(null); + } + + // @ts-expect-error upstream + const param = ctx.req.valid("param") as typeof schemaParam.infer; + // @ts-expect-error upstream + const header = ctx.req.valid("header") as typeof schemaHeader.infer; + // @ts-expect-error upstream + const query = ctx.req.valid("query") as typeof schemaQuery.infer; + const options = { + password: header.password || query.p + }; + + const document = mutable.database.document.get("name", param.name); + if (!document?.id) { + return errorThrow(errorCodeDocumentNotFound); + } + if (document.password) { + if (!options.password) { + return errorThrow(errorCodeDocumentPasswordNeeded); + } + + if (!verifyHash(options.password, document.password)) { + return errorThrow(errorCodeDocumentInvalidPassword); + } + } + + ctx.res.headers.set("content-type", "text/plain"); + ctx.res.headers.set("transfer-encoding", "chunked"); + + return stream(ctx, async (stream) => await stream.pipe(await fsRead(ctx, document, true))); + } +); diff --git a/src/endpoints/legacy/v2/documents/edit.route.ts b/src/endpoints/legacy/v2/documents/edit.route.ts new file mode 100644 index 00000000..256567fa --- /dev/null +++ b/src/endpoints/legacy/v2/documents/edit.route.ts @@ -0,0 +1,77 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { bodyStream } from "#http/middleware/bodyStream.ts"; +import { env } from "#util/env.ts"; +import { errorCodeDocumentNotFound, errorThrow, genericErrorResponse } from 
"#util/error.ts"; +import { fsWrite } from "#util/fs.ts"; +import { validatorDocumentName } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +const schemaBody = await resolver( + type.string.configure({ + description: "Data to replace in the document", + examples: ["Hello world!"] + }) +).toOpenAPISchema(); + +const schemaBodyResponse = await resolver( + type({ + edited: type.boolean.configure({ + description: "Confirmation of edition", + examples: [true] + }) + }) +).toOpenAPISchema(); + +export default new Hono().patch( + "/:name", + describeRoute({ + deprecated: true, + tags: ["DOCUMENT (legacy)"], + summary: "Edit document", + requestBody: { + content: { + "text/plain": { + schema: schemaBody.schema + } + } + }, + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("param", schemaParam, validatorHandler), + bodyStream, + async (ctx) => { + // @ts-expect-error upstream + const param = ctx.req.valid("param") as typeof schemaParam.infer; + + const document = mutable.database.document.get("name", param.name); + if (!document?.id || document.user_id) { + return errorThrow(errorCodeDocumentNotFound); + } + + mutable.database.document.update("name", param.name, "version", env.JSPB_DOCUMENT_COMPRESSION); + await fsWrite(ctx, document); + + return ctx.json({ + edited: true + }); + } +); diff --git a/src/endpoints/legacy/v2/documents/exists.route.ts b/src/endpoints/legacy/v2/documents/exists.route.ts new file mode 100644 index 00000000..72c10a1d --- /dev/null +++ b/src/endpoints/legacy/v2/documents/exists.route.ts @@ -0,0 +1,47 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { genericErrorResponse } from "#util/error.ts"; +import { validatorDocumentName } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +const schemaBodyResponse = await resolver(type.boolean).toOpenAPISchema(); + +export default new Hono().get( + "/:name/exists", + describeRoute({ + deprecated: true, + tags: ["DOCUMENT (legacy)"], + summary: "Check document", + responses: { + 200: { + content: { + "text/plain": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("param", schemaParam, validatorHandler), + (ctx) => { + // https://github.com/honojs/hono/issues/1130 + if (ctx.req.method === "HEAD") { + return ctx.body(null); + } + + // @ts-expect-error upstream + const param = ctx.req.valid("param") as typeof schemaParam.infer; + + return ctx.text(mutable.database.document.get("name", param.name)?.name ? 
"true" : "false"); + } +); diff --git a/src/endpoints/legacy/v2/documents/index.ts b/src/endpoints/legacy/v2/documents/index.ts new file mode 100644 index 00000000..5ba784b7 --- /dev/null +++ b/src/endpoints/legacy/v2/documents/index.ts @@ -0,0 +1,17 @@ +import { Hono } from "@hono/hono/tiny"; +import type { Env } from "#http/handler.ts"; +import access from "./access.route.ts"; +import accessRaw from "./accessRaw.route.ts"; +import edit from "./edit.route.ts"; +import exists from "./exists.route.ts"; +import publish from "./publish.route.ts"; +import remove from "./remove.route.ts"; + +export const v2LegacyDocumentHandler = new Hono(); + +v2LegacyDocumentHandler.route("/", access); +v2LegacyDocumentHandler.route("/", accessRaw); +v2LegacyDocumentHandler.route("/", edit); +v2LegacyDocumentHandler.route("/", exists); +v2LegacyDocumentHandler.route("/", publish); +v2LegacyDocumentHandler.route("/", remove); diff --git a/src/endpoints/legacy/v2/documents/publish.route.ts b/src/endpoints/legacy/v2/documents/publish.route.ts new file mode 100644 index 00000000..8c83cfa3 --- /dev/null +++ b/src/endpoints/legacy/v2/documents/publish.route.ts @@ -0,0 +1,117 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { monotonicUlid } from "@std/ulid"; +import { type } from "arktype"; +import { + constantDocumentNameLengthMax, + constantDocumentNameLengthMin, + constantHttpStatusCodes, + mutable +} from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { bodyStream } from "#http/middleware/bodyStream.ts"; +import { generateHash } from "#util/crypto.ts"; +import { generateName } from "#util/document.ts"; +import { env } from "#util/env.ts"; +import { errorCodeDocumentNameAlreadyExists, errorThrow, genericErrorResponse } from "#util/error.ts"; +import { fsWrite } from "#util/fs.ts"; +import { validatorDocumentName, validatorDocumentPassword } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaBody = await resolver( + type.string.configure({ + description: "Data to replace in the document", + examples: ["Hello world!"] + }) +).toOpenAPISchema(); + +const schemaHeader = type({ + "password?": validatorDocumentPassword, + "key?": validatorDocumentName, + "keylength?": type.number.atLeast(constantDocumentNameLengthMin).atMost(constantDocumentNameLengthMax).configure({ + description: "The document name length" + }) +}); + +const schemaBodyResponse = await resolver( + type({ + key: type.string.configure({ + description: "The document name (formerly key)", + examples: ["abc123"] + }) + }) +).toOpenAPISchema(); + +export default new Hono().post( + "/", + describeRoute({ + deprecated: true, + tags: ["DOCUMENT (legacy)"], + summary: "Publish document", + requestBody: { + content: { + "text/plain": { + schema: schemaBody.schema + } + } + }, + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("header", schemaHeader, validatorHandler), + bodyStream, + async (ctx) => { + const { + password, + key: name, + keylength: nameLength + // @ts-expect-error upstream + } = ctx.req.valid("header") as typeof schemaHeader.infer; + + let setName: string; + if (name) { + if (mutable.database.document.get("name", 
name)?.name) { + return errorThrow(errorCodeDocumentNameAlreadyExists); + } + + setName = name; + } else { + setName = generateName(nameLength); + } + + const id = monotonicUlid(); + + let hashCombo: string | null; + if (password) { + hashCombo = generateHash(password).combo; + } else { + hashCombo = null; + } + + mutable.database.document.create({ + id: id, + user_id: null, + version: env.JSPB_DOCUMENT_COMPRESSION, + name: setName, + password: hashCombo + }); + await fsWrite(ctx, { id: id }); + + return ctx.json({ + key: setName, + secret: "", + url: new URL(ctx.req.url).host.concat("/", setName), + expirationTimestamp: 0 + }); + } +); diff --git a/src/endpoints/legacy/v2/documents/remove.route.ts b/src/endpoints/legacy/v2/documents/remove.route.ts new file mode 100644 index 00000000..6beb7196 --- /dev/null +++ b/src/endpoints/legacy/v2/documents/remove.route.ts @@ -0,0 +1,58 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver, validator } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { errorCodeDocumentNotFound, errorThrow, genericErrorResponse } from "#util/error.ts"; +import { fsDelete } from "#util/fs.ts"; +import { validatorDocumentName } from "#util/validator/document.ts"; +import { validatorHandler } from "#util/validator/handler.ts"; + +const schemaParam = type({ + name: validatorDocumentName +}); + +const schemaBodyResponse = await resolver( + type({ + removed: type.true.configure({ + description: "Confirmation of deletion", + examples: [true] + }) + }) +).toOpenAPISchema(); + +export default new Hono().delete( + "/:name", + describeRoute({ + deprecated: true, + tags: ["DOCUMENT (legacy)"], + summary: "Remove document", + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse.schema + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] } + } + }), + validator("param", schemaParam, validatorHandler), + (ctx) => { + // @ts-expect-error upstream + const param = ctx.req.valid("param") as typeof schemaParam.infer; + + const document = mutable.database.document.get("name", param.name); + if (!document?.id || document.user_id) { + return errorThrow(errorCodeDocumentNotFound); + } + + mutable.database.document.delete("name", param.name); + void fsDelete(document); + + return ctx.json({ removed: true }); + } +); diff --git a/src/endpoints/user/v1/create.ts b/src/endpoints/user/v1/create.ts new file mode 100644 index 00000000..9b558f12 --- /dev/null +++ b/src/endpoints/user/v1/create.ts @@ -0,0 +1,50 @@ +import { Hono } from "@hono/hono/tiny"; +import { describeRoute, resolver } from "@hono/openapi"; +import { type } from "arktype"; +import { constantHttpStatusCodes, mutable } from "#/global.ts"; +import type { Env } from "#http/handler.ts"; +import { authMiddleware } from "#http/middleware/authorization.ts"; +import { env } from "#util/env.ts"; +import { errorCodeUserInvalidToken, errorThrow, genericErrorResponse } from "#util/error.ts"; +import { validatorUserToken } from "#util/validator/user.ts"; + +const schemaBodyResponse = resolver( + type({ + token: validatorUserToken + }) +); + +export default new Hono().post( + "/", + describeRoute({ + tags: ["USER (v1)"], + summary: "Create user", + description: "Create a user to the instance", + security: 
[{}, { bearer: [] }], + responses: { + 200: { + content: { + "application/json": { + schema: schemaBodyResponse + } + }, + description: constantHttpStatusCodes[200] + }, + 400: { ...genericErrorResponse, description: constantHttpStatusCodes[400] }, + 404: { ...genericErrorResponse, description: constantHttpStatusCodes[404] }, + + // auth middleware + 401: { ...genericErrorResponse, description: constantHttpStatusCodes[401] } + } + }), + authMiddleware, + (ctx) => { + if (!env.JSPB_USER_REGISTER && ctx.get("userId") !== mutable.database.user.getRoot()?.id) { + return errorThrow(errorCodeUserInvalidToken); + } + + return ctx.json({ + token: mutable.database.user.create() + }); + } +); diff --git a/src/endpoints/user/v1/index.ts b/src/endpoints/user/v1/index.ts new file mode 100644 index 00000000..a977e204 --- /dev/null +++ b/src/endpoints/user/v1/index.ts @@ -0,0 +1,7 @@ +import { Hono } from "@hono/hono/tiny"; +import type { Env } from "#http/handler.ts"; +import create from "./create.ts"; + +export const v1UserHandler = new Hono(); + +v1UserHandler.route("/", create); diff --git a/src/endpoints/v1/access.route.ts b/src/endpoints/v1/access.route.ts deleted file mode 100644 index ef91dc36..00000000 --- a/src/endpoints/v1/access.route.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { storage } from '#document/storage.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const accessRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'get', - path: '/{name}', - tags: ['v1'], - summary: 'Get document', - deprecated: true, - request: { - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }) - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - key: z.string().openapi({ - description: 'The document name (formerly key)', - example: 'abc123' - }), - data: z.string().openapi({ - description: 'The document data', - example: 'Hello, World!' 
- }) - }) - } - }, - description: 'The document object' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - - const document = await storage.read(params.name); - - // V1 Endpoint does not support document protected password - if (document.header.passwordHash) { - errorHandler.send(ErrorCode.documentPasswordNeeded); - } - - const buffer = compression.decode(document.data); - - return ctx.json({ - key: params.name, - data: buffer.toString('binary') - }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v1/accessRaw.route.ts b/src/endpoints/v1/accessRaw.route.ts deleted file mode 100644 index 0df2ac83..00000000 --- a/src/endpoints/v1/accessRaw.route.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { storage } from '#document/storage.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const accessRawRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'get', - path: '/{name}/raw', - tags: ['v1'], - summary: 'Get document data', - deprecated: true, - request: { - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }) - }, - responses: { - 200: { - content: { - 'text/plain': { - schema: z.any().openapi({ - description: 'The document data' - }), - example: 'Hello, World!' - } - }, - description: 'The document data' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - - const document = await storage.read(params.name); - - // V1 Endpoint does not support document protected password - if (document.header.passwordHash) { - errorHandler.send(ErrorCode.documentPasswordNeeded); - } - - // @ts-ignore: Return the buffer directly - return ctx.text(compression.decode(document.data)); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v1/index.ts b/src/endpoints/v1/index.ts deleted file mode 100644 index 6a5aa2a9..00000000 --- a/src/endpoints/v1/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { OpenAPIHono } from '@hono/zod-openapi'; -import { accessRoute } from './access.route.ts'; -import { accessRawRoute } from './accessRaw.route.ts'; -import { publishRoute } from './publish.route.ts'; -import { removeRoute } from './remove.route.ts'; - -export const v1 = (): typeof endpoint => { - const endpoint = new OpenAPIHono(); - - endpoint.get('/', (ctx) => { - return ctx.text('Welcome to JSPaste API v1'); - }); - - accessRoute(endpoint); - accessRawRoute(endpoint); - publishRoute(endpoint); - removeRoute(endpoint); - - return endpoint; -}; diff --git a/src/endpoints/v1/publish.route.ts b/src/endpoints/v1/publish.route.ts deleted file mode 100644 index a9fa3d67..00000000 --- a/src/endpoints/v1/publish.route.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { crypto } from '#document/crypto.ts'; -import { storage } from 
'#document/storage.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { middleware } from '#server/middleware.ts'; -import { DocumentVersion } from '#type/Document.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { StringUtils } from '#util/StringUtils.ts'; - -export const publishRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'post', - path: '/', - tags: ['v1'], - summary: 'Publish document', - deprecated: true, - middleware: [middleware.bodyLimit()], - request: { - body: { - content: { - 'text/plain': { - schema: z.string().openapi({ - description: 'Data to publish in the document', - example: 'Hello, World!' - }) - } - } - } - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - key: z.string().openapi({ - description: 'The document name (formerly key)', - example: 'abc123' - }), - secret: z.string().openapi({ - description: 'The document secret', - example: 'aaaaa-bbbbb-ccccc-ddddd' - }) - }) - } - }, - description: 'An object with a "name" and "secret" parameters of the created document' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const body = await ctx.req.arrayBuffer(); - const name = await StringUtils.createName(); - const secret = StringUtils.createSecret(); - - await storage.write(name, { - data: compression.encode(body), - header: { - name: name, - secretHash: crypto.hash(secret), - passwordHash: null - }, - version: DocumentVersion.V1 - }); - - return ctx.json({ key: name, secret: secret }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v1/remove.route.ts b/src/endpoints/v1/remove.route.ts deleted file mode 100644 index 153ab2e7..00000000 --- a/src/endpoints/v1/remove.route.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { unlink } from 'node:fs/promises'; -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { storage } from '#document/storage.ts'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const removeRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'delete', - path: '/{name}', - tags: ['v1'], - summary: 'Remove document', - deprecated: true, - request: { - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }), - headers: z.object({ - secret: z.string().min(1).openapi({ - description: 'The document secret', - example: 'aaaaa-bbbbb-ccccc-ddddd' - }) - }) - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - removed: z.boolean().openapi({ - description: 'Confirmation of deletion', - example: true - }) - }) - } - }, - description: 'An object with a "removed" parameter of the deleted document' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - const headers = ctx.req.valid('header'); - - const document = await storage.read(params.name); - - validator.validateSecret(headers.secret, document.header.secretHash); - - const result = await unlink(config.storagePath + params.name) - .then(() => true) - .catch(() => false); - - 
return ctx.json({ removed: result }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v2/access.route.ts b/src/endpoints/v2/access.route.ts deleted file mode 100644 index 7b940768..00000000 --- a/src/endpoints/v2/access.route.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { storage } from '#document/storage.ts'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const accessRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'get', - path: '/{name}', - tags: ['v2'], - summary: 'Get document', - request: { - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }), - headers: z.object({ - password: z.string().optional().openapi({ - description: 'The password to access the document', - example: 'aabbccdd11223344' - }) - }) - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - key: z.string().openapi({ - description: 'The document name (formerly key)', - example: 'abc123' - }), - data: z.string().openapi({ - description: 'The document data', - example: 'Hello, World!' - }), - url: z.string().openapi({ - description: 'The document URL', - example: 'https://jspaste.eu/abc123' - }), - expirationTimestamp: z.number().openapi({ - deprecated: true, - description: 'The document expiration timestamp (always will be 0)', - example: 0 - }) - }) - } - }, - description: 'The document object' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - const headers = ctx.req.valid('header'); - - const document = await storage.read(params.name); - - if (document.header.passwordHash) { - if (!headers.password) { - return errorHandler.send(ErrorCode.documentPasswordNeeded); - } - - validator.validatePassword(headers.password, document.header.passwordHash); - } - - const buffer = compression.decode(document.data); - - return ctx.json({ - key: params.name, - data: buffer.toString('binary'), - url: config.protocol.concat(new URL(ctx.req.url).host.concat('/', params.name)), - expirationTimestamp: 0 - }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v2/accessRaw.route.ts b/src/endpoints/v2/accessRaw.route.ts deleted file mode 100644 index 75171802..00000000 --- a/src/endpoints/v2/accessRaw.route.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { storage } from '#document/storage.ts'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const accessRawRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'get', - path: '/{name}/raw', - tags: ['v2'], - summary: 'Get document data', - request: { - params: z.object({ - name: 
z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }), - headers: z.object({ - password: z.string().optional().openapi({ - description: 'The password to access the document', - example: 'aabbccdd11223344' - }) - }), - query: z.object({ - p: z.string().optional().openapi({ - description: - 'The password to decrypt the document. It is preferred to pass the password through headers, only use this method for support of web browsers.', - example: 'aabbccdd11223344' - }) - }) - }, - responses: { - 200: { - content: { - 'text/plain': { - schema: z.any().openapi({ - description: 'The document data' - }), - example: 'Hello, World!' - } - }, - description: 'The document data' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - const headers = ctx.req.valid('header'); - const query = ctx.req.valid('query'); - - const options = { - password: headers.password || query.p - }; - - const document = await storage.read(params.name); - - if (document.header.passwordHash) { - if (!options.password) { - return errorHandler.send(ErrorCode.documentPasswordNeeded); - } - - validator.validatePassword(options.password, document.header.passwordHash); - } - - // @ts-ignore: Return the buffer directly - return ctx.text(compression.decode(document.data)); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v2/edit.route.ts b/src/endpoints/v2/edit.route.ts deleted file mode 100644 index bdcb7e8e..00000000 --- a/src/endpoints/v2/edit.route.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { storage } from '#document/storage.ts'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { middleware } from '#server/middleware.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const editRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'patch', - path: '/{name}', - tags: ['v2'], - summary: 'Edit document', - middleware: [middleware.bodyLimit()], - request: { - body: { - content: { - 'text/plain': { - schema: z.string().openapi({ - description: 'Data to replace in the document', - example: 'Hello, World!' 
- }) - } - } - }, - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }), - headers: z.object({ - password: z.string().optional().openapi({ - deprecated: true, - description: 'The password to access the document (not used anymore)', - example: 'aabbccdd11223344' - }), - secret: z.string().openapi({ - description: 'The document secret', - example: 'aaaaa-bbbbb-ccccc-ddddd' - }) - }) - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - edited: z.boolean().openapi({ - description: 'Confirmation of edition', - example: true - }) - }) - } - }, - description: 'Confirmation of edition' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const body = await ctx.req.arrayBuffer(); - const params = ctx.req.valid('param'); - const headers = ctx.req.valid('header'); - - const document = await storage.read(params.name); - - validator.validateSecret(headers.secret, document.header.secretHash); - - document.data = compression.encode(body); - - const result = await storage - .write(params.name, document) - .then(() => true) - .catch(() => false); - - return ctx.json({ - edited: result - }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v2/exists.route.ts b/src/endpoints/v2/exists.route.ts deleted file mode 100644 index 7d853afd..00000000 --- a/src/endpoints/v2/exists.route.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const existsRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'get', - path: '/{name}/exists', - tags: ['v2'], - summary: 'Check document', - request: { - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }) - }, - responses: { - 200: { - content: { - 'text/plain': { - schema: z.string().openapi({ - description: 'The document existence result' - }), - examples: { - true: { - summary: 'Document exists', - value: 'true' - }, - false: { - summary: 'Document does not exist', - value: 'false' - } - } - } - }, - description: 'The document existence result' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - - validator.validateName(params.name); - - return ctx.text(String(await Bun.file(config.storagePath + params.name).exists())); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v2/index.ts b/src/endpoints/v2/index.ts deleted file mode 100644 index 4354ef4d..00000000 --- a/src/endpoints/v2/index.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { OpenAPIHono } from '@hono/zod-openapi'; -import { accessRoute } from './access.route.ts'; -import { accessRawRoute } from './accessRaw.route.ts'; -import { editRoute } from './edit.route.ts'; -import { existsRoute } from './exists.route.ts'; -import { publishRoute } from './publish.route.ts'; -import { removeRoute } from 
'./remove.route.ts'; - -export const v2 = (): typeof endpoint => { - const endpoint = new OpenAPIHono(); - - endpoint.get('/', (ctx) => { - return ctx.text('Welcome to JSPaste API v2'); - }); - - accessRoute(endpoint); - accessRawRoute(endpoint); - editRoute(endpoint); - existsRoute(endpoint); - publishRoute(endpoint); - removeRoute(endpoint); - - return endpoint; -}; diff --git a/src/endpoints/v2/publish.route.ts b/src/endpoints/v2/publish.route.ts deleted file mode 100644 index f894f48e..00000000 --- a/src/endpoints/v2/publish.route.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { compression } from '#document/compression.ts'; -import { crypto } from '#document/crypto.ts'; -import { storage } from '#document/storage.ts'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { middleware } from '#server/middleware.ts'; -import { DocumentVersion } from '#type/Document.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { StringUtils } from '#util/StringUtils.ts'; -import { config } from '../../config.ts'; - -export const publishRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'post', - path: '/', - tags: ['v2'], - summary: 'Publish document', - middleware: [middleware.bodyLimit()], - request: { - body: { - content: { - 'text/plain': { - schema: z.string().openapi({ - description: 'Data to publish in the document', - example: 'Hello, World!' - }) - } - } - }, - headers: z.object({ - password: z.string().optional().openapi({ - description: 'The password to restrict the document', - example: 'aabbccdd11223344' - }), - key: z.string().optional().openapi({ - description: 'The document name (formerly key)', - example: 'abc123' - }), - keylength: z.string().optional().openapi({ - description: 'The document name length (formerly key length)', - example: config.documentNameLengthDefault.toString() - }), - secret: z.string().optional().openapi({ - description: 'The document secret', - example: 'aaaaa-bbbbb-ccccc-ddddd' - }) - }) - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - key: z.string().openapi({ - description: 'The document name (formerly key)', - example: 'abc123' - }), - secret: z.string().openapi({ - description: 'The document secret', - example: 'aaaaa-bbbbb-ccccc-ddddd' - }), - url: z.string().openapi({ - description: 'The document URL', - example: 'https://jspaste.eu/abc123' - }), - expirationTimestamp: z.number().openapi({ - deprecated: true, - description: 'The document expiration timestamp (always will be 0)', - example: 0 - }) - }) - } - }, - description: 'An object with a "key", "secret" and "url" parameters of the created document' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const body = await ctx.req.arrayBuffer(); - const headers = ctx.req.valid('header'); - - if (headers.password) { - validator.validatePasswordLength(headers.password); - } - - let secret: string; - - if (headers.secret) { - validator.validateSecretLength(headers.secret); - - secret = headers.secret; - } else { - secret = StringUtils.createSecret(); - } - - let name: string; - - if (headers.key) { - validator.validateName(headers.key); - - if (await StringUtils.nameExists(headers.key)) { - errorHandler.send(ErrorCode.documentNameAlreadyExists); - } - - name = headers.key; - } else { - const nameLength = 
Number(headers.keylength || config.documentNameLengthDefault); - - name = await StringUtils.createName(nameLength); - } - - const data = compression.encode(body); - - await storage.write(name, { - data: data, - header: { - name: name, - secretHash: crypto.hash(secret), - passwordHash: headers.password ? crypto.hash(headers.password) : null - }, - version: DocumentVersion.V1 - }); - - return ctx.json({ - key: name, - secret: secret, - url: config.protocol.concat(new URL(ctx.req.url).host.concat('/', name)), - expirationTimestamp: 0 - }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/endpoints/v2/remove.route.ts b/src/endpoints/v2/remove.route.ts deleted file mode 100644 index 49587291..00000000 --- a/src/endpoints/v2/remove.route.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { unlink } from 'node:fs/promises'; -import { type OpenAPIHono, createRoute, z } from '@hono/zod-openapi'; -import { storage } from '#document/storage.ts'; -import { validator } from '#document/validator.ts'; -import { errorHandler, schema } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { config } from '../../config.ts'; - -export const removeRoute = (endpoint: OpenAPIHono): void => { - const route = createRoute({ - method: 'delete', - path: '/{name}', - tags: ['v2'], - summary: 'Remove document', - request: { - params: z.object({ - name: z.string().min(config.documentNameLengthMin).max(config.documentNameLengthMax).openapi({ - description: 'The document name', - example: 'abc123' - }) - }), - headers: z.object({ - secret: z.string().min(1).openapi({ - description: 'The document secret', - example: 'aaaaa-bbbbb-ccccc-ddddd' - }) - }) - }, - responses: { - 200: { - content: { - 'application/json': { - schema: z.object({ - removed: z.boolean().openapi({ - description: 'Confirmation of deletion', - example: true - }) - }) - } - }, - description: 'An object with a "removed" parameter of the deleted document' - }, - 400: schema, - 404: schema, - 500: schema - } - }); - - endpoint.openapi( - route, - async (ctx) => { - const params = ctx.req.valid('param'); - const headers = ctx.req.valid('header'); - - const document = await storage.read(params.name); - - validator.validateSecret(headers.secret, document.header.secretHash); - - const result = await unlink(config.storagePath + params.name) - .then(() => true) - .catch(() => false); - - return ctx.json({ removed: result }); - }, - (result) => { - if (!result.success) { - return errorHandler.send(ErrorCode.validation); - } - } - ); -}; diff --git a/src/global.ts b/src/global.ts new file mode 100644 index 00000000..9f0e1e49 --- /dev/null +++ b/src/global.ts @@ -0,0 +1,31 @@ +import { STATUS_CODES } from "node:http"; +import type { StatementSync } from "node:sqlite"; +import type { StatusCode } from "@hono/hono/utils/http-status"; +import { LruCache } from "@std/cache"; +import { customAlphabet } from "nanoid"; +import type { Database } from "#db/index.ts"; + +export const mutable = { + database: undefined as unknown as Database, + http: undefined as Deno.HttpServer | undefined +}; + +export const constantDatabaseMaxElements = 10_000; +export const constantDocumentNameLengthDefault = 8; +export const constantDocumentNameLengthMax = 32; +export const constantDocumentNameLengthMin = 2; +export const constantDocumentPasswordLengthMax = 128; +export const constantDocumentPasswordLengthMin = 2; +export const constantUserTokenLength = 59; +export const 
constantNanoid = customAlphabet("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_"); +export const constantPathStructStorage = "./storage/"; +export const constantPathStructStorageData = "./storage/data/"; +export const constantPathDatabaseFile = "./storage/database.db"; +export const constantStoreStatements = new LruCache<string, StatementSync>(200); +export const constantStoreDispose = new Map<string, [number, () => Promise<void>]>(); +export const constantTemporalUTC = () => Temporal.Now.zonedDateTimeISO("Etc/UTC"); +export const constantTemporalToUTC = (temporal: Temporal.Instant) => temporal.toZonedDateTimeISO("Etc/UTC"); +export const constantTemporalInstant = Temporal.Now.instant; +export const constantHttpStatusCodes = STATUS_CODES as Record<StatusCode, string>; +export const constantTextEncoder = new TextEncoder(); +export const constantTextDecoder = new TextDecoder(); diff --git a/src/http/handler.ts b/src/http/handler.ts new file mode 100644 index 00000000..4fea4eeb --- /dev/null +++ b/src/http/handler.ts @@ -0,0 +1,131 @@ +import { cors } from "@hono/hono/cors"; +import { HTTPException } from "@hono/hono/http-exception"; +import { Hono } from "@hono/hono/tiny"; +import { openAPIRouteHandler } from "@hono/openapi"; +import { v1DocumentHandler } from "#endpoint/document/v1/index.ts"; +import { v2LegacyDocumentHandler } from "#endpoint/legacy/v2/documents/index.ts"; +import { v1UserHandler } from "#endpoint/user/v1/index.ts"; +import { Logger } from "#util/console.ts"; +import { env } from "../utils/env.ts"; +import { errorCodeCrash, errorCodeDocumentCorrupted, errorGet } from "../utils/error.ts"; + +const log: Logger = new Logger("http"); + +export type Env = { + Variables: { + userId: string | undefined; + hasBody: boolean | undefined; + }; +}; + +export const handler = (): Hono => { + const handler = new Hono().basePath("/api"); + + handler.notFound((ctx) => { + return ctx.body(null, 404); + }); + + handler.onError((instance, ctx) => { + if (instance instanceof HTTPException) { + return instance.getResponse(); + } + + // some of them may be triggered by a race condition + if ( + // IO + instance instanceof Deno.errors.NotFound || + instance instanceof Deno.errors.AlreadyExists || + instance instanceof Deno.errors.BadResource || + // corrupted stream (probably) + instance instanceof Deno.errors.Http + ) { + log.debug(instance); + + return ctx.json(errorGet(errorCodeDocumentCorrupted)); + } + + log.error(instance); + + return ctx.json(errorGet(errorCodeCrash)); + }); + + handler.use("*", cors()); + handler.use(async (ctx, next) => { + await next(); + + // disable compression + // https://docs.deno.com/runtime/fundamentals/http_server/#automatic-body-compression + ctx.res.headers.append("Cache-Control", "no-transform"); + }); + + handler.get( + "/oas.json", + openAPIRouteHandler(handler, { + documentation: { + openapi: "3.1.0", + info: { + version: "rolling", + title: "JSPaste API", + summary: "Create and share code with JSPaste! The developer website for easy code sharing.", + description: `The API endpoints documented here are stable. However, the OpenAPI spec used to generate this documentation is unstable. It may change or break without notice. + +## User class +- **Anonymous:** Can alter anonymous documents, everyone can alter their documents. +- **Registered:** Can alter their own and anonymous documents, only they and "root" can alter their documents. +- **"root":** Can alter every document, no one can alter their documents except "root" itself. + +## Restrictions +Each instance can impose restrictions to the API usage.
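(Editor's aside, not part of this patch: a minimal client sketch of the user classes described above. It assumes a local instance on the default port, the `/api/v2/documents` and `/api/user/v1` routes mounted later in this file, and a placeholder root token; the response fields follow the legacy publish and user-create routes shown elsewhere in this diff.)

```ts
// Hedged sketch (not part of the patch): exercising the user classes described above.
const base = "http://localhost:4000/api";

// Anonymous: publish a document through the legacy v2 route (text/plain body).
const published = await fetch(`${base}/v2/documents`, {
  method: "POST",
  headers: { "content-type": "text/plain" },
  body: "Hello world!"
}).then((res) => res.json());
console.log(published.key, published.url); // e.g. "abc123", "<host>/abc123"

// Registered / "root": create a user token; when registration is closed, only the root bearer token may do this.
const rootToken = "<root token>"; // placeholder, not a real credential
const created = await fetch(`${base}/user/v1`, {
  method: "POST",
  headers: { authorization: `Bearer ${rootToken}` }
}).then((res) => res.json());
console.log(created.token);
```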
These restrictions may include, but not limited to: + +(the following values might change without notice) +- Instance registration policy: ${env.JSPB_USER_REGISTER ? "OPEN" : "CLOSED"} +- Document size limit: ${env.JSPB_DOCUMENT_SIZE === 0 ? "unlimited" : (env.JSPB_DOCUMENT_SIZE ?? "unknown")} +- Document lifetime: ${env.JSPB_DOCUMENT_AGE.total("minutes") === 0 ? "unlimited" : (env.JSPB_DOCUMENT_AGE.total("minutes") ?? "unknown")} +- Document anonymous lifetime: ${env.JSPB_DOCUMENT_ANONYMOUS_AGE.total("minutes") === 0 ? "unlimited" : (env.JSPB_DOCUMENT_ANONYMOUS_AGE.total("minutes") ?? "unknown")} +`, + license: { + name: "EUPL-1.2", + url: "https://eur-lex.europa.eu/eli/dec_impl/2017/863" + } + }, + externalDocs: { + description: "Source code", + url: "https://github.com/jspaste/backend" + }, + components: { + securitySchemes: { + bearer: { + bearerFormat: "base64url", + type: "http", + scheme: "bearer", + description: "Registered user in the instance." + } + } + }, + servers: [ + { + url: "https://jspaste.eu", + description: "Official JSPaste instance" + }, + { + url: "http://localhost:4000", + description: "Local instance" + } + ] + } + }) + ); + + // deprecated + handler.get("/documents/*", (ctx) => { + return ctx.redirect(ctx.req.path.replace(/\/documents\//g, "/v2/documents/"), 307); + }); + + handler.route("/document/v1", v1DocumentHandler); + handler.route("/user/v1", v1UserHandler); + + // deprecated + handler.route("/v2/documents", v2LegacyDocumentHandler); + + return handler; +}; diff --git a/src/http/index.ts b/src/http/index.ts new file mode 100644 index 00000000..b751e230 --- /dev/null +++ b/src/http/index.ts @@ -0,0 +1,45 @@ +import { Logger } from "#util/console.ts"; +import { env } from "../utils/env.ts"; +import { errorCodeUnknown, errorGet } from "../utils/error.ts"; + +const log: Logger = new Logger("http"); + +const dummyHandler = (): Response => { + return Response.json( + { + ...errorGet(errorCodeUnknown) + }, + { + status: 503, + headers: { + // disable compression + // https://docs.deno.com/runtime/fundamentals/http_server/#automatic-body-compression + "Cache-Control": "no-transform" + } + } + ); +}; + +type Options = { + handler?: Deno.ServeHandler; +}; + +export const http = (options: Options = {}): Deno.HttpServer => { + const usingHandler: boolean = typeof options.handler !== "undefined"; + + options.handler ??= dummyHandler; + + return Deno.serve({ + transport: "tcp", + hostname: env.JSPB_HOSTNAME.root, + port: env.JSPB_PORT, + handler: options.handler, + onListen: () => { + if (usingHandler) { + log.info( + `Listening on ${env.JSPB_HOSTNAME.isIPv6 ? 
`[${env.JSPB_HOSTNAME.root}]` : env.JSPB_HOSTNAME.root}:${env.JSPB_PORT}` + ); + } + } + }); +}; diff --git a/src/http/middleware/authorization.ts b/src/http/middleware/authorization.ts new file mode 100644 index 00000000..74d3a1c0 --- /dev/null +++ b/src/http/middleware/authorization.ts @@ -0,0 +1,51 @@ +import { createMiddleware } from "@hono/hono/factory"; +import { type } from "arktype"; +import { mutable } from "#/global.ts"; +import { verifyHash } from "#util/crypto.ts"; +import { errorCodeUserInvalidToken, errorCodeValidation, errorThrow } from "#util/error.ts"; +import { validatorUserHeader } from "#util/validator/user.ts"; +import type { Env } from "../handler.ts"; + +export const authMiddleware = createMiddleware<Env>(async (ctx, next) => { + const authorization = ctx.req.header("authorization"); + if (!authorization) { + return next(); + } + + const token = validatorUserHeader(authorization); + if (token instanceof type.errors) { + return errorThrow(errorCodeValidation, token.summary); + } + + if (!token.includes(".")) { + // unhashed token + if (token.length === 32) { + // @ts-expect-error unindexed select + const id = mutable.database.user.get("token", token)?.id; + if (!id) { + return errorThrow(errorCodeUserInvalidToken); + } + + ctx.set("userId", id); + + return next(); + } + + return errorThrow(errorCodeUserInvalidToken); + } + + const [id] = token.split("."); + if (!id) { + return errorThrow(errorCodeUserInvalidToken); + } + + // trying to minimize timing attacks by always calling verifyHash + const combo = mutable.database.user.get("id", id)?.token ?? "0 0"; + if (!verifyHash(token, combo)) { + return errorThrow(errorCodeUserInvalidToken); + } + + ctx.set("userId", id); + + await next(); +}); diff --git a/src/http/middleware/bodyStream.ts b/src/http/middleware/bodyStream.ts new file mode 100644 index 00000000..6768b36a --- /dev/null +++ b/src/http/middleware/bodyStream.ts @@ -0,0 +1,63 @@ +import { createMiddleware } from "@hono/hono/factory"; +import { env } from "#util/env.ts"; +import { errorCodeDocumentInvalidSize, errorThrow } from "#util/error.ts"; +import type { Env } from "../handler.ts"; + +export const bodyStream = createMiddleware<Env>(async (ctx, next) => { + if (!ctx.req.raw.body) { + ctx.set("hasBody", false); + + return next(); + } + + const contentLengthHeader = ctx.req.raw.headers.get("content-length"); + if (contentLengthHeader !== null && !ctx.req.raw.headers.has("transfer-encoding")) { + const size = Number.parseInt(contentLengthHeader, 10); + if (size > env.JSPB_DOCUMENT_SIZE) { + return errorThrow(errorCodeDocumentInvalidSize); + } + + ctx.set("hasBody", size > 0); + + return next(); + } + + const bodyReader = ctx.req.raw.body.getReader(); + const head = await bodyReader.read(); + + bodyReader.releaseLock(); + + if (head.done || head.value.length === 0) { + ctx.set("hasBody", false); + + return next(); + } + + ctx.set("hasBody", true); + + let size = head.value.length; + const transformer = new TransformStream<Uint8Array<ArrayBuffer>, Uint8Array>({ + start: (controller) => { + // reinsert head + controller.enqueue(head.value); + }, + transform: (chunk, controller) => { + size += chunk.length; + + if (size > env.JSPB_DOCUMENT_SIZE) { + controller.error(new Deno.errors.BrokenPipe()); + return; + } + + controller.enqueue(chunk); + } + }); + + const requestInit: RequestInit & { duplex: "half" } = { + body: ctx.req.raw.body.pipeThrough(transformer), + duplex: "half" + }; + ctx.req.raw = new Request(ctx.req.raw, requestInit); + + await next(); +}); diff --git a/src/index.ts b/src/index.ts
new file mode 100644 index 00000000..1bbcd836 --- /dev/null +++ b/src/index.ts @@ -0,0 +1,21 @@ +import { configure } from "arktype/config"; +import "@std/dotenv/load"; + +declare global { + // biome-ignore lint/style/useConsistentTypeDefinitions: expected + interface ArkEnv { + meta(): { + ref?: string; + }; + } +} + +configure({ + toJsonSchema: { + fallback: { + morph: (ctx) => ctx.out ?? ctx.base + } + } +}); + +void import("./init.ts").then(({ init }) => init()); diff --git a/src/init.ts b/src/init.ts new file mode 100644 index 00000000..b15f293b --- /dev/null +++ b/src/init.ts @@ -0,0 +1,103 @@ +import { abortable } from "@std/async"; +import { ensureDir } from "@std/fs"; +import { Database } from "#db/index.ts"; +import { sweeper } from "#task/sweeper.ts"; +import { Logger } from "#util/console.ts"; +import { constantPathStructStorage, constantPathStructStorageData, constantStoreDispose, mutable } from "./global.ts"; +import { handler } from "./http/handler.ts"; +import { http } from "./http/index.ts"; +import { taskRegister } from "./task.ts"; +import { env } from "./utils/env.ts"; + +const log: Logger = new Logger(); + +let shutdown = false; + +const initDirStruct = async (): Promise => { + await Promise.all([await ensureDir(constantPathStructStorage), await ensureDir(constantPathStructStorageData)]); +}; + +const initHTTPServer = async (handler?: Deno.ServeHandler): Promise => { + const id = "__httpServer"; + + await constantStoreDispose.get(id)?.[1](); + + mutable.http = http({ + handler: handler + }); + + constantStoreDispose.set(id, [ + 10, + async () => { + mutable.http?.unref(); + + // Deno.serve will deadlock on shutdown under pressure + await mutable.http?.shutdown(); + } + ]); +}; + +const initDatabase = async (): Promise => { + const id = "__databaseServer"; + + await constantStoreDispose.get(id)?.[1](); + + mutable.database = new Database(); + + constantStoreDispose.set(id, [0, async () => mutable.database[Symbol.dispose]()]); + + await mutable.database.migration(); +}; + +const initTask = async (): Promise => { + taskRegister(env.JSPB_TASK_SWEEPER, sweeper, { + name: "sweeper" + }); +}; + +export const init = async (): Promise => { + for (const signal of ["SIGINT", "SIGTERM", "SIGHUP", "SIGUSR1", "SIGUSR2"] satisfies Deno.Signal[]) { + Deno.addSignalListener(signal, async () => { + if (shutdown) return; + shutdown = true; + + log.debug(`Received ${signal}.`); + + const storeDispose = constantStoreDispose + .entries() + .toArray() + .sort(([, [pa]], [, [pb]]) => pb - pa); + + try { + for (const [key, [, dispose]] of storeDispose) { + log.debug(`Closing "${key}"...`); + + try { + // biome-ignore lint/performance/noAwaitInLoops: serialized + await abortable(dispose(), AbortSignal.timeout(3000)); + } catch { + log.warn(`Couldn't close "${key}" on time.`); + } + } + } catch (error) { + log.error("Failed to gracefully shutdown (bad state)..:", error); + Deno.exit(1); + } + + if (Deno.exitCode === 0) { + log.info("Bye."); + } + }); + } + + try { + await Promise.all([initDirStruct(), initHTTPServer()]); + await Promise.all([initDatabase()]); + await Promise.all([initTask(), initHTTPServer(handler().fetch)]); + } catch (error) { + log.error(error); + + Deno.exitCode = 1; + Deno.kill(Deno.pid, "SIGTERM"); + } +}; diff --git a/src/server.ts b/src/server.ts deleted file mode 100644 index 2bd58c50..00000000 --- a/src/server.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { OpenAPIHono } from '@hono/zod-openapi'; -import { serve } from 'bun'; -import { cors } from 'hono/cors'; -import { 
HTTPException } from 'hono/http-exception'; -import { oas } from '#server/oas.ts'; -import { env } from '#util/env.ts'; -import { logger } from '#util/logger.ts'; -import { config } from './config.ts'; -import { endpoints } from './server/endpoints.ts'; -import { errorHandler } from './server/errorHandler.ts'; -import { ErrorCode } from './types/ErrorHandler.ts'; - -process.on('SIGTERM', async () => await backend.stop()); - -logger.set(env.logLevel); - -const instance = new OpenAPIHono().basePath(config.apiPath); - -export const server = (): typeof instance => { - instance.use('*', cors()); - - instance.onError((err) => { - if (err instanceof HTTPException) { - return err.getResponse(); - } - - logger.error(err); - throw errorHandler.send(ErrorCode.unknown); - }); - - instance.notFound((ctx) => { - return ctx.body(null, 404); - }); - - oas(instance); - endpoints(instance); - - logger.debug('Registered routes:', instance.routes.map((route) => route.path).join(', ')); - logger.info(`Listening on: http://localhost:${env.port}`); - - return instance; -}; - -const backend = serve({ - fetch: server().fetch, - port: env.port -}); diff --git a/src/server/endpoints.ts b/src/server/endpoints.ts deleted file mode 100644 index 76ece7dd..00000000 --- a/src/server/endpoints.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { OpenAPIHono } from '@hono/zod-openapi'; -import { v1 } from '#v1/index.ts'; -import { v2 } from '#v2/index.ts'; -import { config } from '../config.ts'; - -export const endpoints = (instance: OpenAPIHono): void => { - instance.get('/documents/*', (ctx) => { - return ctx.redirect(`${config.apiPath}/v2/documents`.concat(ctx.req.path.split('/documents').pop() ?? ''), 307); - }); - - instance.route('/v2/documents', v2()); - instance.route('/v1/documents', v1()); -}; diff --git a/src/server/errorHandler.ts b/src/server/errorHandler.ts deleted file mode 100644 index d42ec927..00000000 --- a/src/server/errorHandler.ts +++ /dev/null @@ -1,139 +0,0 @@ -import type { ResponseConfig } from '@asteasolutions/zod-to-openapi/dist/openapi-registry'; -import { z } from '@hono/zod-openapi'; -import { HTTPException } from 'hono/http-exception'; -import { ErrorCode, type Schema } from '#type/ErrorHandler.ts'; - -const map: Record = { - [ErrorCode.unknown]: { - httpCode: 500, - type: 'generic', - message: - 'An unknown error occurred. This may be due to an unexpected condition in the server. If it happens again, please report it here: https://github.com/jspaste/backend/issues/new/choose' - }, - [ErrorCode.notFound]: { - httpCode: 404, - type: 'generic', - message: 'The requested resource does not exist.' - }, - [ErrorCode.validation]: { - httpCode: 400, - type: 'generic', - message: - 'Validation of the request data failed. Check the entered data according to our documentation: https://jspaste.eu/docs' - }, - [ErrorCode.crash]: { - httpCode: 500, - type: 'generic', - message: - 'An internal server error occurred. This may be due to an unhandled exception. If it happens again, please report it here: https://github.com/jspaste/backend/issues/new/choose' - }, - [ErrorCode.parse]: { - httpCode: 400, - type: 'generic', - message: - 'The request could not be parsed. This may be due to a malformed input or an unsupported data format. Check the entered data and try again.' - }, - [ErrorCode.dummy]: { - httpCode: 200, - type: 'generic', - message: 'This is a dummy error.' - }, - [ErrorCode.documentNotFound]: { - httpCode: 404, - type: 'document', - message: 'The requested document does not exist. 
Check the document name and try again.' - }, - [ErrorCode.documentPasswordNeeded]: { - httpCode: 401, - type: 'document', - message: 'This document is protected. Provide the document password and try again.' - }, - [ErrorCode.documentInvalidPassword]: { - httpCode: 403, - type: 'document', - message: 'The credentials provided for the document are invalid.' - }, - [ErrorCode.documentInvalidPasswordLength]: { - httpCode: 400, - type: 'document', - message: 'The password length provided for the document is invalid.' - }, - [ErrorCode.documentInvalidSize]: { - httpCode: 413, - type: 'document', - message: 'The body size provided for the document is too large.' - }, - [ErrorCode.documentInvalidSecret]: { - httpCode: 403, - type: 'document', - message: 'The credentials provided for the document are invalid.' - }, - [ErrorCode.documentInvalidSecretLength]: { - httpCode: 400, - type: 'document', - message: 'The secret length provided for the document is invalid.' - }, - [ErrorCode.documentInvalidNameLength]: { - httpCode: 400, - type: 'document', - message: 'The name length provided for the document is out of range.' - }, - [ErrorCode.documentNameAlreadyExists]: { - httpCode: 400, - type: 'document', - message: 'The name provided for the document already exists. Use another one and try again.' - }, - [ErrorCode.documentInvalidName]: { - httpCode: 400, - type: 'document', - message: 'The name provided for the document is invalid. Use another one and try again.' - }, - [ErrorCode.documentCorrupted]: { - httpCode: 500, - type: 'document', - message: 'The document is corrupted. It may have been tampered with or uses an unsupported format.' - } -} as const; - -export const errorHandler = { - get: (code: ErrorCode) => { - const { type, message } = map[code]; - - return { type, code, message }; - }, - - send: (code: ErrorCode) => { - const { httpCode, type, message } = map[code]; - - throw new HTTPException(httpCode, { - res: new Response(JSON.stringify({ type, code, message }), { - status: httpCode, - headers: { - 'Content-Type': 'application/json' - } - }) - }); - } -} as const; - -export const schema: ResponseConfig = { - content: { - 'application/json': { - schema: z.object({ - type: z.string().openapi({ - description: 'The message type', - example: errorHandler.get(ErrorCode.dummy).type - }), - code: z.number().openapi({ - description: 'The message code', - example: errorHandler.get(ErrorCode.dummy).code - }), - message: z.string().openapi({ - description: 'The message description', - example: errorHandler.get(ErrorCode.dummy).message - }) - }) - } - }, - description: 'Generic error object' -} as const; diff --git a/src/server/middleware.ts b/src/server/middleware.ts deleted file mode 100644 index 5df085c0..00000000 --- a/src/server/middleware.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { bodyLimit as middlewareBodyLimit } from 'hono/body-limit'; -import { errorHandler } from '#server/errorHandler.ts'; -import { ErrorCode } from '#type/ErrorHandler.ts'; -import { env } from '#util/env.ts'; - -export const middleware = { - bodyLimit: (maxSize: number = env.documentMaxSize) => { - return middlewareBodyLimit({ - maxSize: maxSize * 1024, - onError: () => { - throw errorHandler.send(ErrorCode.documentInvalidSize); - } - }); - } -} as const; diff --git a/src/server/oas.ts b/src/server/oas.ts deleted file mode 100644 index 64db10a6..00000000 --- a/src/server/oas.ts +++ /dev/null @@ -1,31 +0,0 @@ -import type { OpenAPIHono } from '@hono/zod-openapi'; -import { config } from '../config.ts'; - -export const oas 
= (instance: OpenAPIHono): void => { - instance.doc31('/oas.json', (ctx) => ({ - openapi: '3.1.0', - info: { - title: 'JSPaste API', - version: 'rolling', - description: `Note: The latest API version can be accessed with "${config.apiPath}/documents" alias route.`, - license: { - name: 'EUPL-1.2', - url: 'https://eur-lex.europa.eu/eli/dec_impl/2017/863' - } - }, - servers: [ - { - url: config.protocol.concat(new URL(ctx.req.url).host), - description: 'This instance' - }, - { - url: 'https://jspaste.eu', - description: 'Official JSPaste instance' - }, - { - url: 'https://paste.inetol.net', - description: 'Inetol Infrastructure instance' - } - ].filter((server, index, self) => self.findIndex((x) => x.url === server.url) === index) - })); -}; diff --git a/src/task.ts b/src/task.ts new file mode 100644 index 00000000..a627dd5c --- /dev/null +++ b/src/task.ts @@ -0,0 +1,42 @@ +import { Logger } from "#util/console.ts"; +import { constantStoreDispose } from "./global.ts"; + +const log: Logger = new Logger("task"); + +type TaskRegisterOptions = { + name: string; +}; + +const trigger = async (callback: () => Promise | void, options: TaskRegisterOptions): Promise => { + log.debug(`Running "${options.name}".`); + + try { + await callback(); + } catch (error) { + log.error(`Error while running "${options.name}"..:`, error); + } + + log.debug(`Finished "${options.name}".`); +}; + +export const taskRegister = ( + expression: string, + callback: () => Promise | void, + options: TaskRegisterOptions +): void => { + const abort = new AbortController(); + + const id = `__task-${options.name}`; + + constantStoreDispose.get(id)?.[1](); + + try { + Deno.cron(options.name, expression, { signal: abort.signal }, () => trigger(callback, options)); + } catch (error) { + log.error(`Failed to register "${options.name}"..:`, error); + } + + constantStoreDispose.set(id, [100, async () => abort.abort()]); + + log.debug(`Registered "${options.name}".`); +}; diff --git a/src/tasks/sweeper.ts b/src/tasks/sweeper.ts new file mode 100644 index 00000000..51cd030d --- /dev/null +++ b/src/tasks/sweeper.ts @@ -0,0 +1,96 @@ +import { mapNotNullish } from "@std/collections"; +import { decodeTime } from "@std/ulid"; +import { constantTemporalUTC, mutable } from "#/global.ts"; +import { Database } from "#db/index.ts"; +import { Logger } from "#util/console.ts"; +import { env } from "../utils/env.ts"; +import { fsDelete, fsList } from "../utils/fs.ts"; + +const log: Logger = new Logger("task::sweeper"); + +export const sweeper = async (): Promise => { + sweeperDatabaseUser(); + sweeperDatabaseDocument(); + + // sweeper will remove everything in storage on ephemeral + if (!env.JSPB_DEBUG_DATABASE_EPHEMERAL) { + await sweeperDangling(); + } +}; + +const sweeperDatabaseUser = (): void => { + using database = new Database(); + + const temporalFuture = constantTemporalUTC().add({ days: 3 }); + + const users = mapNotNullish(database.user.getAllWithoutDocuments(), ({ id }) => { + if (!id) return; + if (id === mutable.database.user.getRoot()?.id) return; + + if (temporalFuture.epochMilliseconds > decodeTime(id)) { + return id; + } + + return; + }); + + if (users.length > 0) { + database.user.delete("id", users); + log.debug(`Removed ${users.length} unused user records.`); + } +}; + +const sweeperDatabaseDocument = (): void => { + using database = new Database(); + + const temporalNow = constantTemporalUTC(); + + const documents = mapNotNullish(database.document.getAll(["id", "user_id"]), ({ id, user_id }) => { + if (!id) return; + + const 
ageType = user_id + ? env.JSPB_DOCUMENT_AGE.total("milliseconds") + : env.JSPB_DOCUMENT_ANONYMOUS_AGE.total("milliseconds"); + + if (ageType > 0 && temporalNow.epochMilliseconds - decodeTime(id) > ageType) { + return id; + } + + return; + }); + + if (documents.length > 0) { + database.document.delete("id", documents); + log.debug(`Removed ${documents.length} expired document records.`); + } +}; + +const sweeperDangling = async (): Promise => { + using database = new Database(); + + const databaseDocuments = mapNotNullish(database.document.getAll(["id"]), ({ id }) => id); + const storageDocuments = fsList(true); + + const databaseDocumentsSet = new Set(databaseDocuments); + const storageDocumentsSet = new Set(storageDocuments); + + const databaseDocumentsDangling = databaseDocumentsSet.difference(storageDocumentsSet); + const storageDocumentsDangling = storageDocumentsSet.difference(databaseDocumentsSet); + + const queue: Promise[] = []; + + if (databaseDocumentsDangling.size > 0) { + database.document.delete("id", databaseDocumentsDangling); + log.debug(`Removed ${databaseDocumentsDangling.size} dangling records.`); + } + + if (storageDocumentsDangling.size > 0) { + for (const id of storageDocumentsDangling) { + queue.push(fsDelete({ id: id })); + } + + await Promise.all(queue); + + log.debug(`Removed ${storageDocumentsDangling.size} dangling files.`); + } +}; diff --git a/src/types/Document.ts b/src/types/Document.ts deleted file mode 100644 index db05eb42..00000000 --- a/src/types/Document.ts +++ /dev/null @@ -1,13 +0,0 @@ -export enum DocumentVersion { - V1 = 1 -} - -export type Document = { - data: Uint8Array; - header: { - name: string; - secretHash: Uint8Array; - passwordHash: Uint8Array | null; - }; - version: DocumentVersion; -}; diff --git a/src/types/ErrorHandler.ts b/src/types/ErrorHandler.ts deleted file mode 100644 index 3e16dc2f..00000000 --- a/src/types/ErrorHandler.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type { ContentfulStatusCode } from 'hono/utils/http-status'; - -export enum ErrorCode { - // * Generic - crash = 1000, - unknown = 1001, - validation = 1002, - parse = 1003, - notFound = 1004, - dummy = 1005, - - // * Document - documentNotFound = 1200, - documentNameAlreadyExists = 1201, - documentPasswordNeeded = 1202, - documentInvalidSize = 1203, - documentInvalidNameLength = 1204, - documentInvalidPassword = 1205, - documentInvalidPasswordLength = 1206, - documentInvalidSecret = 1207, - documentInvalidSecretLength = 1208, - documentInvalidName = 1209, - documentCorrupted = 1210 -} - -type Type = 'generic' | 'document'; - -export type Schema = { - httpCode: ContentfulStatusCode; - type: Type; - message: string; -}; diff --git a/src/types/Range.ts b/src/types/Range.ts deleted file mode 100644 index 612a2bb7..00000000 --- a/src/types/Range.ts +++ /dev/null @@ -1,9 +0,0 @@ -// https://github.com/microsoft/TypeScript/issues/43505 -export type Range< - START extends number, - END extends number, - ARR extends unknown[] = [], - ACC extends number = never -> = ARR['length'] extends END - ? 
ACC | START | END - : Range; diff --git a/src/utils/StringUtils.ts b/src/utils/StringUtils.ts deleted file mode 100644 index 7ceba70f..00000000 --- a/src/utils/StringUtils.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { config } from '../config.ts'; -import type { Range } from '../types/Range.ts'; -import { ValidatorUtils } from './ValidatorUtils.ts'; - -export class StringUtils { - public static readonly BASE64URL = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_'; - - public static random(length: number, base: Range<2, 64> = 62): string { - const baseSet = StringUtils.BASE64URL.slice(0, base); - let string = ''; - - while (length--) string += baseSet.charAt(Math.floor(Math.random() * baseSet.length)); - - return string; - } - - public static generateName(length: number = config.documentNameLengthDefault): string { - if (!ValidatorUtils.isLengthWithinRange(length, config.documentNameLengthMin, config.documentNameLengthMax)) { - length = config.documentNameLengthDefault; - } - - return StringUtils.random(length, 64); - } - - public static async nameExists(name: string): Promise { - return Bun.file(config.storagePath + name).exists(); - } - - public static async createName(length: number = config.documentNameLengthDefault): Promise { - const key = StringUtils.generateName(length); - - return (await StringUtils.nameExists(key)) ? StringUtils.createName(length + 1) : key; - } - - public static createSecret(chunkLength = 5, chunks = 4): string { - return Array.from({ length: chunks }, () => StringUtils.random(chunkLength)).join('-'); - } -} diff --git a/src/utils/ValidatorUtils.ts b/src/utils/ValidatorUtils.ts deleted file mode 100644 index e7ef03b0..00000000 --- a/src/utils/ValidatorUtils.ts +++ /dev/null @@ -1,31 +0,0 @@ -export class ValidatorUtils { - // biome-ignore lint/suspicious/noExplicitAny: We don't know the type of the value - public static isInstanceOf(value: unknown, type: new (...args: any[]) => T): value is T { - return value instanceof type; - } - - public static isTypeOf(value: unknown, type: string): value is T { - // biome-ignore lint/suspicious/useValidTypeof: We are checking the type of the value - return typeof value === type; - } - - public static isEmptyString(value: string): boolean { - return value.trim().length === 0; - } - - public static isValidArray(value: T[], validator: (value: T) => boolean): boolean { - return Array.isArray(value) && value.every(validator); - } - - public static isValidDomain(value: string): boolean { - return /\b((?=[a-z0-9-]{1,63}\.)(xn--)?[a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,63}\b/.test(value); - } - - public static isValidBase64URL(value: string): boolean { - return /^[\w-]+$/.test(value); - } - - public static isLengthWithinRange(value: number, min: number, max: number): boolean { - return value >= min && value <= max; - } -} diff --git a/src/utils/colors.ts b/src/utils/colors.ts deleted file mode 100644 index c4cb40ec..00000000 --- a/src/utils/colors.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { type ColorInput, color as bunColor } from 'bun'; - -const colorString = - (color: ColorInput) => - (...text: unknown[]): string => { - return bunColor(color, 'ansi') + text.join(' ') + colors.reset; - }; - -export const colors = { - red: colorString('#ef5454'), - orange: colorString('#ef8354'), - yellow: colorString('#efd554'), - green: colorString('#70ef54'), - turquoise: colorString('#54efef'), - blue: colorString('#5954ef'), - purple: colorString('#a454ef'), - pink: colorString('#ef54d5'), - gray: colorString('#888'), - black: 
colorString('#000'), - white: colorString('#fff'), - reset: '\x1b[0m' -} as const; diff --git a/src/utils/console.test.ts b/src/utils/console.test.ts new file mode 100644 index 00000000..fda8c8c4 --- /dev/null +++ b/src/utils/console.test.ts @@ -0,0 +1,24 @@ +import { Logger } from "./console.ts"; + +Deno.test("Logger#", () => { + const log = new Logger("test"); + const message = "Message here!"; + const extended = [ + "Extended here!", + undefined, + { + class: Logger, + log: "logloglogloglogloglogloglogloglogloglogloglogloglogloglogloghidden" + }, + null + ]; + + log.debug(message); + log.debug(extended, ...extended); + log.info(message); + log.info(extended, ...extended); + log.warn(message); + log.warn(extended, ...extended); + log.error(message); + log.error(extended, ...extended); +}); diff --git a/src/utils/console.ts b/src/utils/console.ts new file mode 100644 index 00000000..a3d727fb --- /dev/null +++ b/src/utils/console.ts @@ -0,0 +1,86 @@ +import { mapNotNullish } from "@std/collections"; +import { blue, gray, red, yellow } from "@std/fmt/colors"; +import { env } from "./env.ts"; + +export class Logger { + public static readonly level = { + none: [0, null], + error: [1, red], + warn: [2, yellow], + info: [3, blue], + debug: [4, gray] + } as const; + + public readonly source: string; + + public constructor(source = "common") { + this.source = source; + } + + public error(...message: unknown[]): void { + this.flush("error", message); + } + + public warn(...message: unknown[]): void { + this.flush("warn", message); + } + + public info(...message: unknown[]): void { + this.flush("info", message); + } + + public debug(...message: unknown[]): void { + this.flush("debug", message); + } + + private flush(level: Exclude<keyof typeof Logger.level, "none">, message: unknown[]): void { + const [levelNumber, color] = Logger.level[level]; + + if (levelNumber > env.JSPB_LOG_VERBOSITY) return; + + const prefix: string[] = []; + + if (env.JSPB_LOG_TIME) { + const temporalLocal = Temporal.Now.zonedDateTimeISO(); + const temporalYear = temporalLocal.year; + const temporalMonth = temporalLocal.month.toString().padStart(2, "0"); + const temporalDay = temporalLocal.day.toString().padStart(2, "0"); + const temporalHour = temporalLocal.hour.toString().padStart(2, "0"); + const temporalMinute = temporalLocal.minute.toString().padStart(2, "0"); + const temporalSecond = temporalLocal.second.toString().padStart(2, "0"); + const temporalMillisecond = temporalLocal.millisecond.toString().padStart(3, "0"); + const temporalOffset = temporalLocal.offset; + + prefix.push( + gray( + `${temporalYear}-${temporalMonth}-${temporalDay}T${temporalHour}:${temporalMinute}:${temporalSecond}.${temporalMillisecond + temporalOffset}` + ) + ); + } + + prefix.push(color(level.toUpperCase().padEnd(5))); + prefix.push(gray(`[${this.source}]`)); + + const prefixString = prefix.join(" "); + + const render = mapNotNullish(message, (item) => { + // biome-ignore lint/nursery/noEqualsToNull: expected + if (item == null) return; + + if (typeof item === "string") { + return `${prefixString} ${item}`; + } + + return `${prefixString} ${Deno.inspect(item, { + colors: true, + strAbbreviateSize: 60, + iterableLimit: 10 + })}`; + }); + + for (const line of render) { + // biome-ignore lint/suspicious/noConsole: logger + console[level](line); + } + } +} diff --git a/src/utils/crypto.ts b/src/utils/crypto.ts new file mode 100644 index 00000000..50888a19 --- /dev/null +++ b/src/utils/crypto.ts @@ -0,0 +1,35 @@ +import { decodeAscii85, encodeAscii85 } from "@std/encoding"; +import
{ createBLAKE3 } from "hash-wasm"; +import { constantTextEncoder } from "../global.ts"; + +const hasher = await createBLAKE3(); + +export const generateSalt = (length: number): Uint8Array => { + return crypto.getRandomValues(new Uint8Array(length)); +}; + +export const generateHash = (input: string, salt?: Uint8Array) => { + const defaultSalt = salt ?? generateSalt(4); + + hasher.init(); + hasher.update(defaultSalt); + hasher.update(constantTextEncoder.encode(input)); + + const encodedHash = encodeAscii85(hasher.digest("binary"), { standard: "Z85" }); + + return { + combo: `${encodedHash} ${encodeAscii85(defaultSalt, { standard: "Z85" })}`, + hash: encodedHash + }; +}; + +export const verifyHash = (input: string, combo: string): boolean => { + const [hash, salt] = combo.split(" "); + if (!(hash && salt)) { + throw new Error("Invalid hash combo"); + } + + const { hash: inputHash } = generateHash(input, decodeAscii85(salt, { standard: "Z85" })); + + return inputHash === hash; +}; diff --git a/src/utils/document.ts b/src/utils/document.ts new file mode 100644 index 00000000..0cad01e7 --- /dev/null +++ b/src/utils/document.ts @@ -0,0 +1,38 @@ +import { constantDocumentNameLengthDefault, constantNanoid, mutable } from "#/global.ts"; + +// deflate +export const documentVersionV1 = 1; +// no compression +export const documentVersionV2 = 2; + +export type DocumentVersionType = typeof documentVersionV1 | typeof documentVersionV2; + +export const generateName = (length = constantDocumentNameLengthDefault): string => { + let name: string; + do { + name = constantNanoid(length); + } while (mutable.database.document.get("name", name)?.name); + + return name; +}; + +export const isOwner = (userId?: string | null, documentUserId?: string | null): boolean => { + // the document is not owned, everyone can alter + if (!documentUserId) { + return true; + } + + if (userId) { + // the document is owned by the user + if (userId === documentUserId) { + return true; + } + + // the root user can alter everything + if (userId === mutable.database.user.getRoot()?.id) { + return true; + } + } + + return false; +}; diff --git a/src/utils/env.ts b/src/utils/env.ts index 0e081cde..8dcf8850 100644 --- a/src/utils/env.ts +++ b/src/utils/env.ts @@ -1,9 +1,45 @@ -import { get } from 'env-var'; -import { LogLevels } from '#util/logger.ts'; - -export const env = { - documentMaxSize: get('DOCUMENT_MAXSIZE').default(1024).asIntPositive(), - logLevel: get('LOGLEVEL').default(LogLevels.info).asIntPositive(), - port: get('PORT').default(4000).asPortNumber(), - tls: get('TLS').asBoolStrict() ?? true -} as const; +import arkenv from "arkenv"; +import { type } from "arktype"; +import { humanizeSize, humanizeTime } from "#util/humanize.ts"; +import { type DocumentVersionType, documentVersionV1, documentVersionV2 } from "./document.ts"; + +export const env = arkenv( + { + JSPB_LOG_VERBOSITY: type.keywords.number.integer.atLeast(0).atMost(4).default(3), + JSPB_LOG_TIME: type.boolean.default(true), + JSPB_HOSTNAME: type.keywords.string.ip.root + .pipe((hostname) => { + return { + isIPv6: hostname.includes(":"), + root: hostname + }; + }) + .default("::"), + JSPB_PORT: type.keywords.number.integer.atLeast(0).atMost(65_535).default(4000), + + // debug + JSPB_DEBUG_DATABASE_EPHEMERAL: type.boolean.default(false), + + // document + JSPB_DOCUMENT_SIZE: type.string.pipe(humanizeSize).default("1mb"), + JSPB_DOCUMENT_COMPRESSION: type.boolean + .pipe((boolean): DocumentVersionType => (boolean ? 
documentVersionV1 : documentVersionV2)) + .default(true), + JSPB_DOCUMENT_AGE: type.string.pipe(humanizeTime).default("0"), + JSPB_DOCUMENT_ANONYMOUS_AGE: type.string.pipe(humanizeTime).default("7d"), + + // user + JSPB_USER_REGISTER: type.boolean.default(true), + JSPB_USER_ROOT_RECOVERY: type.boolean.default(false), + + // task + JSPB_TASK_SWEEPER: type( + /^(?:\*|[0-5]?\d(?:-[0-5]?\d)?)(?:\/[1-9]\d*)?(?:,(?:\*|[0-5]?\d(?:-[0-5]?\d)?)(?:\/[1-9]\d*)?)*\s+(?:\*|(?:[01]?\d|2[0-3])(?:-(?:[01]?\d|2[0-3]))?)(?:\/[1-9]\d*)?(?:,(?:\*|(?:[01]?\d|2[0-3])(?:-(?:[01]?\d|2[0-3]))?)(?:\/[1-9]\d*)?)*\s+(?:\*|(?:[1-9]|[12]\d|3[01])(?:-(?:[1-9]|[12]\d|3[01]))?)(?:\/[1-9]\d*)?(?:,(?:\*|(?:[1-9]|[12]\d|3[01])(?:-(?:[1-9]|[12]\d|3[01]))?)(?:\/[1-9]\d*)?)*\s+(?:\*|(?:[1-9]|1[0-2]|jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)(?:-(?:[1-9]|1[0-2]|jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec))?)(?:\/[1-9]\d*)?(?:,(?:\*|(?:[1-9]|1[0-2]|jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)(?:-(?:[1-9]|1[0-2]|jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec))?)(?:\/[1-9]\d*)?)*\s+(?:\*|(?:[0-7]|sun|mon|tue|wed|thu|fri|sat)(?:-(?:[0-7]|sun|mon|tue|wed|thu|fri|sat))?)(?:\/[1-9]\d*)?(?:,(?:\*|(?:[0-7]|sun|mon|tue|wed|thu|fri|sat)(?:-(?:[0-7]|sun|mon|tue|wed|thu|fri|sat))?)(?:\/[1-9]\d*)?)*$/i + ) + .describe("a valid unix based cron: https://man7.org/linux/man-pages/man5/crontab.5.html") + .default("0 1 * * *") + }, + { + env: Deno.env.toObject() + } +); diff --git a/src/utils/error.ts b/src/utils/error.ts new file mode 100644 index 00000000..b32ad277 --- /dev/null +++ b/src/utils/error.ts @@ -0,0 +1,147 @@ +import { HTTPException } from "@hono/hono/http-exception"; +import type { ContentfulStatusCode } from "@hono/hono/utils/http-status"; +import { resolver } from "@hono/openapi"; +import { type } from "arktype"; + +// allow const enum in the future +// https://github.com/rolldown/rolldown/issues/7676 + +export const errorCodeCrash = 1000; +export const errorCodeUnknown = 1001; +export const errorCodeValidation = 1002; +// export const errorCodeParse = 1003; // moved to 1002 +export const errorCodeNotFound = 1004; +export const errorCodeDummy = 1005; + +// document +export const errorCodeDocumentNotFound = 1200; +export const errorCodeDocumentNameAlreadyExists = 1201; +export const errorCodeDocumentPasswordNeeded = 1202; +export const errorCodeDocumentInvalidSize = 1203; +// export const errorCodeDocumentInvalidNameLength = 1204; // moved to 1002 +export const errorCodeDocumentInvalidPassword = 1205; +// export const errorCodeDocumentInvalidPasswordLength = 1206; // moved to 1002 +// export const errorCodeDocumentInvalidSecret = 1207; // deprecated +// export const errorCodeDocumentInvalidSecretLength = 1208; // deprecated +// export const errorCodeDocumentInvalidName = 1209; // moved to 1002 +export const errorCodeDocumentCorrupted = 1210; + +// user +export const errorCodeUserInvalidToken = 1300; + +export type ErrorCodeType = + | typeof errorCodeCrash + | typeof errorCodeUnknown + | typeof errorCodeValidation + // | typeof errorCodeParse + | typeof errorCodeNotFound + | typeof errorCodeDummy + // document + | typeof errorCodeDocumentNotFound + | typeof errorCodeDocumentNameAlreadyExists + | typeof errorCodeDocumentPasswordNeeded + | typeof errorCodeDocumentInvalidSize + // | typeof errorCodeDocumentInvalidNameLength + | typeof errorCodeDocumentInvalidPassword + // | typeof errorCodeDocumentInvalidPasswordLength + // | typeof errorCodeDocumentInvalidSecret + // | typeof errorCodeDocumentInvalidSecretLength + // | 
typeof errorCodeDocumentInvalidName + | typeof errorCodeDocumentCorrupted + // user + | typeof errorCodeUserInvalidToken; + +export type Schema = { + httpCode: ContentfulStatusCode; + message: string; +}; + +const errorDefinition: Record<ErrorCodeType, Schema> = { + [errorCodeCrash]: { + httpCode: 500, + message: + "An unexpected server error occurred. If this persists, open an issue at: https://github.com/jspaste/backend/issues" + }, + [errorCodeUnknown]: { + httpCode: 503, + message: "Server handler has not loaded yet. Wait..." + }, + [errorCodeValidation]: { + httpCode: 400, + message: "The request contains invalid or malformed data." + }, + [errorCodeNotFound]: { + httpCode: 404, + message: "The requested resource could not be found." + }, + [errorCodeDummy]: { + httpCode: 200, + message: "Placeholder response for documentation purposes." + }, + + // document + [errorCodeDocumentNotFound]: { + httpCode: 404, + message: "No document exists with the specified name." + }, + [errorCodeDocumentNameAlreadyExists]: { + httpCode: 409, + message: "A document with this name already exists. Choose a different name." + }, + [errorCodeDocumentPasswordNeeded]: { + httpCode: 401, + message: "This document is password protected. Include the password in your request." + }, + [errorCodeDocumentInvalidSize]: { + httpCode: 413, + message: "The document content exceeds the maximum allowed size." + }, + [errorCodeDocumentInvalidPassword]: { + httpCode: 403, + message: "The provided password is incorrect." + }, + [errorCodeDocumentCorrupted]: { + httpCode: 500, + message: "The document content is corrupted and cannot be retrieved." + }, + + // user + [errorCodeUserInvalidToken]: { + httpCode: 401, + message: "The provided authorization token is invalid or missing privileges." + } +} as const; + +export const errorGet = (code: ErrorCodeType, overrideMessage?: string) => { + const { message } = errorDefinition[code]; + + return { code: code, message: overrideMessage ?? message }; +}; + +export const errorThrow = (code: ErrorCodeType, overrideMessage?: string): never => { + const { httpCode, message } = errorDefinition[code]; + + throw new HTTPException(httpCode, { + res: Response.json({ code: code, message: overrideMessage ??
message }) + }); +}; + +export const genericErrorResponse = { + content: { + "application/json": { + schema: resolver( + type({ + code: type.number.configure({ + description: "The error code", + examples: [errorCodeDummy] + }), + message: type.string.configure({ + description: "The error description" + }) + }).configure({ + ref: "GenericError" + }) + ) + } + } +} as const; diff --git a/src/utils/fs.ts b/src/utils/fs.ts new file mode 100644 index 00000000..800fdd90 --- /dev/null +++ b/src/utils/fs.ts @@ -0,0 +1,113 @@ +import type { Context } from "@hono/hono"; +import type { Document } from "#db/query.ts"; +import { constantPathStructStorageData, constantTemporalToUTC, constantTemporalUTC } from "../global.ts"; +import type { Env } from "../http/handler.ts"; +import { documentVersionV1, documentVersionV2 } from "./document.ts"; +import { env } from "./env.ts"; +import { errorCodeDocumentCorrupted, errorCodeDocumentInvalidSize, errorThrow } from "./error.ts"; + +export const fsWrite = async (ctx: Context<Env>, { id }: Pick<Document, "id">): Promise<void> => { + await using handle = await Deno.open(constantPathStructStorageData + id, { + create: true, + write: true, + truncate: true + }); + + let stream: ReadableStream<Uint8Array>; + switch (env.JSPB_DOCUMENT_COMPRESSION) { + case documentVersionV1: { + // ctx.req.raw.body is only null on GET/HEAD + stream = (ctx.req.raw.body as NonNullable<typeof ctx.req.raw.body>).pipeThrough(new CompressionStream("deflate")); + + break; + } + case documentVersionV2: { + // ctx.req.raw.body is only null on GET/HEAD + stream = ctx.req.raw.body as NonNullable<typeof ctx.req.raw.body>; + + break; + } + default: { + return errorThrow(errorCodeDocumentCorrupted); + } + } + + try { + await stream.pipeTo(handle.writable, { preventClose: true }); + } catch (why) { + void fsDelete({ id: id }); + + if (why instanceof Deno.errors.BrokenPipe) { + return errorThrow(errorCodeDocumentInvalidSize); + } + + throw why; + } +}; + +export const fsDelete = async ({ id }: Pick<Document, "id">): Promise<void> => { + try { + await Deno.remove(constantPathStructStorageData + id); + } catch (why) { + // already deleted + if (why instanceof Deno.errors.NotFound) return; + + throw why; + } +}; + +export const fsRead = async ( + ctx: Context<Env>, + { id, version }: Pick<Document, "id" | "version">, + clientIgnoreCapabilities = false +): Promise<ReadableStream<Uint8Array>> => { + const handle = await Deno.open(constantPathStructStorageData + id); + + const hasClientDeflate = clientIgnoreCapabilities ?
false : ctx.req.header("accept-encoding")?.includes("deflate"); + + let stream: ReadableStream<Uint8Array>; + switch (version) { + case documentVersionV1: { + if (hasClientDeflate) { + ctx.res.headers.set("content-encoding", "deflate"); + stream = handle.readable; + } else { + stream = handle.readable.pipeThrough(new DecompressionStream("deflate")); + } + + break; + } + case documentVersionV2: { + stream = handle.readable; + + break; + } + default: { + return errorThrow(errorCodeDocumentCorrupted); + } + } + + return stream; +}; + +// relaxed exists because races between fs/db may occur +export function* fsList(relaxed?: boolean): Iterable<string> { + for (const entry of Deno.readDirSync(constantPathStructStorageData)) { + if (entry.isFile) { + if (relaxed) { + const info = Deno.statSync(constantPathStructStorageData + entry.name); + + if ( + !info.mtime || + constantTemporalUTC().epochMilliseconds - + constantTemporalToUTC(info.mtime.toTemporalInstant()).epochMilliseconds >= + 10_000 + ) { + yield entry.name; + } + } else { + yield entry.name; + } + } + } +} diff --git a/src/utils/humanize.test.ts b/src/utils/humanize.test.ts new file mode 100644 index 00000000..527ef63f --- /dev/null +++ b/src/utils/humanize.test.ts @@ -0,0 +1,62 @@ +import { assertStrictEquals, assertThrows } from "@std/assert"; +import { humanizeSize, humanizeTime } from "#util/humanize.ts"; + +Deno.test("humanizeTime", () => { + const basic = humanizeTime("1d"); + assertStrictEquals(basic.total("seconds"), 86_400); + + // case sensitive + const sensitiveMinutes = humanizeTime("1m"); + const sensitiveMonths = humanizeTime("1M"); + assertStrictEquals(sensitiveMinutes.minutes, 1); + assertStrictEquals(sensitiveMonths.months, 1); + + const zero = humanizeTime("0"); + assertStrictEquals(zero.total("seconds"), 0); + + const zeroUnit = humanizeTime("0d"); + assertStrictEquals(zeroUnit.total("milliseconds"), 0); + + // invalid unit + assertThrows(() => humanizeTime("1x")); + + // spaces in between + assertThrows(() => humanizeTime("1 d")); + + // float + assertThrows(() => humanizeTime("1.9d")); + + // multiple units + assertThrows(() => humanizeTime("1d 50m")); +}); + +Deno.test("humanizeSize", () => { + const basic = humanizeSize("1gb"); + assertStrictEquals(basic, 1_000_000_000); + + const basicBinary = humanizeSize("1gib"); + assertStrictEquals(basicBinary, 1_073_741_824); + + // case insensitive + const insensitive = humanizeSize("1kIb"); + assertStrictEquals(insensitive, 1024); + + // float + const floatValue = humanizeSize("1.5mb"); + assertStrictEquals(floatValue, 1_500_000); + + const zero = humanizeSize("0"); + assertStrictEquals(zero, 0); + + const zeroUnit = humanizeSize("0mb"); + assertStrictEquals(zeroUnit, 0); + + // invalid unit + assertThrows(() => humanizeSize("1xib")); + + // spaces in between + assertThrows(() => humanizeSize("1 gb")); + + // multiple units + assertThrows(() => humanizeSize("1gb 50mb")); +}); diff --git a/src/utils/humanize.ts b/src/utils/humanize.ts new file mode 100644 index 00000000..e97c6e69 --- /dev/null +++ b/src/utils/humanize.ts @@ -0,0 +1,49 @@ +const timeUnitsRegex = /^(\d+)([smhdwMy])$/; +const timeUnitMap: Record<string, string> = { + s: "seconds", + m: "minutes", + h: "hours", + d: "days", + w: "weeks", + M: "months", + y: "years" +} as const; + +const sizeUnitsRegex = /^(\d+(?:\.\d+)?)(b|[kmgtp]i?b)$/i; +const sizeUnits: Record<string, number> = { + b: 1, + kb: 1000, + kib: 1024, + mb: 1000 ** 2, + mib: 1024 ** 2, + gb: 1000 ** 3, + gib: 1024 ** 3, + tb: 1000 ** 4, + tib: 1024 ** 4 +} as const; + +export const humanizeTime =
(input: string): Temporal.Duration => { + if (input === "0") { + return Temporal.Duration.from({ seconds: 0 }); + } + + const [, value, unit] = timeUnitsRegex.exec(input) ?? []; + if (!(value && unit)) { + throw new Error(`Invalid time "${input}"`); + } + + return Temporal.Duration.from({ [timeUnitMap[unit] as string]: Number.parseInt(value, 10) }); +}; + +export const humanizeSize = (input: string): number => { + if (input === "0") { + return 0; + } + + const [, value, unit] = sizeUnitsRegex.exec(input) ?? []; + if (!(value && unit)) { + throw new Error(`Invalid size "${input}"`); + } + + return Number.parseFloat(value) * (sizeUnits[unit.toLowerCase()] as number); +}; diff --git a/src/utils/logger.ts b/src/utils/logger.ts deleted file mode 100644 index df201d66..00000000 --- a/src/utils/logger.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { colors } from '#util/colors.ts'; - -export enum LogLevels { - none = 0, - error = 1, - warn = 2, - info = 3, - debug = 4 -} - -let logLevel: LogLevels = LogLevels.info; - -export const logger = { - set: (level: LogLevels): void => { - logLevel = level; - }, - - error: (...text: unknown[]): void => { - if (logLevel >= LogLevels.error) { - console.error(colors.gray('[BACKEND]'), colors.red('[ERROR]'), text.join(' ')); - } - }, - - warn: (...text: unknown[]): void => { - if (logLevel >= LogLevels.warn) { - console.warn(colors.gray('[BACKEND]'), colors.yellow('[WARN]'), text.join(' ')); - } - }, - - info: (...text: unknown[]): void => { - if (logLevel >= LogLevels.info) { - console.info(colors.gray('[BACKEND]'), colors.blue('[INFO]'), text.join(' ')); - } - }, - - debug: (...text: unknown[]): void => { - if (logLevel >= LogLevels.debug) { - console.debug(colors.gray('[BACKEND]'), colors.gray('[DEBUG]'), text.join(' ')); - } - } -} as const; diff --git a/src/utils/regex.ts b/src/utils/regex.ts new file mode 100644 index 00000000..315286e6 --- /dev/null +++ b/src/utils/regex.ts @@ -0,0 +1,2 @@ +export const regexBase64URL = /^[A-Za-z0-9_-]+$/; +export const regexHeaderBearer = /^Bearer .+$/; diff --git a/src/utils/user.ts b/src/utils/user.ts new file mode 100644 index 00000000..d0f63093 --- /dev/null +++ b/src/utils/user.ts @@ -0,0 +1,7 @@ +import { constantNanoid } from "../global.ts"; + +export const generateToken = (id: string): string => { + const noise = constantNanoid(32); + + return `${id}.${noise}`; +}; diff --git a/src/utils/validator/document.ts b/src/utils/validator/document.ts new file mode 100644 index 00000000..6ad8e090 --- /dev/null +++ b/src/utils/validator/document.ts @@ -0,0 +1,87 @@ +import { type } from "arktype"; +import { + constantDocumentNameLengthMax, + constantDocumentNameLengthMin, + constantDocumentPasswordLengthMax, + constantDocumentPasswordLengthMin +} from "#/global.ts"; +import { regexBase64URL } from "../regex.ts"; +import { validatorCreationTimestamp } from "./shared.ts"; + +export const validatorDocumentName = type(regexBase64URL) + .atLeastLength(constantDocumentNameLengthMin) + .atMostLength(constantDocumentNameLengthMax) + .configure({ + ref: "DocumentName", + description: "The document name", + examples: ["myDocumentNameHere"], + expected: (ctx) => { + switch (ctx.code) { + case "pattern": { + return "a valid Base64URL"; + } + case "minLength": { + return `at least ${ctx.rule} characters long`; + } + case "maxLength": { + return `at most ${ctx.rule} characters long`; + } + default: { + return "valid"; + } + } + } + }); + +export const validatorDocumentNameLength = type.keywords.string.integer.parse + 
.to(type.number.atLeast(constantDocumentNameLengthMin).atMost(constantDocumentNameLengthMax)) + .configure({ + ref: "DocumentNameLength", + description: "The name length for the document", + expected: (ctx) => { + switch (ctx.code) { + case "domain": { + return "a valid integer"; + } + case "min": { + return `must be greater than ${ctx.rule}`; + } + case "max": { + return `must be less than ${ctx.rule}`; + } + default: { + return "valid"; + } + } + } + }); + +export const validatorDocumentPassword = type.string + .atLeastLength(constantDocumentPasswordLengthMin) + .atMostLength(constantDocumentPasswordLengthMax) + .configure({ + ref: "DocumentPassword.default", + description: "The password for the document (read access)", + examples: ["myDocumentPasswordHere"] + }); + +export const validatorDocumentPasswordEmpty = type.string.exactlyLength(0).configure({ + ref: "DocumentPassword.empty", + description: "A blank password for the document", + examples: [""] +}); + +export const validatorDocumentDownload = type.unknown.configure({ + ref: "DocumentDownload", + description: "Indicates to the client that the document should be downloaded as a file attachment (only useful in web browsers)" +}); + +export const validatorDocumentListObject = type({ + name: validatorDocumentName, + created: validatorCreationTimestamp +}).configure({ + // FIXME: schema references not being generated when using toOpenAPISchema() + // Invalid object key "DocumentListMetadata" at position 2 in "/components/schemas/DocumentListMetadata": key not found in object + //ref: "DocumentListMetadata", + description: "An object with document metadata" +}); diff --git a/src/utils/validator/handler.ts b/src/utils/validator/handler.ts new file mode 100644 index 00000000..434a7159 --- /dev/null +++ b/src/utils/validator/handler.ts @@ -0,0 +1,8 @@ +import type { sValidator } from "@hono/standard-validator"; +import { errorCodeValidation, errorThrow } from "../error.ts"; + +export const validatorHandler: Parameters<typeof sValidator>[2] = (res) => { + if (res.success) return; + + return errorThrow(errorCodeValidation, res.error[0]?.message); +}; diff --git a/src/utils/validator/shared.ts b/src/utils/validator/shared.ts new file mode 100644 index 00000000..af864ad2 --- /dev/null +++ b/src/utils/validator/shared.ts @@ -0,0 +1,6 @@ +import { type } from "arktype"; + +export const validatorCreationTimestamp = type.keywords.string.date.iso.root.configure({ + description: "The ISO 8601 timestamp when the resource was created", + examples: ["2026-01-01T00:00:00.000Z"] +}); diff --git a/src/utils/validator/user.ts b/src/utils/validator/user.ts new file mode 100644 index 00000000..0f3b5806 --- /dev/null +++ b/src/utils/validator/user.ts @@ -0,0 +1,51 @@ +import { type } from "arktype"; +import { constantUserTokenLength } from "#/global.ts"; +import { regexBase64URL, regexHeaderBearer } from "../regex.ts"; + +// FIXME: schema references not being generated when using toOpenAPISchema() +export const validatorUserToken = type.string.exactlyLength(constantUserTokenLength).configure({ + ref: "UserToken.default", + description: "A user token", + examples: ["myUserTokenHere"], + expected: (ctx) => { + switch (ctx.code) { + case "domain": { + return "a string"; + } + case "exactLength": { + return `exactly ${ctx.rule} characters`; + } + default: { + return "valid"; + } + } + } +}); + +export const validatorUserTokenLegacy = type(regexBase64URL) + .exactlyLength(32) + .configure({ + ref: "UserToken.legacy", + description: "An unhashed user token", + examples: ["myUserTokenHere"], +
expected: (ctx) => { + switch (ctx.code) { + case "pattern": { + return "a valid Base64URL"; + } + case "exactLength": { + return `exactly ${ctx.rule} characters`; + } + default: { + return "valid"; + } + } + } + }); + +export const validatorUserHeader = type(regexHeaderBearer) + .configure({ + description: "A RFC 6750 structured Bearer header", + expected: "a valid header" + }) + .pipe((string) => string.split(" ")[1], validatorUserToken.or(validatorUserTokenLegacy)); diff --git a/tsconfig.json b/tsconfig.json index 9ade037c..52a5cb2c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,47 +1,42 @@ { - "$schema": "https://json.schemastore.org/tsconfig.json", - "compilerOptions": { - "lib": ["ESNext"], - "module": "ESNext", - "moduleResolution": "Bundler", - "target": "ESNext", + "$schema": "https://www.schemastore.org/tsconfig.json", + "compilerOptions": { + "lib": ["ESNext", "deno.window", "deno.unstable"], + "types": ["node"], + "module": "esnext", + "moduleResolution": "bundler", - "allowImportingTsExtensions": true, - "allowJs": true, - "checkJs": true, - "esModuleInterop": true, - "incremental": true, - "noEmit": true, - "resolveJsonModule": true, - "skipLibCheck": true, + "checkJs": false, + "skipLibCheck": true, - "strict": true, - "allowUnreachableCode": false, - "allowUnusedLabels": false, - "exactOptionalPropertyTypes": false, - "forceConsistentCasingInFileNames": true, - "isolatedModules": true, - "noFallthroughCasesInSwitch": true, - "noImplicitOverride": true, - "noImplicitReturns": true, - "noPropertyAccessFromIndexSignature": false, - "noUncheckedIndexedAccess": true, - "noUncheckedSideEffectImports": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "verbatimModuleSyntax": true, + "strict": true, + "allowUnreachableCode": false, + "allowUnusedLabels": false, + "exactOptionalPropertyTypes": false, + "isolatedDeclarations": false, + "noErrorTruncation": false, + "noFallthroughCasesInSwitch": true, + "noImplicitAny": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noImplicitThis": true, + "noPropertyAccessFromIndexSignature": false, + "noUncheckedIndexedAccess": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "useUnknownInCatchVariables": true, + "verbatimModuleSyntax": true, - "baseUrl": ".", - "paths": { - "#v1/*": ["./src/endpoints/v1/*"], - "#v2/*": ["./src/endpoints/v2/*"], - "#document/*": ["./src/document/*"], - "#server/*": ["./src/server/*"], - "#type/*": ["./src/types/*"], - "#util/*": ["./src/utils/*"] - }, - "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.tsbuildinfo" - }, - "include": ["./"], - "exclude": ["./dist/", "./node_modules/", "./storage/"] + "baseUrl": ".", + "paths": { + "#/*": ["./src/*"], + "#db/*": ["./src/database/*"], + "#document/*": ["./src/document/*"], + "#endpoint/*": ["./src/endpoints/*"], + "#http/*": ["./src/http/*"], + "#task/*": ["./src/tasks/*"], + "#util/*": ["./src/utils/*"] + } + }, + "exclude": ["./dist/", "./node_modules/", "./storage/"] }
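
Usage sketch for the new salted-hash helpers in src/utils/crypto.ts (the "#util/crypto.ts" import alias and the call sites below are illustrative, not taken from the diff): generateHash() salts the input with 4 random bytes and returns both the bare Z85-encoded BLAKE3 hash and a "hash salt" combo string, which verifyHash() later splits to re-derive and compare the hash.

import { generateHash, verifyHash } from "#util/crypto.ts";

// Hash a document password once; persist `combo`, which packs the Z85-encoded
// hash and salt as "<hash> <salt>".
const { combo } = generateHash("myDocumentPasswordHere");

// Verify later inputs against the stored combo.
verifyHash("myDocumentPasswordHere", combo); // true
verifyHash("wrongPassword", combo); // false
verifyHash("anything", "no-salt-here"); // throws "Invalid hash combo"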
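
Sketch of how a handler is expected to consume src/utils/error.ts (the requireDocument guard is hypothetical; errorThrow, errorCodeDocumentNotFound and the Document type come from the diff): errorThrow() looks up the HTTP status and default message for a numeric code and raises an HTTPException whose JSON body carries { code, message }, so TypeScript treats the call as never-returning and narrows afterwards.

import type { Document } from "#db/query.ts";
import { errorCodeDocumentNotFound, errorThrow } from "#util/error.ts";

// Bail out with HTTP 404 and
// { code: 1200, message: "No document exists with the specified name." }
// whenever a lookup comes back empty.
const requireDocument = (document: Document | undefined): Document => {
  if (!document) {
    errorThrow(errorCodeDocumentNotFound);
  }

  return document;
};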
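
Wiring sketch for the new arktype validators (the Hono app and route path are assumptions; validatorDocumentName, validatorHandler and the underlying 1002 validation error come from the diff): sValidator() accepts the arktype schema directly because arktype implements Standard Schema, and passing validatorHandler as the hook replaces Hono's default failure response with the project's generic validation error.

import { Hono } from "@hono/hono";
import { sValidator } from "@hono/standard-validator";
import { type } from "arktype";
import { validatorDocumentName } from "#util/validator/document.ts";
import { validatorHandler } from "#util/validator/handler.ts";

const app = new Hono();

app.get(
  "/documents/:name",
  // On failure, validatorHandler throws the generic "invalid or malformed data" error (code 1002).
  sValidator("param", type({ name: validatorDocumentName }), validatorHandler),
  (ctx) => ctx.json({ name: ctx.req.valid("param").name })
);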