From 514d60d35bd67bc8babdc65c87fc49bad5cf2005 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 03:44:58 +0100 Subject: [PATCH 01/27] Switch to Bun This obviates the need for source-map-support and dotenv dependencies without any code modification. This does not update the docker files since I have no idea how those work. --- .github/workflows/build-server.yml | 15 +- README.md | 7 +- server/.gitignore | 149 +++++++++++- server/bun.lock | 122 ++++++++++ server/bunfig.toml | 4 + server/package.json | 15 +- server/tsconfig.json | 43 ++-- server/yarn.lock | 348 ----------------------------- 8 files changed, 313 insertions(+), 390 deletions(-) create mode 100644 server/bun.lock create mode 100644 server/bunfig.toml delete mode 100644 server/yarn.lock diff --git a/.github/workflows/build-server.yml b/.github/workflows/build-server.yml index eacd5eed..be351377 100644 --- a/.github/workflows/build-server.yml +++ b/.github/workflows/build-server.yml @@ -13,17 +13,14 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - version: ["lts/*", "latest"] + version: ["1.2.15", "latest"] steps: - uses: actions/checkout@v3 - - name: Use latest Node.js LTS - uses: actions/setup-node@v3 + - name: Setup bun.sh + uses: oven-sh/setup-bun@v1 with: - node-version: ${{ matrix.version }} - # cache: "yarn" - - run: yarn + bun-version: ${{ matrix.version }} + - run: bun install working-directory: ./server - - run: yarn build - working-directory: ./server - - run: yarn test + - run: bun test working-directory: ./server diff --git a/README.md b/README.md index 5533744f..857dafca 100644 --- a/README.md +++ b/README.md @@ -46,11 +46,10 @@ You can control who has access to a Sync Server by editing its `allowed-users.tx System Install
-- install recent nodejs (~17) +- install [Bun](https://bun.sh/) - clone code, `cd server` -- `npm install` -- `npm run build` -- this has to be run after every time the code is edited -- `npm run start` +- `bun install` +- `bun start` - to stop, press Ctrl+C twice diff --git a/server/.gitignore b/server/.gitignore index 892bc841..0aaad063 100644 --- a/server/.gitignore +++ b/server/.gitignore @@ -1,5 +1,146 @@ -node_modules/ -/dist/ -*.sqlite -*.pem /mapsync/ + +# Created by https://www.toptal.com/developers/gitignore/api/node +# Edit at https://www.toptal.com/developers/gitignore?templates=node + +### Node ### +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +### Node Patch ### +# Serverless Webpack directories +.webpack/ + +# Optional stylelint cache + +# SvelteKit build / generate output +.svelte-kit + +# End of https://www.toptal.com/developers/gitignore/api/node diff --git a/server/bun.lock b/server/bun.lock new file mode 100644 index 00000000..c244870a --- /dev/null +++ b/server/bun.lock @@ -0,0 +1,122 @@ +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "civmap-server", + "dependencies": { + "async-mutex": "^0.4.0", + "better-sqlite3": "^9.5.0", + "kysely": "^0.26.1", + "zod": "^3.21.4", + "zod-validation-error": "^1.3.1", + }, + "devDependencies": { + "@types/better-sqlite3": "^7.6.4", + "@types/bun": "^1.2.15", + "prettier": "^3.0.1", + "typescript": "^5.1.6", + }, + }, + }, + 
"packages": { + "@types/better-sqlite3": ["@types/better-sqlite3@7.6.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA=="], + + "@types/bun": ["@types/bun@1.2.15", "", { "dependencies": { "bun-types": "1.2.15" } }, "sha512-U1ljPdBEphF0nw1MIk0hI7kPg7dFdPyM7EenHsp6W5loNHl7zqy6JQf/RKCgnUn2KDzUpkBwHPnEJEjII594bA=="], + + "@types/node": ["@types/node@24.0.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-yZQa2zm87aRVcqDyH5+4Hv9KYgSdgwX1rFnGvpbzMaC7YAljmhBET93TPiTd3ObwTL+gSpIzPKg5BqVxdCvxKg=="], + + "async-mutex": ["async-mutex@0.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA=="], + + "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], + + "better-sqlite3": ["better-sqlite3@9.6.0", "", { "dependencies": { "bindings": "^1.5.0", "prebuild-install": "^7.1.1" } }, "sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ=="], + + "bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="], + + "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], + + "buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], + + "bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="], + + "chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="], + + "decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="], + + "deep-extend": ["deep-extend@0.6.0", "", {}, "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="], + + "detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], + + "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="], + + "expand-template": ["expand-template@2.0.3", "", {}, "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="], + + "file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="], + + "fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="], + + "github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="], + + "ieee754": ["ieee754@1.2.1", "", {}, 
"sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], + + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + + "ini": ["ini@1.3.8", "", {}, "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="], + + "kysely": ["kysely@0.26.3", "", {}, "sha512-yWSgGi9bY13b/W06DD2OCDDHQmq1kwTGYlQ4wpZkMOJqMGCstVCFIvxCCVG4KfY1/3G0MhDAcZsip/Lw8/vJWw=="], + + "mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="], + + "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], + + "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="], + + "napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="], + + "node-abi": ["node-abi@3.75.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-OhYaY5sDsIka7H7AtijtI9jwGYLyl29eQn/W623DiN/MIv5sUqc4g7BIDThX+gb7di9f6xK02nkp8sdfFWZLTg=="], + + "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], + + "prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="], + + "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], + + "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="], + + "rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="], + + "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], + + "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], + + "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], + + "simple-concat": ["simple-concat@1.0.1", "", {}, "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="], + + "simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, 
"sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="], + + "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], + + "strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="], + + "tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], + + "tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="], + + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="], + + "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], + + "undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], + + "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], + + "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], + + "zod": ["zod@3.25.56", "", {}, "sha512-rd6eEF3BTNvQnR2e2wwolfTmUTnp70aUTqr0oaGbHifzC3BKJsoV+Gat8vxUMR1hwOKBs6El+qWehrHbCpW6SQ=="], + + "zod-validation-error": ["zod-validation-error@1.5.0", "", { "peerDependencies": { "zod": "^3.18.0" } }, "sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw=="], + } +} diff --git a/server/bunfig.toml b/server/bunfig.toml new file mode 100644 index 00000000..7d1ca567 --- /dev/null +++ b/server/bunfig.toml @@ -0,0 +1,4 @@ +telemetry = false + +[install] +saveTextLockfile = true diff --git a/server/package.json b/server/package.json index 516626f6..6941547e 100644 --- a/server/package.json +++ b/server/package.json @@ -4,25 +4,24 @@ "private": true, "author": "Gjum", "license": "GPL-3.0-only", + "type": "module", + "module": "src/main.ts", "scripts": { - "build": "tsc", - "format": "prettier -w .", - "test": "true", - "start": "node -r source-map-support/register dist/main.js", - "start:dev": "tsc && node --inspect -r source-map-support/register dist/main.js" + "format": "bunx prettier -w .", + "test": "bun test ./src/*.test.ts", + "start": "bun src/main.ts", + "start:dev": "bun --inspect src/main.ts" }, "dependencies": { "async-mutex": "^0.4.0", "better-sqlite3": "^9.5.0", "kysely": "^0.26.1", - "source-map-support": "^0.5.21", "zod": "^3.21.4", "zod-validation-error": "^1.3.1" }, "devDependencies": { "@types/better-sqlite3": "^7.6.4", - "@types/node": "^18.17.4", - "dotenv": "^16.0.1", + "@types/bun": "^1.2.15", "prettier": "^3.0.1", "typescript": "^5.1.6" }, diff 
--git a/server/tsconfig.json b/server/tsconfig.json index a21b0116..44f17047 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -1,22 +1,31 @@ { "compilerOptions": { - "allowSyntheticDefaultImports": true, - "esModuleInterop": true, - "experimentalDecorators": true, - "forceConsistentCasingInFileNames": true, - "isolatedModules": true, - "lib": ["esnext", "webworker"], - "module": "CommonJS", - "moduleResolution": "node", - "noImplicitReturns": true, + // Enable latest features + "lib": ["ESNext"], + "target": "ESNext", + "module": "ESNext", + "moduleDetection": "force", + "jsx": "react-jsx", + "allowJs": true, + + // Bundler mode + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, "noFallthroughCasesInSwitch": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false, + + // Custom options "noImplicitAny": true, - "outDir": "dist", - "resolveJsonModule": true, - "skipLibCheck": true, - "sourceMap": true, - "strict": true, - "target": "ESNext" - }, - "include": ["src"] + "forceConsistentCasingInFileNames": true + } } diff --git a/server/yarn.lock b/server/yarn.lock deleted file mode 100644 index db76b2f1..00000000 --- a/server/yarn.lock +++ /dev/null @@ -1,348 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -"@types/better-sqlite3@^7.6.4": - version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/better-sqlite3/-/better-sqlite3-7.6.4.tgz#102462611e67aadf950d3ccca10292de91e6f35b" - integrity sha512-dzrRZCYPXIXfSR1/surNbJ/grU3scTaygS0OMzjlGf71i9sc2fGyHPXXiXmEvNIoE0cGwsanEFMVJxPXmco9Eg== - dependencies: - "@types/node" "*" - -"@types/node@*": - version "16.11.36" - resolved "https://registry.npmjs.org/@types/node/-/node-16.11.36.tgz" - integrity sha512-FR5QJe+TaoZ2GsMHkjuwoNabr+UrJNRr2HNOo+r/7vhcuntM6Ee/pRPOnRhhL2XE9OOvX9VLEq+BcXl3VjNoWA== - -"@types/node@^18.17.4": - version "18.17.5" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.17.5.tgz#c58b12bca8c2a437b38c15270615627e96dd0bc5" - integrity sha512-xNbS75FxH6P4UXTPUJp/zNPq6/xsfdJKussCWNOnz4aULWIRwMgP1LgaB5RiBnMX1DPCYenuqGZfnIAx5mbFLA== - -async-mutex@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/async-mutex/-/async-mutex-0.4.0.tgz#ae8048cd4d04ace94347507504b3cf15e631c25f" - integrity sha512-eJFZ1YhRR8UN8eBLoNzcDPcy/jqjsg6I1AP+KvWQX80BqOSW1oJPJXDylPUEeMr2ZQvHgnQ//Lp6f3RQ1zI7HA== - dependencies: - tslib "^2.4.0" - -base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - -better-sqlite3@^9.5.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/better-sqlite3/-/better-sqlite3-9.6.0.tgz#b01e58ba7c48abcdc0383b8301206ee2ab81d271" - integrity sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ== - dependencies: - bindings "^1.5.0" - prebuild-install "^7.1.1" - -bindings@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - -bl@^4.0.3: - version "4.1.0" - resolved 
"https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -buffer@^5.5.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - -chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -decompress-response@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" - integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== - dependencies: - mimic-response "^3.1.0" - -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - -detect-libc@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz" - integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w== - -dotenv@^16.0.1: - version "16.0.1" - resolved "https://registry.npmjs.org/dotenv/-/dotenv-16.0.1.tgz" - integrity sha512-1K6hR6wtk2FviQ4kEiSjFiH5rpzEVi8WW0x96aztHVMhEspNpc4DVOUTEHtEva5VThQ8IaBX1Pe4gSzpVVUsKQ== - -end-of-stream@^1.1.0, end-of-stream@^1.4.1: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -expand-template@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" - integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== - -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -github-from-package@0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" - integrity 
sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== - -ieee754@^1.1.13: - version "1.2.1" - resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - -inherits@^2.0.3, inherits@^2.0.4: - version "2.0.4" - resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -ini@~1.3.0: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -kysely@^0.26.1: - version "0.26.1" - resolved "https://registry.yarnpkg.com/kysely/-/kysely-0.26.1.tgz#2d2fb9316d53f3062596102c98d0d476e4e097b5" - integrity sha512-FVRomkdZofBu3O8SiwAOXrwbhPZZr8mBN5ZeUWyprH29jzvy6Inzqbd0IMmGxpd4rcOCL9HyyBNWBa8FBqDAdg== - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -mimic-response@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" - integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== - -minimist@^1.2.0, minimist@^1.2.3: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - -napi-build-utils@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/napi-build-utils/-/napi-build-utils-1.0.2.tgz#b1fddc0b2c46e380a0b7a76f984dd47c41a13806" - integrity sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg== - -node-abi@^3.3.0: - version "3.45.0" - resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.45.0.tgz#f568f163a3bfca5aacfce1fbeee1fa2cc98441f5" - integrity sha512-iwXuFrMAcFVi/ZoZiqq8BzAdsLw9kxDfTC0HMyjXfSL/6CSDAGD5UmR7azrAgWV1zKYq7dUUMj4owusBWKLsiQ== - dependencies: - semver "^7.3.5" - -once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -prebuild-install@^7.1.1: - version "7.1.2" - resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.2.tgz#a5fd9986f5a6251fbc47e1e5c65de71e68c0a056" - integrity sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ== - dependencies: - detect-libc "^2.0.0" - expand-template "^2.0.3" - github-from-package "0.0.0" - minimist "^1.2.3" - mkdirp-classic "^0.5.3" - napi-build-utils "^1.0.1" - node-abi "^3.3.0" - pump "^3.0.0" - rc "^1.2.7" - simple-get "^4.0.0" - tar-fs "^2.0.0" - tunnel-agent "^0.6.0" - -prettier@^3.0.1: - version "3.0.1" - resolved 
"https://registry.yarnpkg.com/prettier/-/prettier-3.0.1.tgz#65271fc9320ce4913c57747a70ce635b30beaa40" - integrity sha512-fcOWSnnpCrovBsmFZIGIy9UqK2FaI7Hqax+DIO0A9UxeVoY4iweyaFjS5TavZN97Hfehph0nhsZnjlVKzEQSrQ== - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -rc@^1.2.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - -readable-stream@^3.1.1, readable-stream@^3.4.0: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -safe-buffer@^5.0.1, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -semver@^7.3.5: - version "7.3.7" - resolved "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - lru-cache "^6.0.0" - -simple-concat@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" - integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== - -simple-get@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-4.0.1.tgz#4a39db549287c979d352112fa03fd99fd6bc3543" - integrity sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA== - dependencies: - decompress-response "^6.0.0" - once "^1.3.1" - simple-concat "^1.0.0" - -source-map-support@^0.5.21: - version "0.5.21" - resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0: - version "0.6.1" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== - -tar-fs@^2.0.0: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" - integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.1.4" - -tar-stream@^2.1.4: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - -tslib@^2.4.0: - version "2.6.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.1.tgz#fd8c9a0ff42590b25703c0acb3de3d3f4ede0410" - integrity sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig== - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" - -typescript@^5.1.6: - version "5.1.6" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.1.6.tgz#02f8ac202b6dad2c0dd5e0913745b47a37998274" - integrity sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA== - -util-deprecate@^1.0.1: - version "1.0.2" - resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -wrappy@1: - version "1.0.2" - resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -zod-validation-error@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/zod-validation-error/-/zod-validation-error-1.3.1.tgz#7134579d2ba3994495133b879a076786c8c270f5" - integrity sha512-cNEXpla+tREtNdAnNKY4xKY1SGOn2yzyuZMu4O0RQylX9apRpUjNcPkEc3uHIAr5Ct7LenjZt6RzjEH6+JsqVQ== - -zod@^3.21.4: - version "3.21.4" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.21.4.tgz#10882231d992519f0a10b5dd58a38c9dabbb64db" - integrity sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw== From 62164a939f40e0257ff38b3911803978b44881f2 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 03:47:43 +0100 Subject: [PATCH 02/27] Switch to bun:sqlite --- server/bun.lock | 80 ------------------------------------------ server/package.json | 2 -- server/src/database.ts | 17 +++++---- 3 files changed, 10 insertions(+), 89 deletions(-) diff --git a/server/bun.lock b/server/bun.lock index c244870a..671ea88d 100644 --- a/server/bun.lock +++ b/server/bun.lock @@ -5,13 +5,11 @@ "name": "civmap-server", "dependencies": { "async-mutex": "^0.4.0", - "better-sqlite3": "^9.5.0", "kysely": "^0.26.1", "zod": "^3.21.4", "zod-validation-error": "^1.3.1", }, "devDependencies": { - "@types/better-sqlite3": "^7.6.4", "@types/bun": "^1.2.15", "prettier": "^3.0.1", "typescript": "^5.1.6", @@ -19,102 +17,24 @@ }, }, "packages": { - "@types/better-sqlite3": ["@types/better-sqlite3@7.6.13", "", { "dependencies": { "@types/node": "*" } }, 
"sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA=="], - "@types/bun": ["@types/bun@1.2.15", "", { "dependencies": { "bun-types": "1.2.15" } }, "sha512-U1ljPdBEphF0nw1MIk0hI7kPg7dFdPyM7EenHsp6W5loNHl7zqy6JQf/RKCgnUn2KDzUpkBwHPnEJEjII594bA=="], "@types/node": ["@types/node@24.0.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-yZQa2zm87aRVcqDyH5+4Hv9KYgSdgwX1rFnGvpbzMaC7YAljmhBET93TPiTd3ObwTL+gSpIzPKg5BqVxdCvxKg=="], "async-mutex": ["async-mutex@0.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA=="], - "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], - - "better-sqlite3": ["better-sqlite3@9.6.0", "", { "dependencies": { "bindings": "^1.5.0", "prebuild-install": "^7.1.1" } }, "sha512-yR5HATnqeYNVnkaUTf4bOP2dJSnyhP4puJN/QPRyx4YkBEEUxib422n2XzPqDEHjQQqazoYoADdAm5vE15+dAQ=="], - - "bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="], - - "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], - - "buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], - "bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="], - "chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="], - - "decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="], - - "deep-extend": ["deep-extend@0.6.0", "", {}, "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="], - - "detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], - - "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="], - - "expand-template": ["expand-template@2.0.3", "", {}, "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="], - - "file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="], - - "fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="], - - "github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="], - - "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], - - "inherits": ["inherits@2.0.4", "", {}, 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], - - "ini": ["ini@1.3.8", "", {}, "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="], - "kysely": ["kysely@0.26.3", "", {}, "sha512-yWSgGi9bY13b/W06DD2OCDDHQmq1kwTGYlQ4wpZkMOJqMGCstVCFIvxCCVG4KfY1/3G0MhDAcZsip/Lw8/vJWw=="], - "mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="], - - "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], - - "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="], - - "napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="], - - "node-abi": ["node-abi@3.75.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-OhYaY5sDsIka7H7AtijtI9jwGYLyl29eQn/W623DiN/MIv5sUqc4g7BIDThX+gb7di9f6xK02nkp8sdfFWZLTg=="], - - "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], - - "prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="], - "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], - "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="], - - "rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="], - - "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - - "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], - - "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "simple-concat": ["simple-concat@1.0.1", "", {}, "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="], - - "simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="], - - "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": 
{ "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], - - "strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="], - - "tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], - - "tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="], - "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - "tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="], - "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], "undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], - "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], - - "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], - "zod": ["zod@3.25.56", "", {}, "sha512-rd6eEF3BTNvQnR2e2wwolfTmUTnp70aUTqr0oaGbHifzC3BKJsoV+Gat8vxUMR1hwOKBs6El+qWehrHbCpW6SQ=="], "zod-validation-error": ["zod-validation-error@1.5.0", "", { "peerDependencies": { "zod": "^3.18.0" } }, "sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw=="], diff --git a/server/package.json b/server/package.json index 6941547e..43b99207 100644 --- a/server/package.json +++ b/server/package.json @@ -14,13 +14,11 @@ }, "dependencies": { "async-mutex": "^0.4.0", - "better-sqlite3": "^9.5.0", "kysely": "^0.26.1", "zod": "^3.21.4", "zod-validation-error": "^1.3.1" }, "devDependencies": { - "@types/better-sqlite3": "^7.6.4", "@types/bun": "^1.2.15", "prettier": "^3.0.1", "typescript": "^5.1.6" diff --git a/server/src/database.ts b/server/src/database.ts index 0b073c05..7703e997 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -1,5 +1,5 @@ import * as kysely from "kysely"; -import sqlite from "better-sqlite3"; +import { Database as BunSqliteDatabase } from "bun:sqlite"; import { DATA_FOLDER } from "./metadata"; import { type Pos2D } from "./model"; @@ -25,12 +25,15 @@ export function get() { if (!database) { database = new kysely.Kysely({ dialect: new kysely.SqliteDialect({ - database: async () => - sqlite( - process.env["SQLITE_PATH"] ?? - `${DATA_FOLDER}/db.sqlite`, - {}, - ), + database: async () => { + return new BunSqliteDatabase( + Bun.env["SQLITE_PATH"] ?? 
`${DATA_FOLDER}/db.sqlite`, + { + create: true, + readwrite: true, + } + ) as unknown as kysely.SqliteDatabase + } }), }); } From 3e2db02ac2620e162107451c152b160f469e45dd Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 03:52:55 +0100 Subject: [PATCH 03/27] Fix server port compilation issue Seems like node lets you pass in a stringified port, whereas Bun requires a number. --- server/src/server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/server.ts b/server/src/server.ts index 73e21bf4..3b80ad0e 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -36,7 +36,7 @@ export class TcpServer { this.server.close(); }); - this.server.listen({ port: PORT, hostname: HOST }, () => { + this.server.listen({ port: parseInt(PORT), hostname: HOST }, () => { console.log("[TcpServer] Listening on", HOST, PORT); }); } From b85a19f86a26bd23a10ae6397c3c8238585cbf47 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 03:54:14 +0100 Subject: [PATCH 04/27] Update BufWriter to use ArrayBufferSink --- server/src/protocol/BufWriter.ts | 126 +++++++++++-------------- server/src/protocol/CatchupPacket.ts | 4 +- server/src/protocol/ChunkTilePacket.ts | 4 +- server/src/protocol/index.ts | 2 +- server/src/server.ts | 2 +- 5 files changed, 60 insertions(+), 78 deletions(-) diff --git a/server/src/protocol/BufWriter.ts b/server/src/protocol/BufWriter.ts index 0dba9ea9..acaf0338 100644 --- a/server/src/protocol/BufWriter.ts +++ b/server/src/protocol/BufWriter.ts @@ -1,100 +1,82 @@ -/** Each write advances the internal offset into the buffer. - * Grows the buffer to twice the current size if a write would exceed the buffer. */ -export class BufWriter { - private off = 0; - private buf: Buffer; +import { ArrayBufferSink } from "bun"; - constructor(initialSize?: number) { - this.buf = Buffer.alloc(initialSize || 1024); - } +export class BufWriter { + private readonly sink: ArrayBufferSink; + private readonly view = new DataView(new ArrayBuffer(8)); // 64 bits - /** Returns a slice reference to the written bytes so far. 
*/ - getBuffer() { - return this.buf.slice(0, this.off); + public constructor() { + this.sink = new ArrayBufferSink(); + this.sink.start({ + asUint8Array: true, + stream: true, + }); } - writeUInt8(val: number) { - this.ensureSpace(1); - this.buf.writeUInt8(val, this.off); - this.off += 1; + public getBuffer(): Buffer { + return Buffer.from(this.sink.flush() as Uint8Array); } - writeInt8(val: number) { - this.ensureSpace(1); - this.buf.writeInt8(val, this.off); - this.off += 1; + public writeUnt8(val: number) { + this.view.setUint8(0, val); + this.sink.write(this.view.buffer.slice(0, 1)); } - writeUInt16(val: number) { - this.ensureSpace(2); - this.buf.writeUInt16BE(val, this.off); - this.off += 2; + public writeInt8(val: number) { + this.view.setInt8(0, val); + this.sink.write(this.view.buffer.slice(0, 1)); } - writeInt16(val: number) { - this.ensureSpace(2); - this.buf.writeInt16BE(val, this.off); - this.off += 2; + public writeUnt16(val: number) { + this.view.setUint16(0, val); + this.sink.write(this.view.buffer.slice(0, 2)); } - writeUInt32(val: number) { - this.ensureSpace(4); - this.buf.writeUInt32BE(val, this.off); - this.off += 4; + public writeInt16(val: number) { + this.view.setInt16(0, val); + this.sink.write(this.view.buffer.slice(0, 2)); } - writeInt32(val: number) { - this.ensureSpace(4); - this.buf.writeInt32BE(val, this.off); - this.off += 4; + public writeUnt32(val: number) { + this.view.setUint32(0, val); + this.sink.write(this.view.buffer.slice(0, 4)); } - writeUInt64(val: number) { - this.ensureSpace(8); - this.buf.writeBigUInt64BE(BigInt(val), this.off); - this.off += 8; + public writeInt32(val: number) { + this.view.setInt32(0, val); + this.sink.write(this.view.buffer.slice(0, 4)); } - writeInt64(val: number) { - this.ensureSpace(8); - this.buf.writeBigInt64BE(BigInt(val), this.off); - this.off += 8; + public writeUnt64(val: number | bigint) { + if (typeof val === "number") { + val = BigInt(val); + } + this.view.setBigUint64(0, val); + this.sink.write(this.view.buffer); } - /** length-prefixed (32 bits), UTF-8 encoded */ - writeString(str: string) { - const strBuf = Buffer.from(str, "utf8"); - this.ensureSpace(4 + strBuf.length); - this.buf.writeUInt32BE(strBuf.length, this.off); - this.off += 4; - this.buf.set(strBuf, this.off); - this.off += strBuf.length; + public writeInt64(val: number | bigint) { + if (typeof val === "number") { + val = BigInt(val); + } + this.view.setBigInt64(0, val); + this.sink.write(this.view.buffer); } - /** length-prefixed (32 bits), UTF-8 encoded */ - writeBufWithLen(buf: Buffer) { - this.ensureSpace(4 + buf.length); - this.buf.writeUInt32BE(buf.length, this.off); - this.off += 4; - this.buf.set(buf, this.off); - this.off += buf.length; + /** length-prefixed (u32), UTF-8 encoded */ + readonly #stringEncoder = new TextEncoder(); + public writeString(str: string) { + const bytes = this.#stringEncoder.encode(str); + this.writeUnt32(bytes.byteLength); + this.sink.write(bytes); } - writeBufRaw(buf: Buffer) { - this.ensureSpace(buf.length); - this.buf.set(buf, this.off); - this.off += buf.length; + /** length-prefixed (u32), UTF-8 encoded */ + public writeBufWithLen(buf: Buffer) { + this.writeUnt32(buf.byteLength); + this.writeBufRaw(buf); } - private ensureSpace(bytes: number) { - let len = this.buf.length; - while (len <= this.off + bytes) { - len = len * 2; - } - if (len !== this.buf.length) { - const newBuf = Buffer.alloc(len); - this.buf.copy(newBuf, 0, 0, this.off); - this.buf = newBuf; - } + public writeBufRaw(buf: Buffer) { + 
this.sink.write(buf); } } diff --git a/server/src/protocol/CatchupPacket.ts b/server/src/protocol/CatchupPacket.ts index d05f839b..3d1c829e 100644 --- a/server/src/protocol/CatchupPacket.ts +++ b/server/src/protocol/CatchupPacket.ts @@ -12,11 +12,11 @@ export namespace CatchupPacket { if (pkt.chunks.length < 1) throw new Error(`Catchup chunks must not be empty`); writer.writeString(pkt.world); - writer.writeUInt32(pkt.chunks.length); + writer.writeUnt32(pkt.chunks.length); for (const row of pkt.chunks) { writer.writeInt32(row.chunkX); writer.writeInt32(row.chunkZ); - writer.writeUInt64(row.timestamp); + writer.writeUnt64(row.timestamp); } } } diff --git a/server/src/protocol/ChunkTilePacket.ts b/server/src/protocol/ChunkTilePacket.ts index eee9f326..1728a79c 100644 --- a/server/src/protocol/ChunkTilePacket.ts +++ b/server/src/protocol/ChunkTilePacket.ts @@ -31,8 +31,8 @@ export namespace ChunkTilePacket { writer.writeString(pkt.world); writer.writeInt32(pkt.chunk_x); writer.writeInt32(pkt.chunk_z); - writer.writeUInt64(pkt.ts); - writer.writeUInt16(pkt.data.version); + writer.writeUnt64(pkt.ts); + writer.writeUnt16(pkt.data.version); writer.writeBufRaw(pkt.data.hash); writer.writeBufRaw(pkt.data.data); // XXX do we need to prefix with length? } diff --git a/server/src/protocol/index.ts b/server/src/protocol/index.ts index da615fb8..f956e73c 100644 --- a/server/src/protocol/index.ts +++ b/server/src/protocol/index.ts @@ -59,7 +59,7 @@ export function decodePacket(reader: BufReader): ClientPacket { } export function encodePacket(pkt: ServerPacket, writer: BufWriter): void { - writer.writeUInt8(getPacketId(pkt.type)); + writer.writeUnt8(getPacketId(pkt.type)); switch (pkt.type) { case "ChunkTile": return ChunkTilePacket.encode(pkt, writer); diff --git a/server/src/server.ts b/server/src/server.ts index 3b80ad0e..8f9869b0 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -204,7 +204,7 @@ export class TcpClient { throw new Error(`Can't encrypt: handshake not finished`); const writer = new BufWriter(); // TODO size hint - writer.writeUInt32(0); // set later, but reserve space in buffer + writer.writeUnt32(0); // set later, but reserve space in buffer encodePacket(pkt, writer); let buf = writer.getBuffer(); buf.writeUInt32BE(buf.length - 4, 0); // write into space reserved above From a63ce4d05b2c06abb14d9b30ca3e6c05de2fc6c6 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 04:13:43 +0100 Subject: [PATCH 05/27] Switch to Bun.listen() This also means that the message handler will be awaited. 
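For reference, a minimal sketch of the Bun.listen() socket-handler API this commit switches to; the `ConnState` type, the connection counter, and the handler bodies below are illustrative only, not the server's real ones:

```ts
import { listen } from "bun";

// Each socket carries an application-defined `data` payload; here it is a
// plain object tracking a connection id (hypothetical example type).
type ConnState = { id: number };

let nextId = 0;
const server = listen<ConnState>({
    hostname: "127.0.0.1",
    port: 12312,
    socket: {
        binaryType: "buffer", // deliver the `data` callback payload as a Buffer
        open(socket) {
            socket.data = { id: nextId++ };
        },
        // Handlers may be declared async and return a promise.
        async data(socket, data) {
            console.log(`conn ${socket.data.id}: received ${data.byteLength} bytes`);
        },
        close(socket) {
            console.log(`conn ${socket.data.id}: closed`);
        },
    },
});
```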
--- server/src/server.ts | 153 +++++++++++++++++++------------------------ 1 file changed, 69 insertions(+), 84 deletions(-) diff --git a/server/src/server.ts b/server/src/server.ts index 8f9869b0..8fdda362 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1,5 +1,5 @@ +import { type TCPSocketListener, type Socket, listen } from "bun"; import crypto from "crypto"; -import net from "net"; import { Main } from "./main"; import type { ClientPacket, ServerPacket } from "./protocol"; import { decodePacket, encodePacket } from "./protocol"; @@ -14,7 +14,7 @@ const { PORT = "12312", HOST = "127.0.0.1" } = process.env; type ProtocolHandler = Main; // TODO cleanup export class TcpServer { - server: net.Server; + server: TCPSocketListener; clients: Record = {}; keyPair = crypto.generateKeyPairSync("rsa", { modulusLength: 1024 }); @@ -25,20 +25,30 @@ export class TcpServer { }); constructor(readonly handler: ProtocolHandler) { - this.server = net.createServer({}, (socket) => { - const client = new TcpClient(socket, this, handler); - this.clients[client.id] = client; - socket.on("close", () => delete this.clients[client.id]); - }); - - this.server.on("error", (err: Error) => { - console.error("[TcpServer] Error:", err); - this.server.close(); - }); - - this.server.listen({ port: parseInt(PORT), hostname: HOST }, () => { - console.log("[TcpServer] Listening on", HOST, PORT); + const self = this; + this.server = listen({ + hostname: HOST, + port: parseInt(PORT), + socket: { + binaryType: "buffer", + async open(socket) { + const client = new TcpClient(socket, self, handler); + self.clients[client.id] = socket.data = client; + }, + async close(socket, err) { + const client: TcpClient = socket.data; + delete self.clients[client.id]; + if ((err ?? null) !== null) { + client.warn(`Closed due to an error!`, err); + } + }, + async data(socket, data) { + const client: TcpClient = socket.data; + await client.handleReceivedData(data); + }, + } }); + console.log("[TcpServer] Listening on", HOST, PORT); } decrypt(buf: Buffer) { @@ -81,85 +91,60 @@ export class TcpClient { }>; constructor( - private socket: net.Socket, + private socket: Socket, private server: TcpServer, private handler: ProtocolHandler, ) { this.log("Connected from", socket.remoteAddress); handler.handleClientConnected(this); + } - /** Accumulates received data, containing none, one, or multiple frames; the last frame may be partial only. 
*/ - let accBuf: Buffer = Buffer.alloc(0); - - socket.on("data", async (data: Buffer) => { - try { - if (this.cryptoPromise) { - const { decipher } = await this.cryptoPromise; - data = decipher.update(data); - } - - // creating a new buffer every time is fine in our case, because we expect most frames to be large - accBuf = Buffer.concat([accBuf, data]); - - // we may receive multiple frames in one call - while (true) { - if (accBuf.length <= 4) return; // wait for more data - const frameSize = accBuf.readUInt32BE(); - - // prevent Out of Memory - if (frameSize > this.maxFrameSize) { - return this.kick( - "Frame too large: " + - frameSize + - " have " + - accBuf.length, - ); - } + static readonly #EMPTY_BUFFER = Buffer.allocUnsafe(0); + #receivedBuffer: Buffer = TcpClient.#EMPTY_BUFFER; + public async handleReceivedData( + data: Buffer + ) { + if (this.cryptoPromise) { + data = (await this.cryptoPromise).decipher.update(data); + } - if (accBuf.length < 4 + frameSize) return; // wait for more data + // creating a new buffer every time is fine in our case, because we expect most frames to be large + this.#receivedBuffer = Buffer.concat([ + this.#receivedBuffer, + data + ]); + + // we may receive multiple frames in one call + while (true) { + if (this.#receivedBuffer.byteLength <= 4) return; // wait for more data + const frameSize = this.#receivedBuffer.readUInt32BE(); + + // prevent Out of Memory + if (frameSize > this.maxFrameSize) { + return this.kick( + "Frame too large: " + + frameSize + + " have " + + this.#receivedBuffer.byteLength, + ); + } - const frameReader = new BufReader(accBuf); - frameReader.readUInt32(); // skip frame size - let pktBuf = frameReader.readBufLen(frameSize); - accBuf = frameReader.readRemainder(); + if (this.#receivedBuffer.byteLength < 4 + frameSize) return; // wait for more data - const reader = new BufReader(pktBuf); + const frameReader = new BufReader(this.#receivedBuffer.subarray(4)); + const packetBuffer = frameReader.readBufLen(frameSize); + this.#receivedBuffer = frameReader.readRemainder(); - try { - const packet = decodePacket(reader); - await this.handlePacketReceived(packet); - } catch (err) { - this.warn(err); - return this.kick("Error in packet handler"); - } - } - } catch (err) { + try { + const packet = decodePacket(new BufReader(packetBuffer)); + await this.handlePacketReceived(packet); + } + catch (err) { this.warn(err); - return this.kick("Error in data handler"); + this.kick("Error in packet handler"); + return; } - }); - - socket.on("close", (hadError: boolean) => { - this.log("Closed.", { hadError }); - }); - - socket.on("end", () => { - // This event is called when the other end signals the end of transmission, meaning this client is - // still writeable, but no longer readable. In this situation we just want to close the socket. - // https://nodejs.org/dist/latest-v18.x/docs/api/net.html#event-end - this.kick("Ended"); - }); - - socket.on("timeout", () => { - // As per the docs, the socket needs to be manually closed. 
- // https://nodejs.org/dist/latest-v18.x/docs/api/net.html#event-timeout - this.kick("Timed out"); - }); - - socket.on("error", (err: Error) => { - this.warn("Error:", err); - this.kick("Socket error"); - }); + } } private async handlePacketReceived(pkt: ClientPacket) { @@ -181,7 +166,7 @@ export class TcpClient { kick(internalReason: string) { this.log(`Kicking:`, internalReason); - this.socket.destroy(); + this.socket.end(); } async send(pkt: ServerPacket) { @@ -198,7 +183,7 @@ export class TcpClient { } private async sendInternal(pkt: ServerPacket, doCrypto = false) { - if (!this.socket.writable) + if (this.socket.readyState <= 0) return this.debug("Socket closed, dropping", pkt.type); if (doCrypto && !this.cryptoPromise) throw new Error(`Can't encrypt: handshake not finished`); From ee418185600a39690074437cd9192cb3e9bf929c Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 04:18:51 +0100 Subject: [PATCH 06/27] Fix type-import warnings --- server/src/main.ts | 2 +- server/src/protocol/RegionTimestampsPacket.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main.ts b/server/src/main.ts index c082e3c2..fa202ecb 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,7 +1,7 @@ import "./cli"; import * as database from "./database"; import * as metadata from "./metadata"; -import { ClientPacket } from "./protocol"; +import { type ClientPacket } from "./protocol"; import { CatchupRequestPacket } from "./protocol/CatchupRequestPacket"; import { ChunkTilePacket } from "./protocol/ChunkTilePacket"; import { TcpClient, TcpServer } from "./server"; diff --git a/server/src/protocol/RegionTimestampsPacket.ts b/server/src/protocol/RegionTimestampsPacket.ts index e99a151c..795ea8e1 100644 --- a/server/src/protocol/RegionTimestampsPacket.ts +++ b/server/src/protocol/RegionTimestampsPacket.ts @@ -1,5 +1,5 @@ import { BufWriter } from "./BufWriter"; -import { CatchupRegion } from "../model"; +import { type CatchupRegion } from "../model"; export interface RegionTimestampsPacket { type: "RegionTimestamps"; From 8c03847775e26b8deda0926342c64422e33b041b Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 04:19:16 +0100 Subject: [PATCH 07/27] Add compile-to-executable build script --- server/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/package.json b/server/package.json index 43b99207..7e830cfe 100644 --- a/server/package.json +++ b/server/package.json @@ -10,7 +10,8 @@ "format": "bunx prettier -w .", "test": "bun test ./src/*.test.ts", "start": "bun src/main.ts", - "start:dev": "bun --inspect src/main.ts" + "start:dev": "bun --inspect src/main.ts", + "compile": "bun build --compile . --outfile out/mapsync-server" }, "dependencies": { "async-mutex": "^0.4.0", From 037628ec13a2263def59474e43b936cb364060d8 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 04:39:28 +0100 Subject: [PATCH 08/27] Minor fixup of packet serialisation This is largely just renaming "world" to "dimension" and removing unused imports/functions. 
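The rename is source-level only: every remaining packet still reads and writes its fields in the same order, so the wire format is untouched. A minimal sketch of the effect on one packet interface (illustrative; field order as in the decoder in the diff below):

    export interface HandshakePacket {
        type: "Handshake";
        modVersion: string;
        mojangName: string;
        gameAddress: string;
        dimension: string; // was `world`; still the fourth string on the wire
    }
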
--- server/src/main.ts | 14 +++++++------- server/src/protocol/CatchupPacket.ts | 12 ++++++------ server/src/protocol/CatchupRequestPacket.ts | 6 +++--- server/src/protocol/ChunkTilePacket.ts | 6 +++--- server/src/protocol/EncryptionRequestPacket.ts | 9 --------- server/src/protocol/EncryptionResponsePacket.ts | 6 ------ server/src/protocol/HandshakePacket.ts | 5 ++--- server/src/protocol/RegionCatchupPacket.ts | 15 +++++++-------- server/src/protocol/RegionTimestampsPacket.ts | 6 +++--- server/src/server.ts | 2 +- 10 files changed, 32 insertions(+), 49 deletions(-) diff --git a/server/src/main.ts b/server/src/main.ts index fa202ecb..f50342d5 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -50,7 +50,7 @@ export class Main { const timestamps = await database.getRegionTimestamps(client.world!); client.send({ type: "RegionTimestamps", - world: client.world!, + dimension: client.world!, regions: timestamps, }); } @@ -83,7 +83,7 @@ export class Main { await database .storeChunkData( - pkt.world, + pkt.dimension, pkt.chunk_x, pkt.chunk_z, client.uuid, @@ -112,13 +112,13 @@ export class Main { for (const req of pkt.chunks) { let chunk = await database.getChunkData( - pkt.world, + pkt.dimension, req.chunkX, req.chunkZ, ); if (!chunk) { console.error(`${client.name} requested unavailable chunk`, { - world: pkt.world, + world: pkt.dimension, ...req, }); continue; @@ -129,7 +129,7 @@ export class Main { client.send({ type: "ChunkTile", - world: pkt.world, + dimension: pkt.dimension, chunk_x: req.chunkX, chunk_z: req.chunkX, ts: req.timestamp, @@ -150,10 +150,10 @@ export class Main { throw new Error(`${client.name} is not authenticated`); const chunks = await database.getChunkTimestamps( - pkt.world, + pkt.dimension, pkt.regions, ); if (chunks.length) - client.send({ type: "Catchup", world: pkt.world, chunks }); + client.send({ type: "Catchup", dimension: pkt.dimension, chunks }); } } diff --git a/server/src/protocol/CatchupPacket.ts b/server/src/protocol/CatchupPacket.ts index 3d1c829e..1d9a452c 100644 --- a/server/src/protocol/CatchupPacket.ts +++ b/server/src/protocol/CatchupPacket.ts @@ -3,7 +3,7 @@ import { BufWriter } from "./BufWriter"; export interface CatchupPacket { type: "Catchup"; - world: string; + dimension: string; chunks: CatchupChunk[]; } @@ -11,12 +11,12 @@ export namespace CatchupPacket { export function encode(pkt: CatchupPacket, writer: BufWriter) { if (pkt.chunks.length < 1) throw new Error(`Catchup chunks must not be empty`); - writer.writeString(pkt.world); + writer.writeString(pkt.dimension); writer.writeUnt32(pkt.chunks.length); - for (const row of pkt.chunks) { - writer.writeInt32(row.chunkX); - writer.writeInt32(row.chunkZ); - writer.writeUnt64(row.timestamp); + for (const chunk of pkt.chunks) { + writer.writeInt32(chunk.chunkX); + writer.writeInt32(chunk.chunkZ); + writer.writeUnt64(chunk.timestamp); } } } diff --git a/server/src/protocol/CatchupRequestPacket.ts b/server/src/protocol/CatchupRequestPacket.ts index a14ddc86..b7b4bbf3 100644 --- a/server/src/protocol/CatchupRequestPacket.ts +++ b/server/src/protocol/CatchupRequestPacket.ts @@ -3,13 +3,13 @@ import { BufReader } from "./BufReader"; export interface CatchupRequestPacket { type: "CatchupRequest"; - world: string; + dimension: string; chunks: CatchupChunk[]; } export namespace CatchupRequestPacket { export function decode(reader: BufReader): CatchupRequestPacket { - const world = reader.readString(); + const dimension = reader.readString(); const chunks: CatchupChunk[] = new 
Array(reader.readUInt32()); for (let i = 0; i < chunks.length; i++) { chunks[i] = { @@ -18,6 +18,6 @@ export namespace CatchupRequestPacket { timestamp: reader.readUInt64(), }; } - return { type: "CatchupRequest", world, chunks }; + return { type: "CatchupRequest", dimension, chunks }; } } diff --git a/server/src/protocol/ChunkTilePacket.ts b/server/src/protocol/ChunkTilePacket.ts index 1728a79c..dc614eba 100644 --- a/server/src/protocol/ChunkTilePacket.ts +++ b/server/src/protocol/ChunkTilePacket.ts @@ -4,7 +4,7 @@ import { SHA1_HASH_LENGTH } from "../constants"; export interface ChunkTilePacket { type: "ChunkTile"; - world: string; + dimension: string; chunk_x: number; chunk_z: number; ts: number; @@ -15,7 +15,7 @@ export namespace ChunkTilePacket { export function decode(reader: BufReader): ChunkTilePacket { return { type: "ChunkTile", - world: reader.readString(), + dimension: reader.readString(), chunk_x: reader.readInt32(), chunk_z: reader.readInt32(), ts: reader.readUInt64(), @@ -28,7 +28,7 @@ export namespace ChunkTilePacket { } export function encode(pkt: ChunkTilePacket, writer: BufWriter) { - writer.writeString(pkt.world); + writer.writeString(pkt.dimension); writer.writeInt32(pkt.chunk_x); writer.writeInt32(pkt.chunk_z); writer.writeUnt64(pkt.ts); diff --git a/server/src/protocol/EncryptionRequestPacket.ts b/server/src/protocol/EncryptionRequestPacket.ts index 148e4212..a49b4319 100644 --- a/server/src/protocol/EncryptionRequestPacket.ts +++ b/server/src/protocol/EncryptionRequestPacket.ts @@ -1,4 +1,3 @@ -import { BufReader } from "./BufReader"; import { BufWriter } from "./BufWriter"; export interface EncryptionRequestPacket { @@ -8,14 +7,6 @@ export interface EncryptionRequestPacket { } export namespace EncryptionRequestPacket { - export function decode(reader: BufReader): EncryptionRequestPacket { - return { - type: "EncryptionRequest", - publicKey: reader.readBufWithLen(), - verifyToken: reader.readBufWithLen(), - }; - } - export function encode(pkt: EncryptionRequestPacket, writer: BufWriter) { writer.writeBufWithLen(pkt.publicKey); writer.writeBufWithLen(pkt.verifyToken); diff --git a/server/src/protocol/EncryptionResponsePacket.ts b/server/src/protocol/EncryptionResponsePacket.ts index e17adc5f..35e6eb24 100644 --- a/server/src/protocol/EncryptionResponsePacket.ts +++ b/server/src/protocol/EncryptionResponsePacket.ts @@ -1,5 +1,4 @@ import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; export interface EncryptionResponsePacket { type: "EncryptionResponse"; @@ -17,9 +16,4 @@ export namespace EncryptionResponsePacket { verifyToken: reader.readBufWithLen(), }; } - - export function encode(pkt: EncryptionResponsePacket, writer: BufWriter) { - writer.writeBufWithLen(pkt.sharedSecret); - writer.writeBufWithLen(pkt.verifyToken); - } } diff --git a/server/src/protocol/HandshakePacket.ts b/server/src/protocol/HandshakePacket.ts index 32bd4b82..747b012b 100644 --- a/server/src/protocol/HandshakePacket.ts +++ b/server/src/protocol/HandshakePacket.ts @@ -1,12 +1,11 @@ import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; export interface HandshakePacket { type: "Handshake"; modVersion: string; mojangName: string; gameAddress: string; - world: string; + dimension: string; } export namespace HandshakePacket { @@ -16,7 +15,7 @@ export namespace HandshakePacket { modVersion: reader.readString(), mojangName: reader.readString(), gameAddress: reader.readString(), - world: reader.readString(), + dimension: reader.readString(), }; } 
} diff --git a/server/src/protocol/RegionCatchupPacket.ts b/server/src/protocol/RegionCatchupPacket.ts index 13890d9b..15eb0e8e 100644 --- a/server/src/protocol/RegionCatchupPacket.ts +++ b/server/src/protocol/RegionCatchupPacket.ts @@ -3,21 +3,20 @@ import { type Pos2D } from "../model"; export interface RegionCatchupPacket { type: "RegionCatchup"; - world: string; + dimension: string; regions: Pos2D[]; } export namespace RegionCatchupPacket { export function decode(reader: BufReader): RegionCatchupPacket { - let world = reader.readString(); - const len = reader.readInt16(); - const regions: Pos2D[] = []; - for (let i = 0; i < len; i++) { - regions.push({ + const dimension = reader.readString(); + const regions: Pos2D[] = new Array(reader.readInt16()); + for (let i = 0; i < regions.length; i++) { + regions[i] = { x: reader.readInt16(), z: reader.readInt16(), - }); + }; } - return { type: "RegionCatchup", world, regions }; + return { type: "RegionCatchup", dimension, regions }; } } diff --git a/server/src/protocol/RegionTimestampsPacket.ts b/server/src/protocol/RegionTimestampsPacket.ts index 795ea8e1..6a74d46d 100644 --- a/server/src/protocol/RegionTimestampsPacket.ts +++ b/server/src/protocol/RegionTimestampsPacket.ts @@ -3,17 +3,17 @@ import { type CatchupRegion } from "../model"; export interface RegionTimestampsPacket { type: "RegionTimestamps"; - world: string; + dimension: string; regions: Array; } export namespace RegionTimestampsPacket { export function encode(pkt: RegionTimestampsPacket, writer: BufWriter) { - writer.writeString(pkt.world); + writer.writeString(pkt.dimension); writer.writeInt16(pkt.regions.length); console.log("Sending regions " + JSON.stringify(pkt.regions)); for (let i = 0; i < pkt.regions.length; i++) { - let region = pkt.regions[i]; + const region = pkt.regions[i]; writer.writeInt16(region.regionX); writer.writeInt16(region.regionZ); writer.writeInt64(region.timestamp); diff --git a/server/src/server.ts b/server/src/server.ts index 8fdda362..22991b57 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -217,7 +217,7 @@ export class TcpClient { this.gameAddress = packet.gameAddress; this.claimedMojangName = packet.mojangName; - this.world = packet.world; + this.world = packet.dimension; this.verifyToken = crypto.randomBytes(4); await this.sendInternal({ From 29baa44e239ec4dbb455eb7a319253eb3c76a6b5 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 04:53:39 +0100 Subject: [PATCH 09/27] Move encryption and ciphers to crypto file --- server/src/crypto.ts | 33 +++++++++++++++++++++++ server/src/server.ts | 62 ++++++++++++-------------------------------- 2 files changed, 49 insertions(+), 46 deletions(-) create mode 100644 server/src/crypto.ts diff --git a/server/src/crypto.ts b/server/src/crypto.ts new file mode 100644 index 00000000..0d9d6cdd --- /dev/null +++ b/server/src/crypto.ts @@ -0,0 +1,33 @@ +import node_crypto from "node:crypto"; +export { randomBytes, createHash } from "node:crypto"; + +const KEY_PAIR = node_crypto.generateKeyPairSync("rsa", { modulusLength: 1024 }); +export const PUBLIC_KEY = KEY_PAIR.publicKey.export({ + type: "spki", + format: "der", +}); + +export function decrypt( + buf: Buffer +): Buffer { + return node_crypto.privateDecrypt( + { + key: KEY_PAIR.privateKey, + padding: node_crypto.constants.RSA_PKCS1_PADDING, + }, + buf, + ); +} + +export type Ciphers = { + encipher: node_crypto.Cipheriv, + decipher: node_crypto.Decipheriv +}; +export function createCiphers( + secret: Buffer +): Ciphers { + return { + 
encipher: node_crypto.createCipheriv("aes-128-cfb8", secret, secret), + decipher: node_crypto.createDecipheriv("aes-128-cfb8", secret, secret) + }; +} diff --git a/server/src/server.ts b/server/src/server.ts index 22991b57..7487c8f2 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1,5 +1,5 @@ import { type TCPSocketListener, type Socket, listen } from "bun"; -import crypto from "crypto"; +import * as crypto from "./crypto.ts"; import { Main } from "./main"; import type { ClientPacket, ServerPacket } from "./protocol"; import { decodePacket, encodePacket } from "./protocol"; @@ -17,13 +17,6 @@ export class TcpServer { server: TCPSocketListener; clients: Record = {}; - keyPair = crypto.generateKeyPairSync("rsa", { modulusLength: 1024 }); - // precomputed for networking - publicKeyBuffer = this.keyPair.publicKey.export({ - type: "spki", - format: "der", - }); - constructor(readonly handler: ProtocolHandler) { const self = this; this.server = listen({ @@ -50,16 +43,6 @@ export class TcpServer { }); console.log("[TcpServer] Listening on", HOST, PORT); } - - decrypt(buf: Buffer) { - return crypto.privateDecrypt( - { - key: this.keyPair.privateKey, - padding: crypto.constants.RSA_PKCS1_PADDING, - }, - buf, - ); - } } let nextClientId = 1; @@ -85,10 +68,7 @@ export class TcpClient { private verifyToken?: Buffer; /** we need to wait for the mojang auth response * before we can en/decrypt packets following the handshake */ - private cryptoPromise?: Promise<{ - cipher: crypto.Cipher; - decipher: crypto.Decipher; - }>; + private ciphers: crypto.Ciphers | null = null; constructor( private socket: Socket, @@ -104,8 +84,8 @@ export class TcpClient { public async handleReceivedData( data: Buffer ) { - if (this.cryptoPromise) { - data = (await this.cryptoPromise).decipher.update(data); + if (this.ciphers) { + data = this.ciphers.decipher.update(data); } // creating a new buffer every time is fine in our case, because we expect most frames to be large @@ -170,7 +150,7 @@ export class TcpClient { } async send(pkt: ServerPacket) { - if (!this.cryptoPromise) { + if (!this.ciphers) { this.debug("Not encrypted, dropping packet", pkt.type); return; } @@ -185,7 +165,7 @@ export class TcpClient { private async sendInternal(pkt: ServerPacket, doCrypto = false) { if (this.socket.readyState <= 0) return this.debug("Socket closed, dropping", pkt.type); - if (doCrypto && !this.cryptoPromise) + if (doCrypto && !this.ciphers) throw new Error(`Can't encrypt: handshake not finished`); const writer = new BufWriter(); // TODO size hint @@ -195,15 +175,14 @@ export class TcpClient { buf.writeUInt32BE(buf.length - 4, 0); // write into space reserved above if (doCrypto) { - const { cipher } = await this.cryptoPromise!; - buf = cipher!.update(buf); + buf = this.ciphers!.encipher.update(buf); } this.socket.write(buf); } private async handleHandshakePacket(packet: HandshakePacket) { - if (this.cryptoPromise) throw new Error(`Already authenticated`); + if (this.ciphers) throw new Error(`Already authenticated`); if (this.verifyToken) throw new Error(`Encryption already started`); if (!SUPPORTED_VERSIONS.has(packet.modVersion)) { @@ -222,7 +201,7 @@ export class TcpClient { await this.sendInternal({ type: "EncryptionRequest", - publicKey: this.server.publicKeyBuffer, + publicKey: crypto.PUBLIC_KEY, verifyToken: this.verifyToken, }); } @@ -230,29 +209,29 @@ export class TcpClient { private async handleEncryptionResponsePacket( pkt: EncryptionResponsePacket, ) { - if (this.cryptoPromise) throw new Error(`Already 
authenticated`); + if (this.ciphers) throw new Error(`Already authenticated`); if (!this.claimedMojangName) throw new Error(`Encryption has not started: no mojangName`); if (!this.verifyToken) throw new Error(`Encryption has not started: no verifyToken`); - const verifyToken = this.server.decrypt(pkt.verifyToken); + const verifyToken = crypto.decrypt(pkt.verifyToken); if (!this.verifyToken.equals(verifyToken)) { throw new Error( `verifyToken mismatch: got ${verifyToken} expected ${this.verifyToken}`, ); } - const secret = this.server.decrypt(pkt.sharedSecret); + const secret = crypto.decrypt(pkt.sharedSecret); const shaHex = crypto .createHash("sha1") .update(secret) - .update(this.server.publicKeyBuffer) + .update(crypto.PUBLIC_KEY) .digest() .toString("hex"); - this.cryptoPromise = fetchHasJoined({ + this.ciphers = await fetchHasJoined({ username: this.claimedMojangName, shaHex, }).then(async (mojangAuth) => { @@ -267,19 +246,10 @@ export class TcpClient { this.mcName = mojangAuth.name; this.name += ":" + mojangAuth.name; - return { - cipher: crypto.createCipheriv("aes-128-cfb8", secret, secret), - decipher: crypto.createDecipheriv( - "aes-128-cfb8", - secret, - secret, - ), - }; + return crypto.createCiphers(secret); }); - await this.cryptoPromise.then(async () => { - await this.handler.handleClientAuthenticated(this); - }); + await this.handler.handleClientAuthenticated(this); } debug(...args: any[]) { From ffe097dbb934f88d2aa06db29413222e684473fd Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 05:13:36 +0100 Subject: [PATCH 10/27] Update typescript version --- server/bun.lock | 2 +- server/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/bun.lock b/server/bun.lock index 671ea88d..c3774d68 100644 --- a/server/bun.lock +++ b/server/bun.lock @@ -12,7 +12,7 @@ "devDependencies": { "@types/bun": "^1.2.15", "prettier": "^3.0.1", - "typescript": "^5.1.6", + "typescript": "^5.8.3", }, }, }, diff --git a/server/package.json b/server/package.json index 7e830cfe..40b1ac91 100644 --- a/server/package.json +++ b/server/package.json @@ -22,7 +22,7 @@ "devDependencies": { "@types/bun": "^1.2.15", "prettier": "^3.0.1", - "typescript": "^5.1.6" + "typescript": "^5.8.3" }, "prettier": { "useTabs": false, From e79d43c8e3557ed8577babf7d9ffdcf5c6d485ed Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 05:19:50 +0100 Subject: [PATCH 11/27] Resolve ProtocolHandler cleanup todo This creates a ProtocolHandler interface in server.ts that main.ts implements. Also moves host and port determination to the config, rather than environment. 
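A minimal sketch of the resulting wiring, with an illustrative stand-in for the loaded config (the real values come from metadata.Config):

    import { type ClientPacket } from "./protocol";
    import { type ProtocolHandler, TcpClient, TcpServer } from "./server";

    // Stand-in for the config loaded via metadata; only host/port matter here.
    declare const config: { host: string; port: number };

    // server.ts owns the socket lifecycle; main.ts supplies the behaviour by
    // implementing ProtocolHandler and passing it to the TcpServer constructor.
    const handler: ProtocolHandler = {
        async handleClientConnected(client: TcpClient) {},
        async handleClientDisconnected(client: TcpClient) {},
        async handleClientAuthenticated(client: TcpClient) {},
        async handleClientPacketReceived(client: TcpClient, packet: ClientPacket) {},
    };

    new TcpServer(config.host, config.port, handler);
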
--- server/src/main.ts | 266 +++++++++++++++++++++++-------------------- server/src/server.ts | 35 ++++-- 2 files changed, 168 insertions(+), 133 deletions(-) diff --git a/server/src/main.ts b/server/src/main.ts index f50342d5..4522d05e 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -4,7 +4,7 @@ import * as metadata from "./metadata"; import { type ClientPacket } from "./protocol"; import { CatchupRequestPacket } from "./protocol/CatchupRequestPacket"; import { ChunkTilePacket } from "./protocol/ChunkTilePacket"; -import { TcpClient, TcpServer } from "./server"; +import { type ProtocolHandler, TcpClient, TcpServer } from "./server"; import { RegionCatchupPacket } from "./protocol/RegionCatchupPacket"; let config: metadata.Config = null!; @@ -18,142 +18,158 @@ Promise.resolve().then(async () => { await metadata.loadWhitelist(); await metadata.loadUuidCache(); - new Main(); -}); - -type ProtocolClient = TcpClient; // TODO cleanup + const server = new TcpServer(config.host, config.port, new class implements ProtocolHandler { + public async handleClientConnected( + client: TcpClient + ) { -export class Main { - server = new TcpServer(this); + } - //Cannot be async, as it's caled from a synchronous constructor - handleClientConnected(client: ProtocolClient) {} + public async handleClientDisconnected( + client: TcpClient + ) { - async handleClientAuthenticated(client: ProtocolClient) { - if (!client.uuid) throw new Error("Client not authenticated"); + } - metadata.cachePlayerUuid(client.mcName!, client.uuid!); - await metadata.saveUuidCache(); + public async handleClientAuthenticated( + client: TcpClient + ) { + if (!client.uuid) { + throw new Error("Client not authenticated"); + } - if (config.whitelist) { - if (!metadata.whitelist.has(client.uuid)) { - client.log( - `Rejected unwhitelisted user ${client.mcName} (${client.uuid})`, - ); - client.kick(`Not whitelisted`); - return; + metadata.cachePlayerUuid(client.mcName!, client.uuid!); + await metadata.saveUuidCache(); + + if (config.whitelist) { + if (!metadata.whitelist.has(client.uuid)) { + client.log( + `Rejected unwhitelisted user ${client.mcName} (${client.uuid})`, + ); + client.kick(`Not whitelisted`); + return; + } } - } - // TODO check version, mc server, user access - - const timestamps = await database.getRegionTimestamps(client.world!); - client.send({ - type: "RegionTimestamps", - dimension: client.world!, - regions: timestamps, - }); - } - - handleClientDisconnected(client: ProtocolClient) {} - - handleClientPacketReceived(client: ProtocolClient, pkt: ClientPacket) { - client.debug(client.mcName + " <- " + pkt.type); - switch (pkt.type) { - case "ChunkTile": - return this.handleChunkTilePacket(client, pkt); - case "CatchupRequest": - return this.handleCatchupRequest(client, pkt); - case "RegionCatchup": - return this.handleRegionCatchupPacket(client, pkt); - default: - throw new Error( - `Unknown packet '${(pkt as any).type}' from client ${ - client.id - }`, - ); + // TODO check version, mc server, user access + + const timestamps = await database.getRegionTimestamps(client.world!); + await client.send({ + type: "RegionTimestamps", + dimension: client.world!, + regions: timestamps, + }); } - } - - async handleChunkTilePacket(client: ProtocolClient, pkt: ChunkTilePacket) { - if (!client.uuid) - throw new Error(`${client.name} is not authenticated`); - - // TODO ignore if same chunk hash exists in db - - await database - .storeChunkData( - pkt.dimension, - pkt.chunk_x, - pkt.chunk_z, - client.uuid, - pkt.ts, - 
pkt.data.version, - pkt.data.hash, - pkt.data.data, - ) - .catch(console.error); - - // TODO small timeout, then skip if other client already has it - for (const otherClient of Object.values(this.server.clients)) { - if (client === otherClient) continue; - otherClient.send(pkt); + + public async handleClientPacketReceived( + client: TcpClient, + packet: ClientPacket + ) { + client.debug(client.mcName + " <- " + packet.type); + switch (packet.type) { + case "ChunkTile": + return this.handleChunkTilePacket(client, packet); + case "CatchupRequest": + return this.handleCatchupRequest(client, packet); + case "RegionCatchup": + return this.handleRegionCatchupPacket(client, packet); + default: + throw new Error( + `Unknown packet '${(packet as any).type}' from client ${ + client.id + }`, + ); + } } - // TODO queue tile render for web map - } - - async handleCatchupRequest( - client: ProtocolClient, - pkt: CatchupRequestPacket, - ) { - if (!client.uuid) - throw new Error(`${client.name} is not authenticated`); - - for (const req of pkt.chunks) { - let chunk = await database.getChunkData( - pkt.dimension, - req.chunkX, - req.chunkZ, + private async handleChunkTilePacket( + client: TcpClient, + packet: ChunkTilePacket + ) { + if (!client.uuid) { + throw new Error(`${client.name} is not authenticated`); + } + + // TODO ignore if same chunk hash exists in db + + await database + .storeChunkData( + packet.dimension, + packet.chunk_x, + packet.chunk_z, + client.uuid, + packet.ts, + packet.data.version, + packet.data.hash, + packet.data.data, + ) + .catch(console.error); + + // TODO small timeout, then skip if other client already has it + await Promise.allSettled( + Object.values(server.clients) + .filter((other) => other !== client && (other.uuid ?? null) !== null) + .map((other) => other.send(packet)) ); - if (!chunk) { - console.error(`${client.name} requested unavailable chunk`, { - world: pkt.dimension, - ...req, + + // TODO queue tile render for web map + } + + private async handleCatchupRequest( + client: TcpClient, + packet: CatchupRequestPacket, + ) { + if (!client.uuid) { + throw new Error(`${client.name} is not authenticated`); + } + + for (const req of packet.chunks) { + let chunk = await database.getChunkData( + packet.dimension, + req.chunkX, + req.chunkZ, + ); + if (!chunk) { + console.error(`${client.name} requested unavailable chunk`, { + world: packet.dimension, + ...req, + }); + continue; + } + + if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client + if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this + + await client.send({ + type: "ChunkTile", + dimension: packet.dimension, + chunk_x: req.chunkX, + chunk_z: req.chunkX, + ts: req.timestamp, + data: { + hash: chunk.hash, + data: chunk.data, + version: chunk.version, + }, }); - continue; } + } - if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client - if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this - - client.send({ - type: "ChunkTile", - dimension: pkt.dimension, - chunk_x: req.chunkX, - chunk_z: req.chunkX, - ts: req.timestamp, - data: { - hash: chunk.hash, - data: chunk.data, - version: chunk.version, - }, - }); + private async handleRegionCatchupPacket( + client: TcpClient, + packet: RegionCatchupPacket, + ) { + if (!client.uuid) { + throw new Error(`${client.name} is not authenticated`); + } + + const chunks = await 
database.getChunkTimestamps( + packet.dimension, + packet.regions, + ); + if (chunks.length) { + await client.send({ type: "Catchup", dimension: packet.dimension, chunks }); + } } - } - - async handleRegionCatchupPacket( - client: ProtocolClient, - pkt: RegionCatchupPacket, - ) { - if (!client.uuid) - throw new Error(`${client.name} is not authenticated`); - - const chunks = await database.getChunkTimestamps( - pkt.dimension, - pkt.regions, - ); - if (chunks.length) - client.send({ type: "Catchup", dimension: pkt.dimension, chunks }); - } -} + }); +}); diff --git a/server/src/server.ts b/server/src/server.ts index 7487c8f2..2313360d 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1,6 +1,5 @@ import { type TCPSocketListener, type Socket, listen } from "bun"; import * as crypto from "./crypto.ts"; -import { Main } from "./main"; import type { ClientPacket, ServerPacket } from "./protocol"; import { decodePacket, encodePacket } from "./protocol"; import { BufReader } from "./protocol/BufReader"; @@ -9,24 +8,44 @@ import { EncryptionResponsePacket } from "./protocol/EncryptionResponsePacket"; import { HandshakePacket } from "./protocol/HandshakePacket"; import { SUPPORTED_VERSIONS } from "./constants"; -const { PORT = "12312", HOST = "127.0.0.1" } = process.env; +export interface ProtocolHandler { + handleClientConnected( + client: TcpClient + ): Promise -type ProtocolHandler = Main; // TODO cleanup + handleClientDisconnected( + client: TcpClient + ): Promise + + handleClientAuthenticated( + client: TcpClient + ): Promise + + handleClientPacketReceived( + client: TcpClient, + packet: ClientPacket + ): Promise +} export class TcpServer { server: TCPSocketListener; clients: Record = {}; - constructor(readonly handler: ProtocolHandler) { + constructor( + host: string, + port: number, + readonly handler: ProtocolHandler + ) { const self = this; this.server = listen({ - hostname: HOST, - port: parseInt(PORT), + hostname: host, + port: port, socket: { binaryType: "buffer", async open(socket) { const client = new TcpClient(socket, self, handler); self.clients[client.id] = socket.data = client; + await self.handler.handleClientConnected(client); }, async close(socket, err) { const client: TcpClient = socket.data; @@ -34,6 +53,7 @@ export class TcpServer { if ((err ?? null) !== null) { client.warn(`Closed due to an error!`, err); } + await self.handler.handleClientDisconnected(client); }, async data(socket, data) { const client: TcpClient = socket.data; @@ -41,7 +61,7 @@ export class TcpServer { }, } }); - console.log("[TcpServer] Listening on", HOST, PORT); + console.log("[TcpServer] Listening on", host, port); } } @@ -76,7 +96,6 @@ export class TcpClient { private handler: ProtocolHandler, ) { this.log("Connected from", socket.remoteAddress); - handler.handleClientConnected(this); } static readonly #EMPTY_BUFFER = Buffer.allocUnsafe(0); From 3c6e5588e6604c8cec69a3ea2e88ab651f2c5d98 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 06:20:11 +0100 Subject: [PATCH 12/27] Consolidate packets into single file This also modernises them into classes, and uses symbols instead of strings for matching. 
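The matching pattern this moves to: each packet class exposes a unique Symbol as a static TYPE, stamps the same symbol on its instances as `type`, and dispatch switches on symbol identity rather than string comparison. A minimal sketch using a hypothetical packet (not one of the real ones in the diff below):

    import { BufWriter } from "./BufWriter";

    // Hypothetical packet illustrating the Symbol-tag pattern used in packets.ts.
    export class ExamplePacket {
        public static readonly TYPE = Symbol("ExamplePacket");

        public readonly type = ExamplePacket.TYPE;

        public constructor(public readonly dimension: string) {}

        public encode(writer: BufWriter) {
            writer.writeString(this.dimension);
        }
    }

    // Symbols are only ever equal to themselves, so a switch on `packet.type`
    // can never match the wrong packet by accident, unlike shared string names.
    function describePacket(packet: { type: symbol }): string {
        switch (packet.type) {
            case ExamplePacket.TYPE:
                return "example";
            default:
                return "unknown";
        }
    }
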
--- server/src/main.ts | 283 +++++++++--------- server/src/protocol/CatchupPacket.ts | 22 -- server/src/protocol/CatchupRequestPacket.ts | 23 -- server/src/protocol/ChunkTilePacket.ts | 39 --- .../src/protocol/EncryptionRequestPacket.ts | 14 - .../src/protocol/EncryptionResponsePacket.ts | 19 -- server/src/protocol/HandshakePacket.ts | 21 -- server/src/protocol/RegionCatchupPacket.ts | 22 -- server/src/protocol/RegionTimestampsPacket.ts | 22 -- server/src/protocol/index.ts | 84 +++--- server/src/protocol/packets.ts | 212 +++++++++++++ server/src/server.ts | 72 ++--- 12 files changed, 430 insertions(+), 403 deletions(-) delete mode 100644 server/src/protocol/CatchupPacket.ts delete mode 100644 server/src/protocol/CatchupRequestPacket.ts delete mode 100644 server/src/protocol/ChunkTilePacket.ts delete mode 100644 server/src/protocol/EncryptionRequestPacket.ts delete mode 100644 server/src/protocol/EncryptionResponsePacket.ts delete mode 100644 server/src/protocol/HandshakePacket.ts delete mode 100644 server/src/protocol/RegionCatchupPacket.ts delete mode 100644 server/src/protocol/RegionTimestampsPacket.ts create mode 100644 server/src/protocol/packets.ts diff --git a/server/src/main.ts b/server/src/main.ts index 4522d05e..0f08e937 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -2,10 +2,11 @@ import "./cli"; import * as database from "./database"; import * as metadata from "./metadata"; import { type ClientPacket } from "./protocol"; -import { CatchupRequestPacket } from "./protocol/CatchupRequestPacket"; -import { ChunkTilePacket } from "./protocol/ChunkTilePacket"; import { type ProtocolHandler, TcpClient, TcpServer } from "./server"; -import { RegionCatchupPacket } from "./protocol/RegionCatchupPacket"; +import { + ChunkTilePacket, ClientboundChunkTimestampsResponsePacket, + ClientboundRegionTimestampsPacket, ServerboundCatchupRequestPacket, ServerboundChunkTimestampsRequestPacket, +} from "./protocol/packets.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { @@ -18,158 +19,162 @@ Promise.resolve().then(async () => { await metadata.loadWhitelist(); await metadata.loadUuidCache(); - const server = new TcpServer(config.host, config.port, new class implements ProtocolHandler { - public async handleClientConnected( - client: TcpClient - ) { + const server = new TcpServer( + config.host, + config.port, + new (class implements ProtocolHandler { + public async handleClientConnected(client: TcpClient) {} - } + public async handleClientDisconnected(client: TcpClient) {} - public async handleClientDisconnected( - client: TcpClient - ) { + public async handleClientAuthenticated(client: TcpClient) { + if (!client.uuid) { + throw new Error("Client not authenticated"); + } - } + metadata.cachePlayerUuid(client.mcName!, client.uuid!); + await metadata.saveUuidCache(); + + if (config.whitelist) { + if (!metadata.whitelist.has(client.uuid)) { + client.log( + `Rejected unwhitelisted user ${client.mcName} (${client.uuid})`, + ); + client.kick(`Not whitelisted`); + return; + } + } - public async handleClientAuthenticated( - client: TcpClient - ) { - if (!client.uuid) { - throw new Error("Client not authenticated"); - } + // TODO check version, mc server, user access - metadata.cachePlayerUuid(client.mcName!, client.uuid!); - await metadata.saveUuidCache(); + await client.send( + new ClientboundRegionTimestampsPacket( + client.world!, + await database.getRegionTimestamps(client.world!), + ), + ); + } - if (config.whitelist) { - if (!metadata.whitelist.has(client.uuid)) 
{ - client.log( - `Rejected unwhitelisted user ${client.mcName} (${client.uuid})`, - ); - client.kick(`Not whitelisted`); - return; + public async handleClientPacketReceived( + client: TcpClient, + packet: ClientPacket, + ) { + client.debug(client.mcName + " <- " + packet.type.toString()); + switch (packet.type) { + case ChunkTilePacket.TYPE: + return this.handleChunkTilePacket(client, packet as ChunkTilePacket); + case ServerboundCatchupRequestPacket.TYPE: + return this.handleCatchupRequest(client, packet as ServerboundCatchupRequestPacket); + case ServerboundChunkTimestampsRequestPacket.TYPE: + return this.handleRegionCatchupPacket(client, packet as ServerboundChunkTimestampsRequestPacket); + default: + throw new Error( + `Unknown packet '${(packet as any).type}' from client ${ + client.id + }`, + ); } } - // TODO check version, mc server, user access - - const timestamps = await database.getRegionTimestamps(client.world!); - await client.send({ - type: "RegionTimestamps", - dimension: client.world!, - regions: timestamps, - }); - } - - public async handleClientPacketReceived( - client: TcpClient, - packet: ClientPacket - ) { - client.debug(client.mcName + " <- " + packet.type); - switch (packet.type) { - case "ChunkTile": - return this.handleChunkTilePacket(client, packet); - case "CatchupRequest": - return this.handleCatchupRequest(client, packet); - case "RegionCatchup": - return this.handleRegionCatchupPacket(client, packet); - default: - throw new Error( - `Unknown packet '${(packet as any).type}' from client ${ - client.id - }`, - ); - } - } - - private async handleChunkTilePacket( - client: TcpClient, - packet: ChunkTilePacket - ) { - if (!client.uuid) { - throw new Error(`${client.name} is not authenticated`); - } + private async handleChunkTilePacket( + client: TcpClient, + packet: ChunkTilePacket, + ) { + if (!client.uuid) { + throw new Error(`${client.name} is not authenticated`); + } - // TODO ignore if same chunk hash exists in db + // TODO ignore if same chunk hash exists in db + + await database + .storeChunkData( + packet.dimension, + packet.chunkX, + packet.chunkZ, + client.uuid!, + packet.timestamp, + packet.version, + packet.hash, + packet.data, + ) + .catch(console.error); + + // TODO small timeout, then skip if other client already has it + await Promise.allSettled( + Object.values(server.clients) + .filter( + (other) => + other !== client && + (other.uuid ?? null) !== null, + ) + .map((other) => other.send(packet)), + ); - await database - .storeChunkData( - packet.dimension, - packet.chunk_x, - packet.chunk_z, - client.uuid, - packet.ts, - packet.data.version, - packet.data.hash, - packet.data.data, - ) - .catch(console.error); - - // TODO small timeout, then skip if other client already has it - await Promise.allSettled( - Object.values(server.clients) - .filter((other) => other !== client && (other.uuid ?? 
null) !== null) - .map((other) => other.send(packet)) - ); - - // TODO queue tile render for web map - } - - private async handleCatchupRequest( - client: TcpClient, - packet: CatchupRequestPacket, - ) { - if (!client.uuid) { - throw new Error(`${client.name} is not authenticated`); + // TODO queue tile render for web map } - for (const req of packet.chunks) { - let chunk = await database.getChunkData( - packet.dimension, - req.chunkX, - req.chunkZ, - ); - if (!chunk) { - console.error(`${client.name} requested unavailable chunk`, { - world: packet.dimension, - ...req, - }); - continue; + private async handleCatchupRequest( + client: TcpClient, + packet: ServerboundCatchupRequestPacket, + ) { + if (!client.uuid) { + throw new Error(`${client.name} is not authenticated`); } - if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client - if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this - - await client.send({ - type: "ChunkTile", - dimension: packet.dimension, - chunk_x: req.chunkX, - chunk_z: req.chunkX, - ts: req.timestamp, - data: { - hash: chunk.hash, - data: chunk.data, - version: chunk.version, - }, - }); - } - } - - private async handleRegionCatchupPacket( - client: TcpClient, - packet: RegionCatchupPacket, - ) { - if (!client.uuid) { - throw new Error(`${client.name} is not authenticated`); + for (const req of packet.chunks) { + let chunk = await database.getChunkData( + packet.dimension, + req.chunkX, + req.chunkZ, + ); + if (!chunk) { + console.error( + `${client.name} requested unavailable chunk`, + { + world: packet.dimension, + ...req, + }, + ); + continue; + } + + if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client + if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this + + await client.send(new ChunkTilePacket( + packet.dimension, + req.chunkX, + req.chunkZ, + req.timestamp, + chunk.version, + chunk.hash, + chunk.data + )); + } } - const chunks = await database.getChunkTimestamps( - packet.dimension, - packet.regions, - ); - if (chunks.length) { - await client.send({ type: "Catchup", dimension: packet.dimension, chunks }); + private async handleRegionCatchupPacket( + client: TcpClient, + packet: ServerboundChunkTimestampsRequestPacket, + ) { + if (!client.uuid) { + throw new Error(`${client.name} is not authenticated`); + } + + const chunks = await database.getChunkTimestamps( + packet.dimension, + packet.regions.map((region) => ({ + x: region.regionX, + z: region.regionZ + })), + ); + if (chunks.length) { + await client.send(new ClientboundChunkTimestampsResponsePacket( + packet.dimension, + chunks + )); + } } - } - }); + })(), + ); }); diff --git a/server/src/protocol/CatchupPacket.ts b/server/src/protocol/CatchupPacket.ts deleted file mode 100644 index 1d9a452c..00000000 --- a/server/src/protocol/CatchupPacket.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { type CatchupChunk } from "../model"; -import { BufWriter } from "./BufWriter"; - -export interface CatchupPacket { - type: "Catchup"; - dimension: string; - chunks: CatchupChunk[]; -} - -export namespace CatchupPacket { - export function encode(pkt: CatchupPacket, writer: BufWriter) { - if (pkt.chunks.length < 1) - throw new Error(`Catchup chunks must not be empty`); - writer.writeString(pkt.dimension); - writer.writeUnt32(pkt.chunks.length); - for (const chunk of pkt.chunks) { - writer.writeInt32(chunk.chunkX); - 
writer.writeInt32(chunk.chunkZ); - writer.writeUnt64(chunk.timestamp); - } - } -} diff --git a/server/src/protocol/CatchupRequestPacket.ts b/server/src/protocol/CatchupRequestPacket.ts deleted file mode 100644 index b7b4bbf3..00000000 --- a/server/src/protocol/CatchupRequestPacket.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { type CatchupChunk } from "../model"; -import { BufReader } from "./BufReader"; - -export interface CatchupRequestPacket { - type: "CatchupRequest"; - dimension: string; - chunks: CatchupChunk[]; -} - -export namespace CatchupRequestPacket { - export function decode(reader: BufReader): CatchupRequestPacket { - const dimension = reader.readString(); - const chunks: CatchupChunk[] = new Array(reader.readUInt32()); - for (let i = 0; i < chunks.length; i++) { - chunks[i] = { - chunkX: reader.readInt32(), - chunkZ: reader.readInt32(), - timestamp: reader.readUInt64(), - }; - } - return { type: "CatchupRequest", dimension, chunks }; - } -} diff --git a/server/src/protocol/ChunkTilePacket.ts b/server/src/protocol/ChunkTilePacket.ts deleted file mode 100644 index dc614eba..00000000 --- a/server/src/protocol/ChunkTilePacket.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; -import { SHA1_HASH_LENGTH } from "../constants"; - -export interface ChunkTilePacket { - type: "ChunkTile"; - dimension: string; - chunk_x: number; - chunk_z: number; - ts: number; - data: { version: number; hash: Buffer; data: Buffer }; -} - -export namespace ChunkTilePacket { - export function decode(reader: BufReader): ChunkTilePacket { - return { - type: "ChunkTile", - dimension: reader.readString(), - chunk_x: reader.readInt32(), - chunk_z: reader.readInt32(), - ts: reader.readUInt64(), - data: { - version: reader.readUInt16(), - hash: reader.readBufLen(SHA1_HASH_LENGTH), - data: reader.readRemainder(), - }, - }; - } - - export function encode(pkt: ChunkTilePacket, writer: BufWriter) { - writer.writeString(pkt.dimension); - writer.writeInt32(pkt.chunk_x); - writer.writeInt32(pkt.chunk_z); - writer.writeUnt64(pkt.ts); - writer.writeUnt16(pkt.data.version); - writer.writeBufRaw(pkt.data.hash); - writer.writeBufRaw(pkt.data.data); // XXX do we need to prefix with length? 
- } -} diff --git a/server/src/protocol/EncryptionRequestPacket.ts b/server/src/protocol/EncryptionRequestPacket.ts deleted file mode 100644 index a49b4319..00000000 --- a/server/src/protocol/EncryptionRequestPacket.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { BufWriter } from "./BufWriter"; - -export interface EncryptionRequestPacket { - type: "EncryptionRequest"; - publicKey: Buffer; - verifyToken: Buffer; -} - -export namespace EncryptionRequestPacket { - export function encode(pkt: EncryptionRequestPacket, writer: BufWriter) { - writer.writeBufWithLen(pkt.publicKey); - writer.writeBufWithLen(pkt.verifyToken); - } -} diff --git a/server/src/protocol/EncryptionResponsePacket.ts b/server/src/protocol/EncryptionResponsePacket.ts deleted file mode 100644 index 35e6eb24..00000000 --- a/server/src/protocol/EncryptionResponsePacket.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { BufReader } from "./BufReader"; - -export interface EncryptionResponsePacket { - type: "EncryptionResponse"; - /** encrypted with server's public key */ - sharedSecret: Buffer; - /** encrypted with server's public key */ - verifyToken: Buffer; -} - -export namespace EncryptionResponsePacket { - export function decode(reader: BufReader): EncryptionResponsePacket { - return { - type: "EncryptionResponse", - sharedSecret: reader.readBufWithLen(), - verifyToken: reader.readBufWithLen(), - }; - } -} diff --git a/server/src/protocol/HandshakePacket.ts b/server/src/protocol/HandshakePacket.ts deleted file mode 100644 index 747b012b..00000000 --- a/server/src/protocol/HandshakePacket.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { BufReader } from "./BufReader"; - -export interface HandshakePacket { - type: "Handshake"; - modVersion: string; - mojangName: string; - gameAddress: string; - dimension: string; -} - -export namespace HandshakePacket { - export function decode(reader: BufReader): HandshakePacket { - return { - type: "Handshake", - modVersion: reader.readString(), - mojangName: reader.readString(), - gameAddress: reader.readString(), - dimension: reader.readString(), - }; - } -} diff --git a/server/src/protocol/RegionCatchupPacket.ts b/server/src/protocol/RegionCatchupPacket.ts deleted file mode 100644 index 15eb0e8e..00000000 --- a/server/src/protocol/RegionCatchupPacket.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { BufReader } from "./BufReader"; -import { type Pos2D } from "../model"; - -export interface RegionCatchupPacket { - type: "RegionCatchup"; - dimension: string; - regions: Pos2D[]; -} - -export namespace RegionCatchupPacket { - export function decode(reader: BufReader): RegionCatchupPacket { - const dimension = reader.readString(); - const regions: Pos2D[] = new Array(reader.readInt16()); - for (let i = 0; i < regions.length; i++) { - regions[i] = { - x: reader.readInt16(), - z: reader.readInt16(), - }; - } - return { type: "RegionCatchup", dimension, regions }; - } -} diff --git a/server/src/protocol/RegionTimestampsPacket.ts b/server/src/protocol/RegionTimestampsPacket.ts deleted file mode 100644 index 6a74d46d..00000000 --- a/server/src/protocol/RegionTimestampsPacket.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { BufWriter } from "./BufWriter"; -import { type CatchupRegion } from "../model"; - -export interface RegionTimestampsPacket { - type: "RegionTimestamps"; - dimension: string; - regions: Array; -} - -export namespace RegionTimestampsPacket { - export function encode(pkt: RegionTimestampsPacket, writer: BufWriter) { - writer.writeString(pkt.dimension); - writer.writeInt16(pkt.regions.length); - 
console.log("Sending regions " + JSON.stringify(pkt.regions)); - for (let i = 0; i < pkt.regions.length; i++) { - const region = pkt.regions[i]; - writer.writeInt16(region.regionX); - writer.writeInt16(region.regionZ); - writer.writeInt64(region.timestamp); - } - } -} diff --git a/server/src/protocol/index.ts b/server/src/protocol/index.ts index f956e73c..55e177aa 100644 --- a/server/src/protocol/index.ts +++ b/server/src/protocol/index.ts @@ -1,58 +1,60 @@ import { BufReader } from "./BufReader"; import { BufWriter } from "./BufWriter"; -import { ChunkTilePacket } from "./ChunkTilePacket"; -import { EncryptionRequestPacket } from "./EncryptionRequestPacket"; -import { EncryptionResponsePacket } from "./EncryptionResponsePacket"; -import { HandshakePacket } from "./HandshakePacket"; -import { CatchupPacket } from "./CatchupPacket"; -import { CatchupRequestPacket } from "./CatchupRequestPacket"; -import { RegionTimestampsPacket } from "./RegionTimestampsPacket"; -import { RegionCatchupPacket } from "./RegionCatchupPacket"; +import { + ChunkTilePacket, + ClientboundEncryptionRequestPacket, + ClientboundRegionTimestampsPacket, + ServerboundChunkTimestampsRequestPacket, + ServerboundEncryptionResponsePacket, + ServerboundHandshakePacket, + ClientboundChunkTimestampsResponsePacket, + ServerboundCatchupRequestPacket, +} from "./packets.ts"; export type ClientPacket = | ChunkTilePacket - | EncryptionResponsePacket - | HandshakePacket - | CatchupRequestPacket - | RegionCatchupPacket; + | ServerboundEncryptionResponsePacket + | ServerboundHandshakePacket + | ServerboundCatchupRequestPacket + | ServerboundChunkTimestampsRequestPacket; export type ServerPacket = | ChunkTilePacket - | EncryptionRequestPacket - | CatchupPacket - | RegionTimestampsPacket; + | ClientboundEncryptionRequestPacket + | ClientboundChunkTimestampsResponsePacket + | ClientboundRegionTimestampsPacket; export const packetIds = [ "ERROR:pkt0", - "Handshake", - "EncryptionRequest", - "EncryptionResponse", - "ChunkTile", - "Catchup", - "CatchupRequest", - "RegionTimestamps", - "RegionCatchup", + ServerboundHandshakePacket.TYPE, + ClientboundEncryptionRequestPacket.TYPE, + ServerboundEncryptionResponsePacket.TYPE, + ChunkTilePacket.TYPE, + ClientboundChunkTimestampsResponsePacket.TYPE, + ServerboundCatchupRequestPacket.TYPE, + ClientboundRegionTimestampsPacket.TYPE, + ServerboundChunkTimestampsRequestPacket.TYPE, ]; export function getPacketId(type: ServerPacket["type"]) { const id = packetIds.indexOf(type); - if (id === -1) throw new Error(`Unknown packet type ${type}`); + if (id === -1) throw new Error(`Unknown packet type ${type.toString()}`); return id; } export function decodePacket(reader: BufReader): ClientPacket { const packetType = reader.readUInt8(); switch (packetIds[packetType]) { - case "ChunkTile": + case ChunkTilePacket.TYPE: return ChunkTilePacket.decode(reader); - case "Handshake": - return HandshakePacket.decode(reader); - case "EncryptionResponse": - return EncryptionResponsePacket.decode(reader); - case "CatchupRequest": - return CatchupRequestPacket.decode(reader); - case "RegionCatchup": - return RegionCatchupPacket.decode(reader); + case ServerboundHandshakePacket.TYPE: + return ServerboundHandshakePacket.decode(reader); + case ServerboundEncryptionResponsePacket.TYPE: + return ServerboundEncryptionResponsePacket.decode(reader); + case ServerboundCatchupRequestPacket.TYPE: + return ServerboundCatchupRequestPacket.decode(reader); + case ServerboundChunkTimestampsRequestPacket.TYPE: + return 
ServerboundChunkTimestampsRequestPacket.decode(reader); default: throw new Error(`Unknown packet type ${packetType}`); } @@ -61,14 +63,14 @@ export function decodePacket(reader: BufReader): ClientPacket { export function encodePacket(pkt: ServerPacket, writer: BufWriter): void { writer.writeUnt8(getPacketId(pkt.type)); switch (pkt.type) { - case "ChunkTile": - return ChunkTilePacket.encode(pkt, writer); - case "Catchup": - return CatchupPacket.encode(pkt, writer); - case "EncryptionRequest": - return EncryptionRequestPacket.encode(pkt, writer); - case "RegionTimestamps": - return RegionTimestampsPacket.encode(pkt, writer); + case ChunkTilePacket.TYPE: + return (pkt as ChunkTilePacket).encode(writer); + case ClientboundChunkTimestampsResponsePacket.TYPE: + return (pkt as ClientboundChunkTimestampsResponsePacket).encode(writer); + case ClientboundEncryptionRequestPacket.TYPE: + return (pkt as ClientboundEncryptionRequestPacket).encode(writer); + case ClientboundRegionTimestampsPacket.TYPE: + return (pkt as ClientboundRegionTimestampsPacket).encode(writer); default: throw new Error(`Unknown packet type ${(pkt as any).type}`); } diff --git a/server/src/protocol/packets.ts b/server/src/protocol/packets.ts new file mode 100644 index 00000000..b16b3ab9 --- /dev/null +++ b/server/src/protocol/packets.ts @@ -0,0 +1,212 @@ +import { BufReader } from "./BufReader.ts"; +import { BufWriter } from "./BufWriter.ts"; +import { SHA1_HASH_LENGTH } from "../constants.ts"; + +interface Packet { + type: Symbol +} + +function readArray( + length: number, + parser: () => T +): Array { + const array: T[] = new Array(length); + for (let i = 0; i < length; i++) { + array[i] = parser() + } + return array; +} + +export class ServerboundHandshakePacket implements Packet { + public static readonly TYPE = Symbol("ServerboundHandshakePacket"); + + public readonly type = ServerboundHandshakePacket.TYPE; + + public constructor( + public readonly modVersion: string, + public readonly mojangName: string, + public readonly gameAddress: string, + public readonly dimension: string, + ) {} + + public static decode(reader: BufReader): ServerboundHandshakePacket { + return new ServerboundHandshakePacket( + reader.readString(), + reader.readString(), + reader.readString(), + reader.readString() + ); + } +} + +export class ClientboundEncryptionRequestPacket implements Packet { + public static readonly TYPE = Symbol("ClientboundEncryptionRequestPacket"); + + public readonly type = ClientboundEncryptionRequestPacket.TYPE; + + public constructor( + public readonly publicKey: Buffer, + public readonly verifyToken: Buffer, + ) {} + + public encode(writer: BufWriter) { + writer.writeBufWithLen(this.publicKey); + writer.writeBufWithLen(this.verifyToken); + } +} + +export class ServerboundEncryptionResponsePacket implements Packet { + public static readonly TYPE = Symbol("ServerboundEncryptionResponsePacket"); + + public readonly type = ServerboundEncryptionResponsePacket.TYPE; + + public constructor( + public readonly sharedSecret: Buffer, + public readonly verifyToken: Buffer, + ) {} + + public static decode(reader: BufReader): ServerboundEncryptionResponsePacket { + return new ServerboundEncryptionResponsePacket( + reader.readBufWithLen(), + reader.readBufWithLen() + ); + } +} + +export class ClientboundRegionTimestampsPacket implements Packet { + public static readonly TYPE = Symbol("ClientboundRegionTimestampsPacket"); + + public readonly type = ClientboundRegionTimestampsPacket.TYPE; + + public constructor( + public readonly 
dimension: string, + public readonly regions: Array<{ + readonly regionX: number; + readonly regionZ: number; + readonly timestamp: number; + }>, + ) {} + + public encode(writer: BufWriter) { + writer.writeString(this.dimension); + writer.writeInt16(this.regions.length); + for (const region of this.regions) { + writer.writeInt16(region.regionX); + writer.writeInt16(region.regionZ); + writer.writeInt64(region.timestamp); + } + } +} + +export class ServerboundChunkTimestampsRequestPacket implements Packet { + public static readonly TYPE = Symbol("ServerboundChunkTimestampsRequestPacket"); + + public readonly type = ServerboundChunkTimestampsRequestPacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly regions: Array<{ + readonly regionX: number; + readonly regionZ: number; + }>, + ) {} + + public static decode(reader: BufReader): ServerboundChunkTimestampsRequestPacket { + return new ServerboundChunkTimestampsRequestPacket( + reader.readString(), + readArray(reader.readInt16(), () => ({ + regionX: reader.readInt16(), + regionZ: reader.readInt16(), + })) + ); + } +} + +export class ClientboundChunkTimestampsResponsePacket implements Packet { + public static readonly TYPE = Symbol("ClientboundChunkTimestampsResponsePacket"); + + public readonly type = ClientboundChunkTimestampsResponsePacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly chunks: Array<{ + readonly chunkX: number; + readonly chunkZ: number; + readonly timestamp: number; + }>, + ) {} + + public encode(writer: BufWriter) { + writer.writeString(this.dimension); + writer.writeUnt32(this.chunks.length); + for (const chunk of this.chunks) { + writer.writeInt32(chunk.chunkX); + writer.writeInt32(chunk.chunkZ); + writer.writeUnt64(chunk.timestamp); + } + } +} + +export class ServerboundCatchupRequestPacket implements Packet { + public static readonly TYPE = Symbol("ServerboundCatchupRequestPacket"); + + public readonly type = ServerboundCatchupRequestPacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly chunks: Array<{ + readonly chunkX: number; + readonly chunkZ: number; + readonly timestamp: number; + }>, + ) {} + + public static decode(reader: BufReader): ServerboundCatchupRequestPacket { + return new ServerboundCatchupRequestPacket( + reader.readString(), + readArray(reader.readUInt32(), () => ({ + chunkX: reader.readInt32(), + chunkZ: reader.readInt32(), + timestamp: reader.readUInt64() + })) + ); + } +} + +export class ChunkTilePacket implements Packet { + public static readonly TYPE = Symbol("ChunkTilePacket"); + + public readonly type = ChunkTilePacket.TYPE; + + public constructor( + public readonly dimension: string, + public readonly chunkX: number, + public readonly chunkZ: number, + public readonly timestamp: number, + public readonly version: number, + public readonly hash: Buffer, + public readonly data: Buffer, + ) {} + + public encode(writer: BufWriter) { + writer.writeString(this.dimension); + writer.writeInt32(this.chunkX); + writer.writeInt32(this.chunkZ); + writer.writeUnt64(this.timestamp); + writer.writeUnt16(this.version); + writer.writeBufRaw(this.hash); + writer.writeBufRaw(this.data); // XXX do we need to prefix with length? 
+ } + + public static decode(reader: BufReader): ChunkTilePacket { + return new ChunkTilePacket( + reader.readString(), + reader.readInt32(), + reader.readInt32(), + reader.readUInt64(), + reader.readUInt16(), + reader.readBufLen(SHA1_HASH_LENGTH), + reader.readRemainder() + ); + } +} diff --git a/server/src/server.ts b/server/src/server.ts index 2313360d..6e6ff14e 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1,30 +1,27 @@ -import { type TCPSocketListener, type Socket, listen } from "bun"; +import { listen, type Socket, type TCPSocketListener } from "bun"; import * as crypto from "./crypto.ts"; import type { ClientPacket, ServerPacket } from "./protocol"; import { decodePacket, encodePacket } from "./protocol"; import { BufReader } from "./protocol/BufReader"; import { BufWriter } from "./protocol/BufWriter"; -import { EncryptionResponsePacket } from "./protocol/EncryptionResponsePacket"; -import { HandshakePacket } from "./protocol/HandshakePacket"; import { SUPPORTED_VERSIONS } from "./constants"; +import { + ClientboundEncryptionRequestPacket, + ServerboundEncryptionResponsePacket, + ServerboundHandshakePacket, +} from "./protocol/packets.ts"; export interface ProtocolHandler { - handleClientConnected( - client: TcpClient - ): Promise + handleClientConnected(client: TcpClient): Promise; - handleClientDisconnected( - client: TcpClient - ): Promise + handleClientDisconnected(client: TcpClient): Promise; - handleClientAuthenticated( - client: TcpClient - ): Promise + handleClientAuthenticated(client: TcpClient): Promise; handleClientPacketReceived( client: TcpClient, - packet: ClientPacket - ): Promise + packet: ClientPacket, + ): Promise; } export class TcpServer { @@ -34,7 +31,7 @@ export class TcpServer { constructor( host: string, port: number, - readonly handler: ProtocolHandler + readonly handler: ProtocolHandler, ) { const self = this; this.server = listen({ @@ -59,7 +56,7 @@ export class TcpServer { const client: TcpClient = socket.data; await client.handleReceivedData(data); }, - } + }, }); console.log("[TcpServer] Listening on", host, port); } @@ -100,18 +97,13 @@ export class TcpClient { static readonly #EMPTY_BUFFER = Buffer.allocUnsafe(0); #receivedBuffer: Buffer = TcpClient.#EMPTY_BUFFER; - public async handleReceivedData( - data: Buffer - ) { + public async handleReceivedData(data: Buffer) { if (this.ciphers) { data = this.ciphers.decipher.update(data); } // creating a new buffer every time is fine in our case, because we expect most frames to be large - this.#receivedBuffer = Buffer.concat([ - this.#receivedBuffer, - data - ]); + this.#receivedBuffer = Buffer.concat([this.#receivedBuffer, data]); // we may receive multiple frames in one call while (true) { @@ -122,9 +114,9 @@ export class TcpClient { if (frameSize > this.maxFrameSize) { return this.kick( "Frame too large: " + - frameSize + - " have " + - this.#receivedBuffer.byteLength, + frameSize + + " have " + + this.#receivedBuffer.byteLength, ); } @@ -137,8 +129,7 @@ export class TcpClient { try { const packet = decodePacket(new BufReader(packetBuffer)); await this.handlePacketReceived(packet); - } - catch (err) { + } catch (err) { this.warn(err); this.kick("Error in packet handler"); return; @@ -150,13 +141,13 @@ export class TcpClient { if (!this.uuid) { // not authenticated yet switch (pkt.type) { - case "Handshake": - return await this.handleHandshakePacket(pkt); - case "EncryptionResponse": - return await this.handleEncryptionResponsePacket(pkt); + case ServerboundHandshakePacket.TYPE: + return 
await this.handleHandshakePacket(pkt as ServerboundHandshakePacket); + case ServerboundEncryptionResponsePacket.TYPE: + return await this.handleEncryptionResponsePacket(pkt as ServerboundEncryptionResponsePacket); } throw new Error( - `Packet ${pkt.type} from unauth'd client ${this.id}`, + `Packet ${pkt.type.toString()} from unauth'd client ${this.id}`, ); } else { return await this.handler.handleClientPacketReceived(this, pkt); @@ -177,7 +168,7 @@ export class TcpClient { this.debug("Not authenticated, dropping packet", pkt.type); return; } - this.debug(this.mcName + " -> " + pkt.type); + this.debug(this.mcName + " -> " + pkt.type.toString()); await this.sendInternal(pkt, true); } @@ -200,7 +191,7 @@ export class TcpClient { this.socket.write(buf); } - private async handleHandshakePacket(packet: HandshakePacket) { + private async handleHandshakePacket(packet: ServerboundHandshakePacket) { if (this.ciphers) throw new Error(`Already authenticated`); if (this.verifyToken) throw new Error(`Encryption already started`); @@ -218,15 +209,14 @@ export class TcpClient { this.world = packet.dimension; this.verifyToken = crypto.randomBytes(4); - await this.sendInternal({ - type: "EncryptionRequest", - publicKey: crypto.PUBLIC_KEY, - verifyToken: this.verifyToken, - }); + await this.sendInternal(new ClientboundEncryptionRequestPacket( + crypto.PUBLIC_KEY, + this.verifyToken + )); } private async handleEncryptionResponsePacket( - pkt: EncryptionResponsePacket, + pkt: ServerboundEncryptionResponsePacket, ) { if (this.ciphers) throw new Error(`Already authenticated`); if (!this.claimedMojangName) From 0956769f5db738db3b55710b03be354c5ea403fc Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 06:29:11 +0100 Subject: [PATCH 13/27] Consolidate buffer helper classes into single file --- server/src/protocol/BufReader.ts | 100 ------------------ server/src/protocol/BufWriter.ts | 82 --------------- server/src/protocol/buffers.ts | 171 +++++++++++++++++++++++++++++++ server/src/protocol/index.ts | 4 +- server/src/protocol/packets.ts | 29 +++--- server/src/server.ts | 9 +- 6 files changed, 191 insertions(+), 204 deletions(-) delete mode 100644 server/src/protocol/BufReader.ts delete mode 100644 server/src/protocol/BufWriter.ts create mode 100644 server/src/protocol/buffers.ts diff --git a/server/src/protocol/BufReader.ts b/server/src/protocol/BufReader.ts deleted file mode 100644 index e4d39ef2..00000000 --- a/server/src/protocol/BufReader.ts +++ /dev/null @@ -1,100 +0,0 @@ -/** Each read advances the internal offset into the buffer. 
*/ -export class BufReader { - private off = 0; - private offStack: number[] = []; - - constructor(private buf: Buffer) {} - - saveOffset() { - this.offStack.push(this.off); - } - - restoreOffset() { - const off = this.offStack.pop(); - if (off === undefined) throw new Error("Offset stack is empty"); - this.off = off; - } - - readUInt8() { - const val = this.buf.readUInt8(this.off); - this.off += 1; - return val; - } - - readInt8() { - const val = this.buf.readInt8(this.off); - this.off += 1; - return val; - } - - readUInt16() { - const val = this.buf.readUInt16BE(this.off); - this.off += 2; - return val; - } - - readInt16() { - const val = this.buf.readInt16BE(this.off); - this.off += 2; - return val; - } - - readUInt32() { - const val = this.buf.readUInt32BE(this.off); - this.off += 4; - return val; - } - - readInt32() { - const val = this.buf.readInt32BE(this.off); - this.off += 4; - return val; - } - - readUInt64() { - const valBig = this.buf.readBigUInt64BE(this.off); - if (valBig > Number.MAX_SAFE_INTEGER) { - throw new Error(`64-bit number too big: ${valBig}`); - } - this.off += 8; - return Number(valBig); - } - - readInt64() { - const valBig = this.buf.readBigInt64BE(this.off); - if (valBig > Number.MAX_SAFE_INTEGER) { - throw new Error(`64-bit number too big: ${valBig}`); - } - if (valBig < Number.MIN_SAFE_INTEGER) { - throw new Error(`64-bit number too small: ${valBig}`); - } - this.off += 8; - return Number(valBig); - } - - /** length-prefixed (32 bits), UTF-8 encoded */ - readString() { - const len = this.readUInt32(); - const str = this.buf.toString("utf8", this.off, this.off + len); - this.off += len; - return str; - } - - readBufWithLen() { - const len = this.readUInt32(); - return this.readBufLen(len); - } - - readBufLen(length: number) { - // simply returning a slice() would retain the entire buf in memory - const buf = Buffer.allocUnsafe(length); - this.buf.copy(buf, 0, this.off, this.off + length); - this.off += length; - return buf; - } - - /** any reads after this will fail */ - readRemainder() { - return this.readBufLen(this.buf.length - this.off); - } -} diff --git a/server/src/protocol/BufWriter.ts b/server/src/protocol/BufWriter.ts deleted file mode 100644 index acaf0338..00000000 --- a/server/src/protocol/BufWriter.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { ArrayBufferSink } from "bun"; - -export class BufWriter { - private readonly sink: ArrayBufferSink; - private readonly view = new DataView(new ArrayBuffer(8)); // 64 bits - - public constructor() { - this.sink = new ArrayBufferSink(); - this.sink.start({ - asUint8Array: true, - stream: true, - }); - } - - public getBuffer(): Buffer { - return Buffer.from(this.sink.flush() as Uint8Array); - } - - public writeUnt8(val: number) { - this.view.setUint8(0, val); - this.sink.write(this.view.buffer.slice(0, 1)); - } - - public writeInt8(val: number) { - this.view.setInt8(0, val); - this.sink.write(this.view.buffer.slice(0, 1)); - } - - public writeUnt16(val: number) { - this.view.setUint16(0, val); - this.sink.write(this.view.buffer.slice(0, 2)); - } - - public writeInt16(val: number) { - this.view.setInt16(0, val); - this.sink.write(this.view.buffer.slice(0, 2)); - } - - public writeUnt32(val: number) { - this.view.setUint32(0, val); - this.sink.write(this.view.buffer.slice(0, 4)); - } - - public writeInt32(val: number) { - this.view.setInt32(0, val); - this.sink.write(this.view.buffer.slice(0, 4)); - } - - public writeUnt64(val: number | bigint) { - if (typeof val === "number") { - val = BigInt(val); - } - 
this.view.setBigUint64(0, val); - this.sink.write(this.view.buffer); - } - - public writeInt64(val: number | bigint) { - if (typeof val === "number") { - val = BigInt(val); - } - this.view.setBigInt64(0, val); - this.sink.write(this.view.buffer); - } - - /** length-prefixed (u32), UTF-8 encoded */ - readonly #stringEncoder = new TextEncoder(); - public writeString(str: string) { - const bytes = this.#stringEncoder.encode(str); - this.writeUnt32(bytes.byteLength); - this.sink.write(bytes); - } - - /** length-prefixed (u32), UTF-8 encoded */ - public writeBufWithLen(buf: Buffer) { - this.writeUnt32(buf.byteLength); - this.writeBufRaw(buf); - } - - public writeBufRaw(buf: Buffer) { - this.sink.write(buf); - } -} diff --git a/server/src/protocol/buffers.ts b/server/src/protocol/buffers.ts new file mode 100644 index 00000000..e5d45f18 --- /dev/null +++ b/server/src/protocol/buffers.ts @@ -0,0 +1,171 @@ +import { ArrayBufferSink } from "bun"; + +export class BufferWriter { + private readonly sink: ArrayBufferSink; + private readonly view = new DataView(new ArrayBuffer(8)); // 64 bits + + public constructor() { + this.sink = new ArrayBufferSink(); + this.sink.start({ + asUint8Array: true, + stream: true, + }); + } + + public getBuffer(): Buffer { + return Buffer.from(this.sink.flush() as Uint8Array); + } + + public writeUnt8(val: number) { + this.view.setUint8(0, val); + this.sink.write(this.view.buffer.slice(0, 1)); + } + + public writeInt8(val: number) { + this.view.setInt8(0, val); + this.sink.write(this.view.buffer.slice(0, 1)); + } + + public writeUnt16(val: number) { + this.view.setUint16(0, val); + this.sink.write(this.view.buffer.slice(0, 2)); + } + + public writeInt16(val: number) { + this.view.setInt16(0, val); + this.sink.write(this.view.buffer.slice(0, 2)); + } + + public writeUnt32(val: number) { + this.view.setUint32(0, val); + this.sink.write(this.view.buffer.slice(0, 4)); + } + + public writeInt32(val: number) { + this.view.setInt32(0, val); + this.sink.write(this.view.buffer.slice(0, 4)); + } + + public writeUnt64(val: number | bigint) { + if (typeof val === "number") { + val = BigInt(val); + } + this.view.setBigUint64(0, val); + this.sink.write(this.view.buffer); + } + + public writeInt64(val: number | bigint) { + if (typeof val === "number") { + val = BigInt(val); + } + this.view.setBigInt64(0, val); + this.sink.write(this.view.buffer); + } + + /** length-prefixed (u32), UTF-8 encoded */ + readonly #stringEncoder = new TextEncoder(); + public writeString(str: string) { + const bytes = this.#stringEncoder.encode(str); + this.writeUnt32(bytes.byteLength); + this.sink.write(bytes); + } + + /** length-prefixed (u32), UTF-8 encoded */ + public writeBufWithLen(buf: Buffer) { + this.writeUnt32(buf.byteLength); + this.writeBufRaw(buf); + } + + public writeBufRaw(buf: Buffer) { + this.sink.write(buf); + } +} + +/** Each read advances the internal offset into the buffer. 
*/ +export class BufferReader { + private offset = 0; + + public constructor( + private readonly buffer: Buffer + ) {} + + public readUnt8(): number { + const val = this.buffer.readUInt8(this.offset); + this.offset += 1; + return val; + } + + public readInt8(): number { + const val = this.buffer.readInt8(this.offset); + this.offset += 1; + return val; + } + + public readUnt16(): number { + const val = this.buffer.readUInt16BE(this.offset); + this.offset += 2; + return val; + } + + public readInt16(): number { + const val = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return val; + } + + public readUnt32(): number { + const val = this.buffer.readUInt32BE(this.offset); + this.offset += 4; + return val; + } + + public readInt32(): number { + const val = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return val; + } + + public readUnt64(): number { + const val = this.buffer.readBigUInt64BE(this.offset); + if (val > Number.MAX_SAFE_INTEGER) { + throw new Error(`64-bit number too big: ${val}`); + } + this.offset += 8; + return Number(val); + } + + public readInt64(): number { + const val = this.buffer.readBigInt64BE(this.offset); + if (val > Number.MAX_SAFE_INTEGER) { + throw new Error(`64-bit number too big: ${val}`); + } + if (val < Number.MIN_SAFE_INTEGER) { + throw new Error(`64-bit number too small: ${val}`); + } + this.offset += 8; + return Number(val); + } + + readonly #stringDecoder = new TextDecoder("utf-8"); + /** length-prefixed (u32), UTF-8 encoded */ + public readString(): string { + return this.#stringDecoder.decode(this.readBufWithLen()); + } + + public readBufWithLen(): Buffer { + return this.readBufLen(this.readUnt32()); + } + + public readBufLen(length: number): Buffer { + // simply returning a slice() would retain the entire buf in memory + const buffer = Buffer.allocUnsafe(length); + this.buffer.copy(buffer, 0, this.offset, this.offset + length); + this.offset += length; + return buffer; + } + + /** any reads after this will fail */ + public readRemainder(): Buffer { + return this.readBufLen(this.buffer.length - this.offset); + } +} diff --git a/server/src/protocol/index.ts b/server/src/protocol/index.ts index 55e177aa..d5419b7d 100644 --- a/server/src/protocol/index.ts +++ b/server/src/protocol/index.ts @@ -1,5 +1,5 @@ import { BufReader } from "./BufReader"; -import { BufWriter } from "./BufWriter"; +import { BufferWriter } from "./buffers.ts"; import { ChunkTilePacket, ClientboundEncryptionRequestPacket, @@ -60,7 +60,7 @@ export function decodePacket(reader: BufReader): ClientPacket { } } -export function encodePacket(pkt: ServerPacket, writer: BufWriter): void { +export function encodePacket(pkt: ServerPacket, writer: BufferWriter): void { writer.writeUnt8(getPacketId(pkt.type)); switch (pkt.type) { case ChunkTilePacket.TYPE: diff --git a/server/src/protocol/packets.ts b/server/src/protocol/packets.ts index b16b3ab9..ec6f0bf6 100644 --- a/server/src/protocol/packets.ts +++ b/server/src/protocol/packets.ts @@ -1,5 +1,4 @@ -import { BufReader } from "./BufReader.ts"; -import { BufWriter } from "./BufWriter.ts"; +import { BufferWriter, BufferReader } from "./buffers.ts"; import { SHA1_HASH_LENGTH } from "../constants.ts"; interface Packet { @@ -29,7 +28,7 @@ export class ServerboundHandshakePacket implements Packet { public readonly dimension: string, ) {} - public static decode(reader: BufReader): ServerboundHandshakePacket { + public static decode(reader: BufferReader): ServerboundHandshakePacket { return new ServerboundHandshakePacket( 
reader.readString(), reader.readString(), @@ -49,7 +48,7 @@ export class ClientboundEncryptionRequestPacket implements Packet { public readonly verifyToken: Buffer, ) {} - public encode(writer: BufWriter) { + public encode(writer: BufferWriter) { writer.writeBufWithLen(this.publicKey); writer.writeBufWithLen(this.verifyToken); } @@ -65,7 +64,7 @@ export class ServerboundEncryptionResponsePacket implements Packet { public readonly verifyToken: Buffer, ) {} - public static decode(reader: BufReader): ServerboundEncryptionResponsePacket { + public static decode(reader: BufferReader): ServerboundEncryptionResponsePacket { return new ServerboundEncryptionResponsePacket( reader.readBufWithLen(), reader.readBufWithLen() @@ -87,7 +86,7 @@ export class ClientboundRegionTimestampsPacket implements Packet { }>, ) {} - public encode(writer: BufWriter) { + public encode(writer: BufferWriter) { writer.writeString(this.dimension); writer.writeInt16(this.regions.length); for (const region of this.regions) { @@ -111,7 +110,7 @@ export class ServerboundChunkTimestampsRequestPacket implements Packet { }>, ) {} - public static decode(reader: BufReader): ServerboundChunkTimestampsRequestPacket { + public static decode(reader: BufferReader): ServerboundChunkTimestampsRequestPacket { return new ServerboundChunkTimestampsRequestPacket( reader.readString(), readArray(reader.readInt16(), () => ({ @@ -136,7 +135,7 @@ export class ClientboundChunkTimestampsResponsePacket implements Packet { }>, ) {} - public encode(writer: BufWriter) { + public encode(writer: BufferWriter) { writer.writeString(this.dimension); writer.writeUnt32(this.chunks.length); for (const chunk of this.chunks) { @@ -161,13 +160,13 @@ export class ServerboundCatchupRequestPacket implements Packet { }>, ) {} - public static decode(reader: BufReader): ServerboundCatchupRequestPacket { + public static decode(reader: BufferReader): ServerboundCatchupRequestPacket { return new ServerboundCatchupRequestPacket( reader.readString(), - readArray(reader.readUInt32(), () => ({ + readArray(reader.readUnt32(), () => ({ chunkX: reader.readInt32(), chunkZ: reader.readInt32(), - timestamp: reader.readUInt64() + timestamp: reader.readUnt64() })) ); } @@ -188,7 +187,7 @@ export class ChunkTilePacket implements Packet { public readonly data: Buffer, ) {} - public encode(writer: BufWriter) { + public encode(writer: BufferWriter) { writer.writeString(this.dimension); writer.writeInt32(this.chunkX); writer.writeInt32(this.chunkZ); @@ -198,13 +197,13 @@ export class ChunkTilePacket implements Packet { writer.writeBufRaw(this.data); // XXX do we need to prefix with length? 
} - public static decode(reader: BufReader): ChunkTilePacket { + public static decode(reader: BufferReader): ChunkTilePacket { return new ChunkTilePacket( reader.readString(), reader.readInt32(), reader.readInt32(), - reader.readUInt64(), - reader.readUInt16(), + reader.readUnt64(), + reader.readUnt16(), reader.readBufLen(SHA1_HASH_LENGTH), reader.readRemainder() ); diff --git a/server/src/server.ts b/server/src/server.ts index 6e6ff14e..d382ef28 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -2,8 +2,7 @@ import { listen, type Socket, type TCPSocketListener } from "bun"; import * as crypto from "./crypto.ts"; import type { ClientPacket, ServerPacket } from "./protocol"; import { decodePacket, encodePacket } from "./protocol"; -import { BufReader } from "./protocol/BufReader"; -import { BufWriter } from "./protocol/BufWriter"; +import { BufferWriter, BufferReader } from "./protocol/buffers.ts"; import { SUPPORTED_VERSIONS } from "./constants"; import { ClientboundEncryptionRequestPacket, @@ -122,12 +121,12 @@ export class TcpClient { if (this.#receivedBuffer.byteLength < 4 + frameSize) return; // wait for more data - const frameReader = new BufReader(this.#receivedBuffer.subarray(4)); + const frameReader = new BufferReader(this.#receivedBuffer.subarray(4)); const packetBuffer = frameReader.readBufLen(frameSize); this.#receivedBuffer = frameReader.readRemainder(); try { - const packet = decodePacket(new BufReader(packetBuffer)); + const packet = decodePacket(new BufferReader(packetBuffer)); await this.handlePacketReceived(packet); } catch (err) { this.warn(err); @@ -178,7 +177,7 @@ export class TcpClient { if (doCrypto && !this.ciphers) throw new Error(`Can't encrypt: handshake not finished`); - const writer = new BufWriter(); // TODO size hint + const writer = new BufferWriter(); // TODO size hint writer.writeUnt32(0); // set later, but reserve space in buffer encodePacket(pkt, writer); let buf = writer.getBuffer(); From d26820d1c36fa6bbbad0d6ede88cf0014424404e Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 17:03:32 +0100 Subject: [PATCH 14/27] Run prettier --- server/src/crypto.ts | 18 +++++------- server/src/database.ts | 6 ++-- server/src/main.ts | 54 ++++++++++++++++++++++------------ server/src/protocol/buffers.ts | 4 +-- server/src/protocol/index.ts | 4 ++- server/src/protocol/packets.ts | 41 +++++++++++++++----------- server/src/server.ts | 22 +++++++++----- 7 files changed, 89 insertions(+), 60 deletions(-) diff --git a/server/src/crypto.ts b/server/src/crypto.ts index 0d9d6cdd..f492ef04 100644 --- a/server/src/crypto.ts +++ b/server/src/crypto.ts @@ -1,15 +1,15 @@ import node_crypto from "node:crypto"; export { randomBytes, createHash } from "node:crypto"; -const KEY_PAIR = node_crypto.generateKeyPairSync("rsa", { modulusLength: 1024 }); +const KEY_PAIR = node_crypto.generateKeyPairSync("rsa", { + modulusLength: 1024, +}); export const PUBLIC_KEY = KEY_PAIR.publicKey.export({ type: "spki", format: "der", }); -export function decrypt( - buf: Buffer -): Buffer { +export function decrypt(buf: Buffer): Buffer { return node_crypto.privateDecrypt( { key: KEY_PAIR.privateKey, @@ -20,14 +20,12 @@ export function decrypt( } export type Ciphers = { - encipher: node_crypto.Cipheriv, - decipher: node_crypto.Decipheriv + encipher: node_crypto.Cipheriv; + decipher: node_crypto.Decipheriv; }; -export function createCiphers( - secret: Buffer -): Ciphers { +export function createCiphers(secret: Buffer): Ciphers { return { encipher: 
node_crypto.createCipheriv("aes-128-cfb8", secret, secret), - decipher: node_crypto.createDecipheriv("aes-128-cfb8", secret, secret) + decipher: node_crypto.createDecipheriv("aes-128-cfb8", secret, secret), }; } diff --git a/server/src/database.ts b/server/src/database.ts index 7703e997..94b004bd 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -31,9 +31,9 @@ export function get() { { create: true, readwrite: true, - } - ) as unknown as kysely.SqliteDatabase - } + }, + ) as unknown as kysely.SqliteDatabase; + }, }), }); } diff --git a/server/src/main.ts b/server/src/main.ts index 0f08e937..6aeec269 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -4,8 +4,11 @@ import * as metadata from "./metadata"; import { type ClientPacket } from "./protocol"; import { type ProtocolHandler, TcpClient, TcpServer } from "./server"; import { - ChunkTilePacket, ClientboundChunkTimestampsResponsePacket, - ClientboundRegionTimestampsPacket, ServerboundCatchupRequestPacket, ServerboundChunkTimestampsRequestPacket, + ChunkTilePacket, + ClientboundChunkTimestampsResponsePacket, + ClientboundRegionTimestampsPacket, + ServerboundCatchupRequestPacket, + ServerboundChunkTimestampsRequestPacket, } from "./protocol/packets.ts"; let config: metadata.Config = null!; @@ -62,11 +65,20 @@ Promise.resolve().then(async () => { client.debug(client.mcName + " <- " + packet.type.toString()); switch (packet.type) { case ChunkTilePacket.TYPE: - return this.handleChunkTilePacket(client, packet as ChunkTilePacket); + return this.handleChunkTilePacket( + client, + packet as ChunkTilePacket, + ); case ServerboundCatchupRequestPacket.TYPE: - return this.handleCatchupRequest(client, packet as ServerboundCatchupRequestPacket); + return this.handleCatchupRequest( + client, + packet as ServerboundCatchupRequestPacket, + ); case ServerboundChunkTimestampsRequestPacket.TYPE: - return this.handleRegionCatchupPacket(client, packet as ServerboundChunkTimestampsRequestPacket); + return this.handleRegionCatchupPacket( + client, + packet as ServerboundChunkTimestampsRequestPacket, + ); default: throw new Error( `Unknown packet '${(packet as any).type}' from client ${ @@ -141,15 +153,17 @@ Promise.resolve().then(async () => { if (chunk.ts > req.timestamp) continue; // someone sent a new chunk, which presumably got relayed to the client if (chunk.ts < req.timestamp) continue; // the client already has a chunk newer than this - await client.send(new ChunkTilePacket( - packet.dimension, - req.chunkX, - req.chunkZ, - req.timestamp, - chunk.version, - chunk.hash, - chunk.data - )); + await client.send( + new ChunkTilePacket( + packet.dimension, + req.chunkX, + req.chunkZ, + req.timestamp, + chunk.version, + chunk.hash, + chunk.data, + ), + ); } } @@ -165,14 +179,16 @@ Promise.resolve().then(async () => { packet.dimension, packet.regions.map((region) => ({ x: region.regionX, - z: region.regionZ + z: region.regionZ, })), ); if (chunks.length) { - await client.send(new ClientboundChunkTimestampsResponsePacket( - packet.dimension, - chunks - )); + await client.send( + new ClientboundChunkTimestampsResponsePacket( + packet.dimension, + chunks, + ), + ); } } })(), diff --git a/server/src/protocol/buffers.ts b/server/src/protocol/buffers.ts index e5d45f18..d822902f 100644 --- a/server/src/protocol/buffers.ts +++ b/server/src/protocol/buffers.ts @@ -85,9 +85,7 @@ export class BufferWriter { export class BufferReader { private offset = 0; - public constructor( - private readonly buffer: Buffer - ) {} + public constructor(private 
readonly buffer: Buffer) {} public readUnt8(): number { const val = this.buffer.readUInt8(this.offset); diff --git a/server/src/protocol/index.ts b/server/src/protocol/index.ts index d5419b7d..9dd3477f 100644 --- a/server/src/protocol/index.ts +++ b/server/src/protocol/index.ts @@ -66,7 +66,9 @@ export function encodePacket(pkt: ServerPacket, writer: BufferWriter): void { case ChunkTilePacket.TYPE: return (pkt as ChunkTilePacket).encode(writer); case ClientboundChunkTimestampsResponsePacket.TYPE: - return (pkt as ClientboundChunkTimestampsResponsePacket).encode(writer); + return (pkt as ClientboundChunkTimestampsResponsePacket).encode( + writer, + ); case ClientboundEncryptionRequestPacket.TYPE: return (pkt as ClientboundEncryptionRequestPacket).encode(writer); case ClientboundRegionTimestampsPacket.TYPE: diff --git a/server/src/protocol/packets.ts b/server/src/protocol/packets.ts index ec6f0bf6..7e824bc5 100644 --- a/server/src/protocol/packets.ts +++ b/server/src/protocol/packets.ts @@ -2,16 +2,13 @@ import { BufferWriter, BufferReader } from "./buffers.ts"; import { SHA1_HASH_LENGTH } from "../constants.ts"; interface Packet { - type: Symbol + type: Symbol; } -function readArray( - length: number, - parser: () => T -): Array { +function readArray(length: number, parser: () => T): Array { const array: T[] = new Array(length); for (let i = 0; i < length; i++) { - array[i] = parser() + array[i] = parser(); } return array; } @@ -33,7 +30,7 @@ export class ServerboundHandshakePacket implements Packet { reader.readString(), reader.readString(), reader.readString(), - reader.readString() + reader.readString(), ); } } @@ -64,10 +61,12 @@ export class ServerboundEncryptionResponsePacket implements Packet { public readonly verifyToken: Buffer, ) {} - public static decode(reader: BufferReader): ServerboundEncryptionResponsePacket { + public static decode( + reader: BufferReader, + ): ServerboundEncryptionResponsePacket { return new ServerboundEncryptionResponsePacket( reader.readBufWithLen(), - reader.readBufWithLen() + reader.readBufWithLen(), ); } } @@ -98,7 +97,9 @@ export class ClientboundRegionTimestampsPacket implements Packet { } export class ServerboundChunkTimestampsRequestPacket implements Packet { - public static readonly TYPE = Symbol("ServerboundChunkTimestampsRequestPacket"); + public static readonly TYPE = Symbol( + "ServerboundChunkTimestampsRequestPacket", + ); public readonly type = ServerboundChunkTimestampsRequestPacket.TYPE; @@ -110,19 +111,23 @@ export class ServerboundChunkTimestampsRequestPacket implements Packet { }>, ) {} - public static decode(reader: BufferReader): ServerboundChunkTimestampsRequestPacket { + public static decode( + reader: BufferReader, + ): ServerboundChunkTimestampsRequestPacket { return new ServerboundChunkTimestampsRequestPacket( reader.readString(), readArray(reader.readInt16(), () => ({ regionX: reader.readInt16(), regionZ: reader.readInt16(), - })) + })), ); } } export class ClientboundChunkTimestampsResponsePacket implements Packet { - public static readonly TYPE = Symbol("ClientboundChunkTimestampsResponsePacket"); + public static readonly TYPE = Symbol( + "ClientboundChunkTimestampsResponsePacket", + ); public readonly type = ClientboundChunkTimestampsResponsePacket.TYPE; @@ -160,14 +165,16 @@ export class ServerboundCatchupRequestPacket implements Packet { }>, ) {} - public static decode(reader: BufferReader): ServerboundCatchupRequestPacket { + public static decode( + reader: BufferReader, + ): ServerboundCatchupRequestPacket { return new 
ServerboundCatchupRequestPacket( reader.readString(), readArray(reader.readUnt32(), () => ({ chunkX: reader.readInt32(), chunkZ: reader.readInt32(), - timestamp: reader.readUnt64() - })) + timestamp: reader.readUnt64(), + })), ); } } @@ -205,7 +212,7 @@ export class ChunkTilePacket implements Packet { reader.readUnt64(), reader.readUnt16(), reader.readBufLen(SHA1_HASH_LENGTH), - reader.readRemainder() + reader.readRemainder(), ); } } diff --git a/server/src/server.ts b/server/src/server.ts index d382ef28..ee8ee52c 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -121,7 +121,9 @@ export class TcpClient { if (this.#receivedBuffer.byteLength < 4 + frameSize) return; // wait for more data - const frameReader = new BufferReader(this.#receivedBuffer.subarray(4)); + const frameReader = new BufferReader( + this.#receivedBuffer.subarray(4), + ); const packetBuffer = frameReader.readBufLen(frameSize); this.#receivedBuffer = frameReader.readRemainder(); @@ -141,9 +143,13 @@ export class TcpClient { // not authenticated yet switch (pkt.type) { case ServerboundHandshakePacket.TYPE: - return await this.handleHandshakePacket(pkt as ServerboundHandshakePacket); + return await this.handleHandshakePacket( + pkt as ServerboundHandshakePacket, + ); case ServerboundEncryptionResponsePacket.TYPE: - return await this.handleEncryptionResponsePacket(pkt as ServerboundEncryptionResponsePacket); + return await this.handleEncryptionResponsePacket( + pkt as ServerboundEncryptionResponsePacket, + ); } throw new Error( `Packet ${pkt.type.toString()} from unauth'd client ${this.id}`, @@ -208,10 +214,12 @@ export class TcpClient { this.world = packet.dimension; this.verifyToken = crypto.randomBytes(4); - await this.sendInternal(new ClientboundEncryptionRequestPacket( - crypto.PUBLIC_KEY, - this.verifyToken - )); + await this.sendInternal( + new ClientboundEncryptionRequestPacket( + crypto.PUBLIC_KEY, + this.verifyToken, + ), + ); } private async handleEncryptionResponsePacket( From d75a4a4deb13cbbe35f9e00a48e859e27389996f Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 17:26:56 +0100 Subject: [PATCH 15/27] Slight reorganisation This also fixes up imports and consolidates the deps/*.ts files into a lang.ts file. 
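For reference, a minimal usage sketch of the consolidated lang.ts helpers (this example is not part of the patch; the file path and fallback behaviour are purely illustrative):

    import node_fs from "node:fs";
    import { Errors, exists, parseJson, type JSONValue } from "./lang.ts";

    // Hypothetical caller: read a JSON file, treating a missing file as "no data".
    function readJsonOrNull(path: string): JSONValue | null {
        let raw: string;
        try {
            raw = node_fs.readFileSync(path, "utf8");
        } catch (e) {
            // A missing file is expected on first run; anything else is a real error.
            if (Errors.getErrorType(e) !== Errors.ErrorType.FileNotFound) {
                throw Errors.ensureError(e);
            }
            return null;
        }
        return parseJson(raw);
    }

    const settings = readJsonOrNull("./example-settings.json");
    if (exists(settings)) {
        // use settings
    }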
--- server/src/Renderer.ts | 6 +- server/src/cli.ts | 16 ++-- server/src/database.ts | 4 +- server/src/deps/errors.ts | 49 ------------ server/src/deps/json.ts | 16 ---- server/src/lang.ts | 77 +++++++++++++++++++ server/src/main.ts | 17 ++-- server/src/metadata.ts | 11 ++- server/src/{protocol => net}/buffers.ts | 0 server/src/{protocol => net}/packets.ts | 0 .../{protocol/index.ts => net/protocol.ts} | 7 +- server/src/{ => net}/server.ts | 28 ++++--- 12 files changed, 123 insertions(+), 108 deletions(-) delete mode 100644 server/src/deps/errors.ts delete mode 100644 server/src/deps/json.ts create mode 100644 server/src/lang.ts rename server/src/{protocol => net}/buffers.ts (100%) rename server/src/{protocol => net}/packets.ts (100%) rename server/src/{protocol/index.ts => net/protocol.ts} (93%) rename server/src/{ => net}/server.ts (93%) diff --git a/server/src/Renderer.ts b/server/src/Renderer.ts index 3d6f603b..295126a9 100644 --- a/server/src/Renderer.ts +++ b/server/src/Renderer.ts @@ -1,6 +1,6 @@ -import { spawn } from "child_process"; -import { promisify } from "util"; -import * as database from "./database"; +import { spawn } from "node:child_process"; +import { promisify } from "node:util"; +import * as database from "./database.ts"; export async function renderTile( dimension: string, diff --git a/server/src/cli.ts b/server/src/cli.ts index 81ccf1f5..51fd5c23 100644 --- a/server/src/cli.ts +++ b/server/src/cli.ts @@ -1,15 +1,15 @@ -import lib_readline from "readline"; -import lib_stream from "stream"; +import node_readline from "node:readline"; +import node_stream from "node:stream"; -import * as metadata from "./metadata"; +import * as metadata from "./metadata.ts"; //idk where these come from lol interface TerminalExtras { - output: lib_stream.Writable; + output: node_stream.Writable; _refreshLine(): void; } -type TermType = lib_readline.Interface & TerminalExtras; -const term = lib_readline.createInterface({ +type TermType = node_readline.Interface & TerminalExtras; +const term = node_readline.createInterface({ input: process.stdin, output: process.stdout, }) as TermType; @@ -21,8 +21,8 @@ if (!("MAPSYNC_DUMB_TERM" in process.env)) { var newStdout = Object.create(oldStdout); var oldStderr = process.stderr; var newStderr = Object.create(oldStdout); - function write_func(outout: lib_stream.Writable) { - return function (this: lib_stream.Writable) { + function write_func(outout: node_stream.Writable) { + return function (this: node_stream.Writable) { term.output.write("\x1b[2K\r"); var result = outout.write.apply( this, diff --git a/server/src/database.ts b/server/src/database.ts index 94b004bd..80bab862 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -1,7 +1,7 @@ import * as kysely from "kysely"; import { Database as BunSqliteDatabase } from "bun:sqlite"; -import { DATA_FOLDER } from "./metadata"; -import { type Pos2D } from "./model"; +import { DATA_FOLDER } from "./metadata.ts"; +import { type Pos2D } from "./model.ts"; let database: kysely.Kysely | null = null; diff --git a/server/src/deps/errors.ts b/server/src/deps/errors.ts deleted file mode 100644 index 129a0314..00000000 --- a/server/src/deps/errors.ts +++ /dev/null @@ -1,49 +0,0 @@ -import node_os from "node:os"; -import node_utils from "node:util"; - -export enum ErrorType { - FileExists, - FileNotFound, - UNKNOWN, -} - -/** - * Attempts to transform Node's less-than-helpful exceptions into something - * more readable and logic-able. 
- */ -export function getErrorType(error: any): ErrorType { - switch (Math.abs(error.errno ?? Infinity)) { - case node_os.constants.errno.ENOENT: - return ErrorType.FileNotFound; - case node_os.constants.errno.EEXIST: - return ErrorType.FileExists; - default: - return ErrorType.UNKNOWN; - } -} - -/** - * Utility that guarantees that the error is an instance of Error. - */ -export function ensureError(error: any): Error { - if (error instanceof Error) { - return error; - } - switch (typeof error) { - case "string": - return new Error(error); - case "number": - case "bigint": - return new Error(String(error)); - } - return new Error(node_utils.inspect(error)); -} - -/** - * This is useful in cases where you need to throw but can't because of - * Javascript. Read more for context: - * https://www.proposals.es/proposals/throw%20expressions - */ -export function inlineThrow(error: any): T { - throw error; -} diff --git a/server/src/deps/json.ts b/server/src/deps/json.ts deleted file mode 100644 index 21eda8f6..00000000 --- a/server/src/deps/json.ts +++ /dev/null @@ -1,16 +0,0 @@ -export type JSONObject = { [key: string]: JSONValue | undefined }; -export type JSONArray = JSONValue[]; -export type JSONValue = - | JSONObject - | JSONArray - | string - | number - | boolean - | null; - -/** - * Wrapper function for JSON.parse() that provides a proper return type. - */ -export function parse(raw: string): JSONValue { - return JSON.parse(raw); -} diff --git a/server/src/lang.ts b/server/src/lang.ts new file mode 100644 index 00000000..8337bcf8 --- /dev/null +++ b/server/src/lang.ts @@ -0,0 +1,77 @@ +import node_os from "node:os"; +import node_utils from "node:util"; + +export function exists(obj: T): obj is NonNullable { + return (obj ?? null) !== null; +} + +export const INT8_SIZE = 1; +export const INT16_SIZE = 2; +export const INT32_SIZE = 4; +export const INT64_SIZE = 8; + +export type JSONObject = { [key: string]: JSONValue | undefined }; +export type JSONArray = JSONValue[]; +export type JSONValue = + | JSONObject + | JSONArray + | string + | number + | boolean + | null; + +/** + * Wrapper function for JSON.parse() that provides a proper return type. + */ +export function parseJson(raw: string): JSONValue { + return JSON.parse(raw); +} + +export namespace Errors { + export enum ErrorType { + FileExists, + FileNotFound, + UNKNOWN, + } + + /** + * Attempts to transform Node's less-than-helpful exceptions into something + * more readable and logic-able. + */ + export function getErrorType(error: any): ErrorType { + switch (Math.abs(error.errno ?? Infinity)) { + case node_os.constants.errno.ENOENT: + return ErrorType.FileNotFound; + case node_os.constants.errno.EEXIST: + return ErrorType.FileExists; + default: + return ErrorType.UNKNOWN; + } + } + + /** + * Utility that guarantees that the error is an instance of Error. + */ + export function ensureError(error: any): Error { + if (error instanceof Error) { + return error; + } + switch (typeof error) { + case "string": + return new Error(error); + case "number": + case "bigint": + return new Error(String(error)); + } + return new Error(node_utils.inspect(error)); + } + + /** + * This is useful in cases where you need to throw but can't because of + * Javascript. 
Read more for context: + * https://www.proposals.es/proposals/throw%20expressions + */ + export function inlineThrow(error: any): T { + throw error; + } +} diff --git a/server/src/main.ts b/server/src/main.ts index 6aeec269..1f67d51d 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,15 +1,16 @@ -import "./cli"; -import * as database from "./database"; -import * as metadata from "./metadata"; -import { type ClientPacket } from "./protocol"; -import { type ProtocolHandler, TcpClient, TcpServer } from "./server"; +import "./cli.ts"; +import * as database from "./database.ts"; +import * as metadata from "./metadata.ts"; +import { type ClientPacket } from "./net/protocol.ts"; +import { type ProtocolHandler, TcpClient, TcpServer } from "./net/server.ts"; import { ChunkTilePacket, ClientboundChunkTimestampsResponsePacket, ClientboundRegionTimestampsPacket, ServerboundCatchupRequestPacket, ServerboundChunkTimestampsRequestPacket, -} from "./protocol/packets.ts"; +} from "./net/packets.ts"; +import { exists } from "./lang.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { @@ -115,9 +116,7 @@ Promise.resolve().then(async () => { await Promise.allSettled( Object.values(server.clients) .filter( - (other) => - other !== client && - (other.uuid ?? null) !== null, + (other) => other !== client && exists(other.uuid), ) .map((other) => other.send(packet)), ); diff --git a/server/src/metadata.ts b/server/src/metadata.ts index f2519833..16e5d23e 100644 --- a/server/src/metadata.ts +++ b/server/src/metadata.ts @@ -1,17 +1,16 @@ import node_fs from "node:fs"; import node_path from "node:path"; import { Mutex } from "async-mutex"; -import * as errors from "./deps/errors"; -import * as json from "./deps/json"; import * as z from "zod"; import { fromZodError } from "zod-validation-error"; +import { Errors, type JSONValue, parseJson } from "./lang.ts"; export const DATA_FOLDER = process.env["MAPSYNC_DATA_DIR"] ?? 
"./mapsync"; try { node_fs.mkdirSync(DATA_FOLDER, { recursive: true }); console.log(`Created data folder "${DATA_FOLDER}"`); } catch (e: any) { - if (errors.getErrorType(e) !== errors.ErrorType.FileExists) throw e; + if (Errors.getErrorType(e) !== Errors.ErrorType.FileExists) throw e; console.log(`Using data folder "${DATA_FOLDER}"`); } @@ -25,7 +24,7 @@ try { */ function parseConfigFile( file: string, - parser: (raw: json.JSONValue) => T, + parser: (raw: JSONValue) => T, defaultSupplier: () => any, ): T { file = node_path.resolve(DATA_FOLDER, file); @@ -33,7 +32,7 @@ function parseConfigFile( try { fileContents = node_fs.readFileSync(file, "utf8"); } catch (e) { - if (errors.getErrorType(e) !== errors.ErrorType.FileNotFound) { + if (Errors.getErrorType(e) !== Errors.ErrorType.FileNotFound) { throw e; } // Could not find the config file, so attempt to create a default one @@ -46,7 +45,7 @@ function parseConfigFile( return defaultContent; } try { - return parser(json.parse(fileContents)); + return parser(parseJson(fileContents)); } catch (e) { if (e instanceof z.ZodError) { throw "Could not parse " + file + ": " + fromZodError(e); diff --git a/server/src/protocol/buffers.ts b/server/src/net/buffers.ts similarity index 100% rename from server/src/protocol/buffers.ts rename to server/src/net/buffers.ts diff --git a/server/src/protocol/packets.ts b/server/src/net/packets.ts similarity index 100% rename from server/src/protocol/packets.ts rename to server/src/net/packets.ts diff --git a/server/src/protocol/index.ts b/server/src/net/protocol.ts similarity index 93% rename from server/src/protocol/index.ts rename to server/src/net/protocol.ts index 9dd3477f..5cfedd9b 100644 --- a/server/src/protocol/index.ts +++ b/server/src/net/protocol.ts @@ -1,5 +1,4 @@ -import { BufReader } from "./BufReader"; -import { BufferWriter } from "./buffers.ts"; +import { BufferWriter, BufferReader } from "./buffers.ts"; import { ChunkTilePacket, ClientboundEncryptionRequestPacket, @@ -42,8 +41,8 @@ export function getPacketId(type: ServerPacket["type"]) { return id; } -export function decodePacket(reader: BufReader): ClientPacket { - const packetType = reader.readUInt8(); +export function decodePacket(reader: BufferReader): ClientPacket { + const packetType = reader.readUnt8(); switch (packetIds[packetType]) { case ChunkTilePacket.TYPE: return ChunkTilePacket.decode(reader); diff --git a/server/src/server.ts b/server/src/net/server.ts similarity index 93% rename from server/src/server.ts rename to server/src/net/server.ts index ee8ee52c..68f64fe0 100644 --- a/server/src/server.ts +++ b/server/src/net/server.ts @@ -1,14 +1,19 @@ import { listen, type Socket, type TCPSocketListener } from "bun"; -import * as crypto from "./crypto.ts"; -import type { ClientPacket, ServerPacket } from "./protocol"; -import { decodePacket, encodePacket } from "./protocol"; -import { BufferWriter, BufferReader } from "./protocol/buffers.ts"; -import { SUPPORTED_VERSIONS } from "./constants"; +import * as crypto from "../crypto.ts"; +import { + decodePacket, + encodePacket, + type ClientPacket, + type ServerPacket, +} from "./protocol.ts"; +import { BufferWriter, BufferReader } from "./buffers.ts"; +import { SUPPORTED_VERSIONS } from "../constants.ts"; import { ClientboundEncryptionRequestPacket, ServerboundEncryptionResponsePacket, ServerboundHandshakePacket, -} from "./protocol/packets.ts"; +} from "./packets.ts"; +import { exists, INT32_SIZE } from "../lang.ts"; export interface ProtocolHandler { handleClientConnected(client: 
TcpClient): Promise; @@ -46,7 +51,7 @@ export class TcpServer { async close(socket, err) { const client: TcpClient = socket.data; delete self.clients[client.id]; - if ((err ?? null) !== null) { + if (exists(err)) { client.warn(`Closed due to an error!`, err); } await self.handler.handleClientDisconnected(client); @@ -97,7 +102,7 @@ export class TcpClient { static readonly #EMPTY_BUFFER = Buffer.allocUnsafe(0); #receivedBuffer: Buffer = TcpClient.#EMPTY_BUFFER; public async handleReceivedData(data: Buffer) { - if (this.ciphers) { + if (exists(this.ciphers)) { data = this.ciphers.decipher.update(data); } @@ -106,7 +111,7 @@ export class TcpClient { // we may receive multiple frames in one call while (true) { - if (this.#receivedBuffer.byteLength <= 4) return; // wait for more data + if (this.#receivedBuffer.byteLength <= INT32_SIZE) return; // wait for more data const frameSize = this.#receivedBuffer.readUInt32BE(); // prevent Out of Memory @@ -119,10 +124,11 @@ export class TcpClient { ); } - if (this.#receivedBuffer.byteLength < 4 + frameSize) return; // wait for more data + if (this.#receivedBuffer.byteLength < INT32_SIZE + frameSize) + return; // wait for more data const frameReader = new BufferReader( - this.#receivedBuffer.subarray(4), + this.#receivedBuffer.subarray(INT32_SIZE), ); const packetBuffer = frameReader.readBufLen(frameSize); this.#receivedBuffer = frameReader.readRemainder(); From 13b09aa39c78389ec134268698735302956408d2 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 19:37:40 +0100 Subject: [PATCH 16/27] Switch handshake and auth to a state model --- server/package.json | 1 + server/src/crypto.ts | 31 ----- server/src/main.ts | 89 ++++++------- server/src/net/auth.ts | 213 +++++++++++++++++++++++++++++++ server/src/net/protocol.ts | 6 + server/src/net/server.ts | 252 +++++++++++-------------------------- 6 files changed, 332 insertions(+), 260 deletions(-) delete mode 100644 server/src/crypto.ts create mode 100644 server/src/net/auth.ts diff --git a/server/package.json b/server/package.json index 40b1ac91..6a80faa1 100644 --- a/server/package.json +++ b/server/package.json @@ -11,6 +11,7 @@ "test": "bun test ./src/*.test.ts", "start": "bun src/main.ts", "start:dev": "bun --inspect src/main.ts", + "check": "bunx tsc", "compile": "bun build --compile . 
--outfile out/mapsync-server" }, "dependencies": { diff --git a/server/src/crypto.ts b/server/src/crypto.ts deleted file mode 100644 index f492ef04..00000000 --- a/server/src/crypto.ts +++ /dev/null @@ -1,31 +0,0 @@ -import node_crypto from "node:crypto"; -export { randomBytes, createHash } from "node:crypto"; - -const KEY_PAIR = node_crypto.generateKeyPairSync("rsa", { - modulusLength: 1024, -}); -export const PUBLIC_KEY = KEY_PAIR.publicKey.export({ - type: "spki", - format: "der", -}); - -export function decrypt(buf: Buffer): Buffer { - return node_crypto.privateDecrypt( - { - key: KEY_PAIR.privateKey, - padding: node_crypto.constants.RSA_PKCS1_PADDING, - }, - buf, - ); -} - -export type Ciphers = { - encipher: node_crypto.Cipheriv; - decipher: node_crypto.Decipheriv; -}; -export function createCiphers(secret: Buffer): Ciphers { - return { - encipher: node_crypto.createCipheriv("aes-128-cfb8", secret, secret), - decipher: node_crypto.createDecipheriv("aes-128-cfb8", secret, secret), - }; -} diff --git a/server/src/main.ts b/server/src/main.ts index 1f67d51d..ca9ec3e3 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,7 +1,7 @@ import "./cli.ts"; import * as database from "./database.ts"; import * as metadata from "./metadata.ts"; -import { type ClientPacket } from "./net/protocol.ts"; +import { type ClientPacket, UnexpectedPacket } from "./net/protocol.ts"; import { type ProtocolHandler, TcpClient, TcpServer } from "./net/server.ts"; import { ChunkTilePacket, @@ -10,7 +10,7 @@ import { ServerboundCatchupRequestPacket, ServerboundChunkTimestampsRequestPacket, } from "./net/packets.ts"; -import { exists } from "./lang.ts"; +import { isAuthed, OnlineAuth, requireAuth } from "./net/auth.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { @@ -32,19 +32,18 @@ Promise.resolve().then(async () => { public async handleClientDisconnected(client: TcpClient) {} public async handleClientAuthenticated(client: TcpClient) { - if (!client.uuid) { - throw new Error("Client not authenticated"); - } - - metadata.cachePlayerUuid(client.mcName!, client.uuid!); - await metadata.saveUuidCache(); + if (client.auth instanceof OnlineAuth) { + metadata.cachePlayerUuid( + client.auth.name, + client.auth.uuid, + ); + await metadata.saveUuidCache(); - if (config.whitelist) { - if (!metadata.whitelist.has(client.uuid)) { - client.log( - `Rejected unwhitelisted user ${client.mcName} (${client.uuid})`, - ); - client.kick(`Not whitelisted`); + if ( + config.whitelist && + !metadata.whitelist.has(client.auth.uuid) + ) { + client.kick(`Not whitelisted!`); return; } } @@ -53,8 +52,8 @@ Promise.resolve().then(async () => { await client.send( new ClientboundRegionTimestampsPacket( - client.world!, - await database.getRegionTimestamps(client.world!), + client.dimension!, + await database.getRegionTimestamps(client.dimension!), ), ); } @@ -63,29 +62,27 @@ Promise.resolve().then(async () => { client: TcpClient, packet: ClientPacket, ) { - client.debug(client.mcName + " <- " + packet.type.toString()); switch (packet.type) { case ChunkTilePacket.TYPE: - return this.handleChunkTilePacket( + await this.handleChunkTilePacket( client, packet as ChunkTilePacket, ); + return; case ServerboundCatchupRequestPacket.TYPE: - return this.handleCatchupRequest( + await this.handleCatchupRequest( client, packet as ServerboundCatchupRequestPacket, ); + return; case ServerboundChunkTimestampsRequestPacket.TYPE: - return this.handleRegionCatchupPacket( + await this.handleRegionCatchupPacket( client, packet as 
ServerboundChunkTimestampsRequestPacket, ); + return; default: - throw new Error( - `Unknown packet '${(packet as any).type}' from client ${ - client.id - }`, - ); + throw new UnexpectedPacket(packet.type.toString()); } } @@ -93,31 +90,29 @@ Promise.resolve().then(async () => { client: TcpClient, packet: ChunkTilePacket, ) { - if (!client.uuid) { - throw new Error(`${client.name} is not authenticated`); - } + requireAuth(client); // TODO ignore if same chunk hash exists in db - await database - .storeChunkData( - packet.dimension, - packet.chunkX, - packet.chunkZ, - client.uuid!, - packet.timestamp, - packet.version, - packet.hash, - packet.data, - ) - .catch(console.error); + if (client.auth instanceof OnlineAuth) { + await database + .storeChunkData( + packet.dimension, + packet.chunkX, + packet.chunkZ, + client.auth.uuid, + packet.timestamp, + packet.version, + packet.hash, + packet.data, + ) + .catch(client.warn); + } // TODO small timeout, then skip if other client already has it await Promise.allSettled( Object.values(server.clients) - .filter( - (other) => other !== client && exists(other.uuid), - ) + .filter((other) => other !== client && isAuthed(other)) .map((other) => other.send(packet)), ); @@ -128,9 +123,7 @@ Promise.resolve().then(async () => { client: TcpClient, packet: ServerboundCatchupRequestPacket, ) { - if (!client.uuid) { - throw new Error(`${client.name} is not authenticated`); - } + requireAuth(client); for (const req of packet.chunks) { let chunk = await database.getChunkData( @@ -170,9 +163,7 @@ Promise.resolve().then(async () => { client: TcpClient, packet: ServerboundChunkTimestampsRequestPacket, ) { - if (!client.uuid) { - throw new Error(`${client.name} is not authenticated`); - } + requireAuth(client); const chunks = await database.getChunkTimestamps( packet.dimension, diff --git a/server/src/net/auth.ts b/server/src/net/auth.ts new file mode 100644 index 00000000..73737653 --- /dev/null +++ b/server/src/net/auth.ts @@ -0,0 +1,213 @@ +import node_crypto from "node:crypto"; + +import { z } from "zod"; +import { fromZodError } from "zod-validation-error"; + +import { type TcpClient } from "./server.ts"; +import { + ClientboundEncryptionRequestPacket, + ServerboundEncryptionResponsePacket, + type ServerboundHandshakePacket, +} from "./packets.ts"; +import { UnexpectedPacket } from "./protocol.ts"; +import { SUPPORTED_VERSIONS } from "../constants.ts"; + +const KEY_PAIR = node_crypto.generateKeyPairSync("rsa", { + modulusLength: 1024, +}); +const PUBLIC_KEY = KEY_PAIR.publicKey.export({ + type: "spki", + format: "der", +}); + +// ============================================================ +// Handshake +// ============================================================ + +class AwaitingHandshake {} + +export async function handleConnected(client: TcpClient) { + client.auth = new AwaitingHandshake(); +} + +export async function handleHandshake( + client: TcpClient, + packet: ServerboundHandshakePacket, +) { + if (!(client.auth instanceof AwaitingHandshake)) { + throw new UnexpectedPacket(packet.type.toString()); + } + + if (!SUPPORTED_VERSIONS.has(packet.modVersion)) { + client.kick( + `Connected with unsupported version [${packet.modVersion}]`, + ); + return; + } + + client.claimedMojangUsername = packet.mojangName; + client.gameAddress = packet.gameAddress; + client.dimension = packet.dimension; + + const verifyToken = node_crypto.randomBytes(4); + + client.auth = new AwaitingEncryptionResponse(verifyToken); + await client.send( + new 
ClientboundEncryptionRequestPacket(PUBLIC_KEY, verifyToken), + ); +} + +// ============================================================ +// Encryption Response +// ============================================================ + +export function decrypt(buf: Buffer): Buffer { + return node_crypto.privateDecrypt( + { + key: KEY_PAIR.privateKey, + padding: node_crypto.constants.RSA_PKCS1_PADDING, + }, + buf, + ); +} + +class AwaitingEncryptionResponse { + public constructor(public readonly verifyToken: Buffer) {} +} + +export async function handleEncryptionResponse( + client: TcpClient, + packet: ServerboundEncryptionResponsePacket, +) { + if (!(client.auth instanceof AwaitingEncryptionResponse)) { + throw new UnexpectedPacket(packet.type.toString()); + } + + const decryptedVerifyToken = decrypt(packet.verifyToken); + if (!client.auth.verifyToken.equals(decryptedVerifyToken)) { + client.kick("verifyToken does not match!"); + client.debug( + `Expected [${client.auth.verifyToken.toHex()}], received [${decryptedVerifyToken.toHex()}]`, + ); + return; + } + + const decryptedSharedSecret = decrypt(packet.sharedSecret); + client.ciphers = { + encipher: node_crypto.createCipheriv( + "aes-128-cfb8", + decryptedSharedSecret, + decryptedSharedSecret, + ), + decipher: node_crypto.createDecipheriv( + "aes-128-cfb8", + decryptedSharedSecret, + decryptedSharedSecret, + ), + }; + client.debug("Connection is now encrypted!"); + + if (Bun.env["MAPSYNC_DISABLE_AUTH"] === "true") { + client.auth = new OfflineAuth(client.claimedMojangUsername!); + client.name += "?:" + client.claimedMojangUsername!; + } else { + const auth = await fetchHasJoined( + client, + node_crypto + .createHash("sha1") + .update(decryptedSharedSecret) + .update(PUBLIC_KEY) + .digest() + .toString("hex"), + ); + if (auth === null) { + client.kick("Not authenticated!"); + return; + } + + client.auth = new OnlineAuth(auth.name, auth.uuid); + client.name += ":" + auth.name; + } + + await client.handlers.handleClientAuthenticated(client); +} + +// ============================================================ +// Authentication +// ============================================================ + +export class OfflineAuth { + public constructor(public readonly name: string) {} +} + +export class OnlineAuth { + public constructor( + public readonly name: string, + public readonly uuid: string, + ) {} +} + +export function isAuthed(client: TcpClient) { + return ( + client.auth instanceof OnlineAuth || client.auth instanceof OfflineAuth + ); +} + +export function requireAuth(client: TcpClient) { + if (!isAuthed(client)) { + throw new Error("User not authenticated!"); + } +} + +const MOJANG_AUTH_RESPONSE_SCHEMA = z.object({ + id: z.string().uuid(), + name: z.string(), +}); + +async function fetchHasJoined( + client: TcpClient, + shaHex: string, +): Promise<{ + name: string; + uuid: string; +} | null> { + let url = `https://sessionserver.mojang.com/session/minecraft/hasJoined?username=${client.claimedMojangUsername!}&serverId=${shaHex}`; + + let response: Response; + try { + response = await fetch(url); + } catch (error) { + client.warn("Could not complete auth request!", error); + return null; + } + if (response.status === 204) { + return null; + } + + let raw: unknown; + try { + raw = await response.json(); + } catch (error) { + client.warn("Could not parse auth response as json!", error); + return null; + } + + let auth: z.infer; + try { + auth = MOJANG_AUTH_RESPONSE_SCHEMA.parse(raw); + } catch (error) { + client.warn( + "Could not validate auth 
response!", + fromZodError(error as z.ZodError), + ); + return null; + } + + return { + name: auth.name, + uuid: auth.id.replace( + /^(........)-?(....)-?(....)-?(....)-?(............)$/, + "$1-$2-$3-$4-$5", + ), + }; +} diff --git a/server/src/net/protocol.ts b/server/src/net/protocol.ts index 5cfedd9b..50f91a17 100644 --- a/server/src/net/protocol.ts +++ b/server/src/net/protocol.ts @@ -76,3 +76,9 @@ export function encodePacket(pkt: ServerPacket, writer: BufferWriter): void { throw new Error(`Unknown packet type ${(pkt as any).type}`); } } + +export class UnexpectedPacket extends Error { + public constructor(message?: string) { + super(message); + } +} diff --git a/server/src/net/server.ts b/server/src/net/server.ts index 68f64fe0..753d71a2 100644 --- a/server/src/net/server.ts +++ b/server/src/net/server.ts @@ -1,19 +1,28 @@ import { listen, type Socket, type TCPSocketListener } from "bun"; -import * as crypto from "../crypto.ts"; + +import node_crypto from "node:crypto"; + +import { exists, INT32_SIZE } from "../lang.ts"; import { + type ClientPacket, decodePacket, encodePacket, - type ClientPacket, type ServerPacket, + UnexpectedPacket, } from "./protocol.ts"; -import { BufferWriter, BufferReader } from "./buffers.ts"; -import { SUPPORTED_VERSIONS } from "../constants.ts"; +import { BufferReader, BufferWriter } from "./buffers.ts"; import { - ClientboundEncryptionRequestPacket, + ChunkTilePacket, + ServerboundCatchupRequestPacket, + ServerboundChunkTimestampsRequestPacket, ServerboundEncryptionResponsePacket, ServerboundHandshakePacket, } from "./packets.ts"; -import { exists, INT32_SIZE } from "../lang.ts"; +import { + handleConnected, + handleEncryptionResponse, + handleHandshake, +} from "./auth.ts"; export interface ProtocolHandler { handleClientConnected(client: TcpClient): Promise; @@ -29,13 +38,13 @@ export interface ProtocolHandler { } export class TcpServer { - server: TCPSocketListener; - clients: Record = {}; + public readonly server: TCPSocketListener; + public readonly clients: Record = {}; - constructor( + public constructor( host: string, port: number, - readonly handler: ProtocolHandler, + public readonly handlers: ProtocolHandler, ) { const self = this; this.server = listen({ @@ -44,9 +53,10 @@ export class TcpServer { socket: { binaryType: "buffer", async open(socket) { - const client = new TcpClient(socket, self, handler); + const client = new TcpClient(socket, self.handlers); self.clients[client.id] = socket.data = client; - await self.handler.handleClientConnected(client); + await handleConnected(client); + await self.handlers.handleClientConnected(client); }, async close(socket, err) { const client: TcpClient = socket.data; @@ -54,7 +64,7 @@ export class TcpServer { if (exists(err)) { client.warn(`Closed due to an error!`, err); } - await self.handler.handleClientDisconnected(client); + await self.handlers.handleClientDisconnected(client); }, async data(socket, data) { const client: TcpClient = socket.data; @@ -67,34 +77,29 @@ export class TcpServer { } let nextClientId = 1; +const MAX_FRAME_SIZE = 2 ** 15; /** Prefixes packets with their length (UInt32BE); * handles Mojang authentication */ export class TcpClient { - readonly id = nextClientId++; + public readonly id = nextClientId++; /** contains mojang name once logged in */ - name = "Client" + this.id; - - modVersion: string | undefined; - gameAddress: string | undefined; - uuid: string | undefined; - mcName: string | undefined; - world: string | undefined; + public name = "Client" + this.id; - /** prevent 
Out of Memory when client sends a large packet */ - maxFrameSize = 2 ** 15; + public claimedMojangUsername: string | null = null; + public gameAddress: string | null = null; + public dimension: string | null = null; /** sent by client during handshake */ - private claimedMojangName?: string; - private verifyToken?: Buffer; - /** we need to wait for the mojang auth response - * before we can en/decrypt packets following the handshake */ - private ciphers: crypto.Ciphers | null = null; + public auth: any; + public ciphers: { + encipher: node_crypto.Cipheriv; + decipher: node_crypto.Decipheriv; + } | null = null; - constructor( + public constructor( private socket: Socket, - private server: TcpServer, - private handler: ProtocolHandler, + public handlers: ProtocolHandler, ) { this.log("Connected from", socket.remoteAddress); } @@ -115,7 +120,7 @@ export class TcpClient { const frameSize = this.#receivedBuffer.readUInt32BE(); // prevent Out of Memory - if (frameSize > this.maxFrameSize) { + if (frameSize > MAX_FRAME_SIZE) { return this.kick( "Frame too large: " + frameSize + @@ -144,174 +149,61 @@ export class TcpClient { } } - private async handlePacketReceived(pkt: ClientPacket) { - if (!this.uuid) { - // not authenticated yet - switch (pkt.type) { - case ServerboundHandshakePacket.TYPE: - return await this.handleHandshakePacket( - pkt as ServerboundHandshakePacket, - ); - case ServerboundEncryptionResponsePacket.TYPE: - return await this.handleEncryptionResponsePacket( - pkt as ServerboundEncryptionResponsePacket, - ); - } - throw new Error( - `Packet ${pkt.type.toString()} from unauth'd client ${this.id}`, - ); - } else { - return await this.handler.handleClientPacketReceived(this, pkt); + private async handlePacketReceived(packet: ClientPacket) { + this.debug("Received packet: " + packet.type.toString()); + switch (packet.type) { + case ServerboundHandshakePacket.TYPE: + await handleHandshake( + this, + packet as ServerboundHandshakePacket, + ); + return; + case ServerboundEncryptionResponsePacket.TYPE: + await handleEncryptionResponse( + this, + packet as ServerboundEncryptionResponsePacket, + ); + return; + case ServerboundChunkTimestampsRequestPacket.TYPE: + case ServerboundCatchupRequestPacket.TYPE: + case ChunkTilePacket.TYPE: + await this.handlers.handleClientPacketReceived(this, packet); + return; + default: + throw new UnexpectedPacket(packet.type.toString()); } } - kick(internalReason: string) { + public kick(internalReason: string) { this.log(`Kicking:`, internalReason); this.socket.end(); } - async send(pkt: ServerPacket) { - if (!this.ciphers) { - this.debug("Not encrypted, dropping packet", pkt.type); - return; - } - if (!this.uuid) { - this.debug("Not authenticated, dropping packet", pkt.type); - return; - } - this.debug(this.mcName + " -> " + pkt.type.toString()); - await this.sendInternal(pkt, true); - } - - private async sendInternal(pkt: ServerPacket, doCrypto = false) { - if (this.socket.readyState <= 0) - return this.debug("Socket closed, dropping", pkt.type); - if (doCrypto && !this.ciphers) - throw new Error(`Can't encrypt: handshake not finished`); - - const writer = new BufferWriter(); // TODO size hint - writer.writeUnt32(0); // set later, but reserve space in buffer - encodePacket(pkt, writer); - let buf = writer.getBuffer(); - buf.writeUInt32BE(buf.length - 4, 0); // write into space reserved above - - if (doCrypto) { - buf = this.ciphers!.encipher.update(buf); - } - - this.socket.write(buf); - } + public async send(packet: ServerPacket) { + const writer = new 
BufferWriter(); + writer.writeUnt32(0); // Placeholder for frame length, will write later + encodePacket(packet, writer); - private async handleHandshakePacket(packet: ServerboundHandshakePacket) { - if (this.ciphers) throw new Error(`Already authenticated`); - if (this.verifyToken) throw new Error(`Encryption already started`); + let buffer = writer.getBuffer(); + buffer.writeUInt32BE(buffer.byteLength - INT32_SIZE, 0); - if (!SUPPORTED_VERSIONS.has(packet.modVersion)) { - this.kick( - "Connected with unsupported version [" + - packet.modVersion + - "]", - ); - return; + if (exists(this.ciphers)) { + buffer = this.ciphers.encipher.update(buffer); } - this.gameAddress = packet.gameAddress; - this.claimedMojangName = packet.mojangName; - this.world = packet.dimension; - this.verifyToken = crypto.randomBytes(4); - - await this.sendInternal( - new ClientboundEncryptionRequestPacket( - crypto.PUBLIC_KEY, - this.verifyToken, - ), - ); + this.socket.write(buffer); } - private async handleEncryptionResponsePacket( - pkt: ServerboundEncryptionResponsePacket, - ) { - if (this.ciphers) throw new Error(`Already authenticated`); - if (!this.claimedMojangName) - throw new Error(`Encryption has not started: no mojangName`); - if (!this.verifyToken) - throw new Error(`Encryption has not started: no verifyToken`); - - const verifyToken = crypto.decrypt(pkt.verifyToken); - if (!this.verifyToken.equals(verifyToken)) { - throw new Error( - `verifyToken mismatch: got ${verifyToken} expected ${this.verifyToken}`, - ); - } - - const secret = crypto.decrypt(pkt.sharedSecret); - - const shaHex = crypto - .createHash("sha1") - .update(secret) - .update(crypto.PUBLIC_KEY) - .digest() - .toString("hex"); - - this.ciphers = await fetchHasJoined({ - username: this.claimedMojangName, - shaHex, - }).then(async (mojangAuth) => { - if (!mojangAuth?.uuid) { - this.kick(`Mojang auth failed`); - throw new Error(`Mojang auth failed`); - } - - this.log("Authenticated as", mojangAuth); - - this.uuid = mojangAuth.uuid; - this.mcName = mojangAuth.name; - this.name += ":" + mojangAuth.name; - - return crypto.createCiphers(secret); - }); - - await this.handler.handleClientAuthenticated(this); - } - - debug(...args: any[]) { + public debug(...args: any[]) { if (process.env.NODE_ENV === "production") return; console.debug(`[${this.name}]`, ...args); } - log(...args: any[]) { + public log(...args: any[]) { console.log(`[${this.name}]`, ...args); } - warn(...args: any[]) { + public warn(...args: any[]) { console.error(`[${this.name}]`, ...args); } } - -async function fetchHasJoined(args: { - username: string; - shaHex: string; - clientIp?: string; -}) { - const { username, shaHex, clientIp } = args; - - // if auth is disabled, return a "usable" item - if ("DISABLE_AUTH" in process.env) - return { name: username, uuid: `AUTH-DISABLED-${username}` }; - - let url = `https://sessionserver.mojang.com/session/minecraft/hasJoined?username=${username}&serverId=${shaHex}`; - if (clientIp) url += `&ip=${clientIp}`; - const res = await fetch(url); - try { - if (res.status === 204) return null; - let { id, name } = (await res.json()) as { id: string; name: string }; - const uuid = id.replace( - /^(........)-?(....)-?(....)-?(....)-?(............)$/, - "$1-$2-$3-$4-$5", - ); - return { uuid, name }; - } catch (err) { - console.error(res); - throw err; - } -} From d429342bb710e84c9ea316f55189413157c644b7 Mon Sep 17 00:00:00 2001 From: Alexander Date: Tue, 10 Jun 2025 23:37:15 +0100 Subject: [PATCH 17/27] Switch to websockets This was caused by Bun 
no longer supporting "RSA_PKCS1_PADDING" or "AES-128-CFB8" ciphers, the former being due to CVE-2023-46809, and the latter probably just being an implementation gap. Either way, since the protocol needs to be changed anyway, might as well switch to websockets and let TLS optionally protect the connection. Okx be having an aneurysm rn. --- mod/common/build.gradle | 3 + .../mapsync/common/CatchupLogic.java | 2 +- .../minecraft/mapsync/common/MapSyncMod.java | 64 +-- .../gjum/minecraft/mapsync/common/ModGui.java | 18 +- .../mapsync/common/net/ClientHandler.java | 62 --- .../common/net/ClientboundPacketDecoder.java | 37 -- .../minecraft/mapsync/common/net/Packet.java | 9 + .../common/net/ServerboundPacketEncoder.java | 28 -- .../mapsync/common/net/SyncAddress.java | 69 +++ .../mapsync/common/net/SyncClient.java | 428 ++++++++++-------- .../net/encryption/EncryptionDecoder.java | 31 -- .../net/encryption/EncryptionEncoder.java | 30 -- .../net/encryption/EncryptionTranslator.java | 48 -- .../packet/ClientboundAuthRequestPacket.java | 28 ++ .../ClientboundEncryptionRequestPacket.java | 42 -- .../net/packet/ClientboundWelcomePacket.java | 19 + .../packet/ServerboundAuthResponsePacket.java | 30 ++ .../ServerboundEncryptionResponsePacket.java | 34 -- .../packet/ServerboundHandshakePacket.java | 2 +- .../src/main/resources/default-config.json | 2 +- mod/fabric/build.gradle | 3 + mod/forge/build.gradle | 3 + server/bun.lock | 7 +- server/package.json | 3 +- server/src/constants.ts | 3 + server/src/main.ts | 18 +- server/src/net/auth.ts | 130 ++---- server/src/net/buffers.ts | 6 +- server/src/net/packets.ts | 42 +- server/src/net/protocol.ts | 49 +- server/src/net/server.ts | 157 +++---- 31 files changed, 631 insertions(+), 776 deletions(-) delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java create mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java create mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java create mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java create mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java delete mode 100644 mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java diff --git a/mod/common/build.gradle b/mod/common/build.gradle index 999ab4b2..b40281c0 100644 --- a/mod/common/build.gradle +++ b/mod/common/build.gradle @@ -20,6 +20,9 @@ dependencies { modCompileOnly("maven.modrinth:journeymap:5JbcGXLn") // https://modrinth.com/mod/xaeros-minimap/version/23.6.2_Fabric_1.18.2 (23.6.2 fabric) modCompileOnly("maven.modrinth:xaeros-minimap:Jwydpps9") + + // https://github.com/TooTallNate/Java-WebSocket + 
compileOnly("org.java-websocket:Java-WebSocket:1.6.0") } tasks { diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java index d75d001a..b71d134b 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/CatchupLogic.java @@ -32,7 +32,7 @@ public void addCatchupChunks(List catchupChunks) { if (catchupChunks.isEmpty()) return; var catchupDim = catchupChunks.get(0).dimension(); if (!dimensionState.dimension.equals(catchupDim)) { - logger.warn("Catchup chunks from wrong dimension " + catchupDim + ", expected " + dimensionState.dimension); + LOGGER.warn("Catchup chunks from wrong dimension " + catchupDim + ", expected " + dimensionState.dimension); return; } synchronized (this.catchupChunks) { diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java index ce47224e..c67236d9 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/MapSyncMod.java @@ -4,13 +4,16 @@ import gjum.minecraft.mapsync.common.config.ModConfig; import gjum.minecraft.mapsync.common.config.ServerConfig; import gjum.minecraft.mapsync.common.data.*; +import gjum.minecraft.mapsync.common.net.SyncAddress; import gjum.minecraft.mapsync.common.net.SyncClient; import gjum.minecraft.mapsync.common.net.packet.*; +import java.util.stream.Collectors; import net.minecraft.client.KeyMapping; import net.minecraft.client.Minecraft; import net.minecraft.client.multiplayer.ServerData; import net.minecraft.network.protocol.game.ClientboundLoginPacket; import net.minecraft.network.protocol.game.ClientboundRespawnPacket; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; @@ -18,7 +21,6 @@ import org.lwjgl.glfw.GLFW; import java.util.*; -import java.util.stream.Collectors; import static gjum.minecraft.mapsync.common.Cartography.chunkTileFromLevel; @@ -27,7 +29,7 @@ public abstract class MapSyncMod { private static final Minecraft mc = Minecraft.getInstance(); - public static final Logger logger = LogManager.getLogger(MapSyncMod.class); + public static final Logger LOGGER = LogManager.getLogger(MapSyncMod.class); private static MapSyncMod INSTANCE; @@ -123,30 +125,41 @@ public void handleRespawn(ClientboundRespawnPacket packet) { if (syncServerAddresses.isEmpty()) return shutDownSyncClients(); // will be filled with clients that are still wanted (address) and are still connected - var existingClients = new HashMap(); + var existingClients = new HashMap(); - for (SyncClient client : syncClients) { - if (client.isShutDown) continue; + for (final SyncClient client : this.syncClients) { + if (client.isShutDown) { + continue; + } // avoid reconnecting to same sync server, to keep shared state (expensive to resync) - if (!client.gameAddress.equals(serverConfig.gameAddress)) { - debugLog("Disconnecting sync client; different game server"); + if (!StringUtils.equals(client.gameAddress, serverConfig.gameAddress)) { + LOGGER.warn("Disconnecting sync client; different game server"); client.shutDown(); - } else if (!syncServerAddresses.contains(client.address)) { - debugLog("Disconnecting sync client; different sync address"); + } + else if 
(!syncServerAddresses.contains(client.syncAddress.toString())) { + LOGGER.warn("Disconnecting sync client; different sync address"); client.shutDown(); - } else { - existingClients.put(client.address, client); + } + else { + existingClients.put(client.syncAddress, client); } } - syncClients = syncServerAddresses.stream().map(address -> { - var client = existingClients.get(address); - if (client == null) client = new SyncClient(address, serverConfig.gameAddress); - client.autoReconnect = true; - return client; - }).collect(Collectors.toList()); - - return syncClients; + this.syncClients = syncServerAddresses.stream() + .map(SyncAddress::of) + .filter(Objects::nonNull) + .distinct() + .map((address) -> { + SyncClient client = existingClients.get(address); + if (client == null) { + client = new SyncClient(address, serverConfig.gameAddress); + } + client.autoReconnect = true; + return client; + }) + .collect(Collectors.toCollection(ArrayList::new)); + + return this.syncClients; } public List shutDownSyncClients() { @@ -214,11 +227,6 @@ public void handleMcChunkPartialChange(int cx, int cz) { // TODO update ChunkTile in a second or so; remember dimension in case it changes til then } - public void handleSyncServerEncryptionSuccess() { - debugLog("tcp encrypted"); - // TODO tell server our current dimension - } - public void handleRegionTimestamps(ClientboundRegionTimestampsPacket packet, SyncClient client) { DimensionState dimension = getDimensionState(); if (dimension == null) return; @@ -258,7 +266,7 @@ public void handleSharedChunk(ChunkTile chunkTile) { public void handleCatchupData(ClientboundChunkTimestampsResponsePacket packet) { var dimensionState = getDimensionState(); if (dimensionState == null) return; - debugLog("received catchup: " + packet.chunks.size() + " " + packet.chunks.get(0).syncClient.address); + debugLog("received catchup: " + packet.chunks.size() + " " + packet.chunks.get(0).syncClient.syncAddress); dimensionState.addCatchupChunks(packet.chunks); } @@ -269,9 +277,9 @@ public void requestCatchupData(List chunks) { } debugLog("requesting more catchup: " + chunks.size()); - var byServer = new HashMap>(); + var byServer = new HashMap>(); for (CatchupChunk chunk : chunks) { - var list = byServer.computeIfAbsent(chunk.syncClient.address, (a) -> new ArrayList<>()); + var list = byServer.computeIfAbsent(chunk.syncClient.syncAddress, (a) -> new ArrayList<>()); list.add(chunk); } for (List chunksForServer : byServer.values()) { @@ -283,7 +291,7 @@ public void requestCatchupData(List chunks) { public static void debugLog(String msg) { // we could also make use of slf4j's debug() but I don't know how to reconfigure that at runtime based on globalConfig if (modConfig.isShowDebugLog()) { - logger.info(msg); + LOGGER.info(msg); } } } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java index a81215c8..3703c17e 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/ModGui.java @@ -2,15 +2,17 @@ import com.mojang.blaze3d.vertex.PoseStack; import gjum.minecraft.mapsync.common.config.ServerConfig; +import java.util.ArrayList; +import java.util.stream.Collectors; +import java.util.stream.Stream; import net.minecraft.client.Minecraft; import net.minecraft.client.gui.components.Button; import net.minecraft.client.gui.components.EditBox; import net.minecraft.client.gui.screens.Screen; import 
net.minecraft.network.chat.TextComponent; +import org.apache.commons.lang3.StringUtils; import org.jetbrains.annotations.NotNull; -import java.util.List; - import static gjum.minecraft.mapsync.common.MapSyncMod.getMod; public class ModGui extends Screen { @@ -78,8 +80,12 @@ protected void init() { public void connectClicked(Button btn) { try { if (syncServerAddressField == null) return; - var addresses = List.of(syncServerAddressField.getValue().split("[^-_.:A-Za-z0-9]+")); - serverConfig.setSyncServerAddresses(addresses); + serverConfig.setSyncServerAddresses( + Stream.of(StringUtils.split(syncServerAddressField.getValue(), ",")) + .map(String::trim) + .filter(StringUtils::isNotEmpty) + .collect(Collectors.toCollection(ArrayList::new)) + ); getMod().shutDownSyncClients(); getMod().getSyncClients(); btn.active = false; @@ -117,7 +123,7 @@ public void render(@NotNull PoseStack poseStack, int i, int j, float f) { for (var client : syncClients) { int statusColor; String statusText; - if (client.isEncrypted()) { + if (client.isEstablished()) { numConnected++; statusColor = 0x008800; statusText = "Connected"; @@ -128,7 +134,7 @@ public void render(@NotNull PoseStack poseStack, int i, int j, float f) { statusColor = 0xffffff; statusText = "Connecting..."; } - statusText = client.address + " " + statusText; + statusText = client.syncAddress + " " + statusText; drawString(poseStack, font, statusText, left, msgY, statusColor); msgY += 10; } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java deleted file mode 100644 index 37e2fa90..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientHandler.java +++ /dev/null @@ -1,62 +0,0 @@ -package gjum.minecraft.mapsync.common.net; - -import gjum.minecraft.mapsync.common.data.CatchupChunk; -import gjum.minecraft.mapsync.common.net.packet.*; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelInboundHandlerAdapter; - -import java.io.IOException; -import java.net.ConnectException; - -import static gjum.minecraft.mapsync.common.MapSyncMod.getMod; - -/** - * tightly coupled to {@link SyncClient} - */ -public class ClientHandler extends ChannelInboundHandlerAdapter { - private final SyncClient client; - - public ClientHandler(SyncClient client) { - this.client = client; - } - - @Override - public void channelRead(ChannelHandlerContext ctx, Object packet) { - try { - if (!client.isEncrypted()) { - if (packet instanceof ClientboundEncryptionRequestPacket pktEncryptionRequest) { - client.setUpEncryption(ctx, pktEncryptionRequest); - } else throw new Error("Expected encryption request, got " + packet); - } else if (packet instanceof ChunkTilePacket pktChunkTile) { - getMod().handleSharedChunk(pktChunkTile.chunkTile); - } else if (packet instanceof ClientboundRegionTimestampsPacket pktRegionTimestamps) { - getMod().handleRegionTimestamps(pktRegionTimestamps, client); - } else if (packet instanceof ClientboundChunkTimestampsResponsePacket pktCatchup) { - for (CatchupChunk chunk : pktCatchup.chunks) { - chunk.syncClient = this.client; - } - getMod().handleCatchupData((ClientboundChunkTimestampsResponsePacket) packet); - } else throw new Error("Expected packet, got " + packet); - } catch (Throwable err) { - err.printStackTrace(); - ctx.close(); - } - } - - @Override - public void exceptionCaught(ChannelHandlerContext ctx, Throwable err) throws Exception { - if (err instanceof IOException && "Connection 
reset by peer".equals(err.getMessage())) return; - if (err instanceof ConnectException && err.getMessage().startsWith("Connection refused: ")) return; - - SyncClient.logger.info("[map-sync] Network Error: " + err); - err.printStackTrace(); - ctx.close(); - super.exceptionCaught(ctx, err); - } - - @Override - public void channelInactive(ChannelHandlerContext ctx) throws Exception { - client.handleDisconnect(new RuntimeException("Channel inactive")); - super.channelInactive(ctx); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java deleted file mode 100644 index aac61eb0..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ClientboundPacketDecoder.java +++ /dev/null @@ -1,37 +0,0 @@ -package gjum.minecraft.mapsync.common.net; - -import gjum.minecraft.mapsync.common.net.packet.*; -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.ReplayingDecoder; -import org.jetbrains.annotations.Nullable; - -import java.util.List; - -public class ClientboundPacketDecoder extends ReplayingDecoder { - public static @Nullable Packet constructServerPacket(int id, ByteBuf buf) { - if (id == ChunkTilePacket.PACKET_ID) return ChunkTilePacket.read(buf); - if (id == ClientboundEncryptionRequestPacket.PACKET_ID) return ClientboundEncryptionRequestPacket.read(buf); - if (id == ClientboundChunkTimestampsResponsePacket.PACKET_ID) return ClientboundChunkTimestampsResponsePacket.read(buf); - if (id == ClientboundRegionTimestampsPacket.PACKET_ID) return ClientboundRegionTimestampsPacket.read(buf); - return null; - } - - @Override - protected void decode(ChannelHandlerContext ctx, ByteBuf buf, List out) { - try { - byte id = buf.readByte(); - final Packet packet = constructServerPacket(id, buf); - if (packet == null) { - SyncClient.logger.error("[ServerPacketDecoder] " + - "Unknown server packet id " + id + " 0x" + Integer.toHexString(id)); - ctx.close(); - return; - } - out.add(packet); - } catch (Throwable err) { - err.printStackTrace(); - ctx.close(); - } - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java index c9672085..70cc5669 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/Packet.java @@ -81,4 +81,13 @@ static void writeResourceKey( resourceKey.location().toString() ); } + + static void assertNoRemainder( + final @NotNull ByteBuf in + ) { + final int remainder = in.readableBytes(); + if (remainder > 0) { + throw new IllegalStateException("Found [" + remainder + "] remaining bytes!"); + } + } } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java deleted file mode 100644 index 56f8b746..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/ServerboundPacketEncoder.java +++ /dev/null @@ -1,28 +0,0 @@ -package gjum.minecraft.mapsync.common.net; - -import gjum.minecraft.mapsync.common.net.packet.*; -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.MessageToByteEncoder; - -public class ServerboundPacketEncoder extends MessageToByteEncoder { - public static int 
getClientPacketId(Packet packet) { - if (packet instanceof ChunkTilePacket) return ChunkTilePacket.PACKET_ID; - if (packet instanceof ServerboundHandshakePacket) return ServerboundHandshakePacket.PACKET_ID; - if (packet instanceof ServerboundEncryptionResponsePacket) return ServerboundEncryptionResponsePacket.PACKET_ID; - if (packet instanceof ServerboundCatchupRequestPacket) return ServerboundCatchupRequestPacket.PACKET_ID; - if (packet instanceof ServerboundChunkTimestampsRequestPacket) return ServerboundChunkTimestampsRequestPacket.PACKET_ID; - throw new IllegalArgumentException("Unknown client packet class " + packet); - } - - @Override - protected void encode(ChannelHandlerContext ctx, Packet packet, ByteBuf out) { - try { - out.writeByte(getClientPacketId(packet)); - packet.write(out); - } catch (Throwable err) { - err.printStackTrace(); - ctx.close(); - } - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java new file mode 100644 index 00000000..66e5c24f --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncAddress.java @@ -0,0 +1,69 @@ +package gjum.minecraft.mapsync.common.net; + +import java.net.URI; +import java.net.URISyntaxException; +import org.apache.http.client.utils.URIBuilder; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +public record SyncAddress( + @NotNull URI address +) { + public SyncAddress( + final @NotNull URI address + ) { + final var builder = new URIBuilder(); + + final String scheme = address.getScheme(); + if (scheme == null) { + throw new IllegalArgumentException("Must specify a scheme (ws/wss)!"); + } + builder.setScheme(switch (scheme) { + case "ws", "wss", "http", "https" -> scheme; + default -> throw new IllegalArgumentException("Only ws/wss is permitted!"); + }); + builder.setHost(address.getHost()); + builder.setPort(address.getPort()); + + try { + this.address = builder.build(); + } + catch (final URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + + @Override + public @NotNull String toString() { + return address().toString(); + } + + public static @Nullable SyncAddress of( + final URI syncAddress + ) { + if (syncAddress == null) { + return null; + } + try { + return new SyncAddress(syncAddress); + } + catch (final IllegalArgumentException e) { + return null; + } + } + + public static @Nullable SyncAddress of( + String syncAddress + ) { + if (syncAddress == null) { + return null; + } + syncAddress = syncAddress.trim(); + try { + return of(new URI(syncAddress)); + } + catch (final URISyntaxException e) { + return null; + } + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java index 5c3f9d55..350f065f 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java @@ -2,36 +2,40 @@ import com.mojang.authlib.exceptions.AuthenticationException; import gjum.minecraft.mapsync.common.MapSyncMod; +import gjum.minecraft.mapsync.common.data.CatchupChunk; import gjum.minecraft.mapsync.common.data.ChunkTile; -import gjum.minecraft.mapsync.common.net.encryption.EncryptionDecoder; -import gjum.minecraft.mapsync.common.net.encryption.EncryptionEncoder; -import gjum.minecraft.mapsync.common.net.packet.*; +import 
gjum.minecraft.mapsync.common.net.packet.ChunkTilePacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundChunkTimestampsResponsePacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundAuthRequestPacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundRegionTimestampsPacket; +import gjum.minecraft.mapsync.common.net.packet.ClientboundWelcomePacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundCatchupRequestPacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundChunkTimestampsRequestPacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundAuthResponsePacket; +import gjum.minecraft.mapsync.common.net.packet.ServerboundHandshakePacket; import gjum.minecraft.mapsync.common.utils.Hasher; -import io.netty.bootstrap.Bootstrap; -import io.netty.channel.*; -import io.netty.channel.nio.NioEventLoopGroup; -import io.netty.channel.socket.SocketChannel; -import io.netty.channel.socket.nio.NioSocketChannel; -import io.netty.handler.codec.LengthFieldBasedFrameDecoder; -import io.netty.handler.codec.LengthFieldPrepender; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HexFormat; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.ThreadLocalRandom; import net.minecraft.client.Minecraft; import net.minecraft.client.User; import net.minecraft.world.level.ChunkPos; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.apache.commons.lang3.StringUtils; +import org.java_websocket.client.WebSocketClient; +import org.java_websocket.enums.ReadyState; +import org.java_websocket.handshake.ServerHandshake; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; - -import javax.crypto.*; -import javax.crypto.spec.SecretKeySpec; -import java.security.*; -import java.util.*; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import static gjum.minecraft.mapsync.common.MapSyncMod.debugLog; -import static gjum.minecraft.mapsync.common.MapSyncMod.getMod; +import org.jetbrains.annotations.UnknownNullability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * handles reconnection, authentication, encryption @@ -42,7 +46,7 @@ public class SyncClient { public synchronized void sendChunkTile(ChunkTile chunkTile) { var serverKnownHash = getServerKnownChunkHash(chunkTile.chunkPos()); if (Arrays.equals(chunkTile.dataHash(), serverKnownHash)) { - debugLog("server already has chunk (hash) " + chunkTile.chunkPos()); + MapSyncMod.debugLog("server already has chunk (hash) " + chunkTile.chunkPos()); return; // server already has this chunk } @@ -62,11 +66,10 @@ public synchronized void setServerKnownChunkHash(ChunkPos chunkPos, byte[] hash) // XXX end of hotfix - public static final Logger logger = LogManager.getLogger(SyncClient.class); - - public int retrySec = 5; + public static final Logger LOGGER = LoggerFactory.getLogger(SyncClient.class); + public static final int RESTART_DELAY = 5; - public final @NotNull String address; + public final @NotNull SyncAddress syncAddress; public final @NotNull String gameAddress; /** @@ -79,118 +82,151 @@ public synchronized void setServerKnownChunkHash(ChunkPos chunkPos, byte[] hash) * and disconnect when coming across this during a check */ public boolean isShutDown = false; - private 
boolean isEncrypted = false; private @Nullable String lastError; /** * limited (on insert) to 199 entries */ - private ArrayList queue = new ArrayList<>(); - private @Nullable Channel channel; - private static @Nullable NioEventLoopGroup workerGroup; - - public SyncClient(@NotNull String address, @NotNull String gameAddress) { - if (!address.contains(":")) address = address + ":12312"; - this.address = address; - this.gameAddress = gameAddress; - connect(); + private final ArrayList queue = new ArrayList<>(); + private final SyncConnection connection; + /** Whether the connection has survived the handshake and login exchange */ + private boolean isEstablished = false; + + public SyncClient( + final @NotNull SyncAddress syncAddress, + final @NotNull String gameAddress + ) { + this.syncAddress = Objects.requireNonNull(syncAddress); + this.gameAddress = Objects.requireNonNull(gameAddress); + this.connection = new SyncConnection(syncAddress); + this.connection.connect(); } - private void connect() { - try { - if (isShutDown) return; - - if (workerGroup != null && !workerGroup.isShuttingDown()) { - // end any tasks of the old connection - workerGroup.shutdownGracefully(); - } - workerGroup = new NioEventLoopGroup(); - isEncrypted = false; - - var bootstrap = new Bootstrap(); - bootstrap.group(workerGroup); - bootstrap.channel(NioSocketChannel.class); - bootstrap.option(ChannelOption.SO_KEEPALIVE, true); - bootstrap.handler(new ChannelInitializer() { - public void initChannel(SocketChannel ch) { - ch.pipeline().addLast( - new LengthFieldPrepender(4), - new LengthFieldBasedFrameDecoder(1 << 15, 0, 4, 0, 4), - new ClientboundPacketDecoder(), - new ServerboundPacketEncoder(), - new ClientHandler(SyncClient.this)); - } - }); - - String[] hostPortArr = address.split(":"); - int port = Integer.parseInt(hostPortArr[1]); - - final var channelFuture = bootstrap.connect(hostPortArr[0], port); - channel = channelFuture.channel(); - channelFuture.addListener(future -> { - if (future.isSuccess()) { - logger.info("[map-sync] Connected to " + address); - channelFuture.channel().writeAndFlush(new ServerboundHandshakePacket( - getMod().getVersion(), - Minecraft.getInstance().getUser().getName(), - gameAddress, - getMod().getDimensionState().dimension.location().toString())); - } else { - handleDisconnect(future.cause()); + private class SyncConnection extends WebSocketClient { + public SyncConnection( + final @NotNull SyncAddress serverUri + ) { + super(serverUri.address()); + } + @Override + public void onOpen( + final @NotNull ServerHandshake handshake + ) { + LOGGER.info("[map-sync] OPENED!"); + INTERNAL_send(new ServerboundHandshakePacket( + MapSyncMod.getMod().getVersion(), + Minecraft.getInstance().getUser().getName(), + SyncClient.this.gameAddress, + MapSyncMod.getMod().getDimensionState().dimension.location().toString() + )); + } + @Override + public void onClose( + final int code, + final @UnknownNullability String reason, + final boolean remote + ) { + LOGGER.info("[map-sync] Closed!"); + SyncClient.this.handleDisconnect(code, reason, remote); + } + @Override + public void onError( + final @NotNull Exception thrown + ) { + LOGGER.warn("[map-sync] Something went wrong", thrown); + SyncClient.this.lastError = thrown.getMessage(); + close(); + } + @Override + public void onMessage( + final @NotNull String message + ) { + LOGGER.warn("[map-sync] Received a string message from the server!"); + SyncClient.this.lastError = "Server sent unsupported packets!"; + SyncClient.this.autoReconnect = false; + 
SyncClient.this.isShutDown = true; + SyncClient.this.isEstablished = false; + close(); + } + @Override + public void onMessage( + @NotNull ByteBuffer bytes + ) { + LOGGER.info("[map-sync] Received bytes!"); + final ByteBuf buf = Unpooled.wrappedBuffer(bytes); + try { + final byte packetId = buf.readByte(); + switch (packetId) { + case ChunkTilePacket.PACKET_ID -> { + final var packet = (ChunkTilePacket) ChunkTilePacket.read(buf); + Packet.assertNoRemainder(buf); + MapSyncMod.getMod().handleSharedChunk(packet.chunkTile); + } + case ClientboundAuthRequestPacket.PACKET_ID -> { + final ClientboundAuthRequestPacket packet = ClientboundAuthRequestPacket.read(buf); + Packet.assertNoRemainder(buf); + handleAuthRequest(this, packet); + } + case ClientboundWelcomePacket.PACKET_ID -> { + final ClientboundWelcomePacket packet = ClientboundWelcomePacket.read(buf); + Packet.assertNoRemainder(buf); + handleWelcome(packet); + } + case ClientboundChunkTimestampsResponsePacket.PACKET_ID -> { + final var packet = (ClientboundChunkTimestampsResponsePacket) ClientboundChunkTimestampsResponsePacket.read(buf); + Packet.assertNoRemainder(buf); + for (CatchupChunk chunk : packet.chunks) { + chunk.syncClient = SyncClient.this; + } + MapSyncMod.getMod().handleCatchupData(packet); + } + case ClientboundRegionTimestampsPacket.PACKET_ID -> { + final var packet = (ClientboundRegionTimestampsPacket) ClientboundRegionTimestampsPacket.read(buf); + Packet.assertNoRemainder(buf); + MapSyncMod.getMod().handleRegionTimestamps(packet, SyncClient.this); + } } - }); - } catch (Throwable e) { - e.printStackTrace(); - handleDisconnect(e); + } + catch (final Exception thrown) { + onError(thrown); + } } } - void handleDisconnect(Throwable err) { - isEncrypted = false; - - if (Minecraft.getInstance().level == null) shutDown(); - - String errMsg = err.getMessage(); - if (errMsg == null) errMsg = err.toString(); - lastError = errMsg; - if (isShutDown) { - logger.warn("[map-sync] Got disconnected from '" + address + "'." + - " Won't retry (has shut down)"); - if (!errMsg.contains("Channel inactive")) err.printStackTrace(); - } else if (!autoReconnect) { - logger.warn("[map-sync] Got disconnected from '" + address + "'." + - " Won't retry (autoReconnect=false)"); - if (!errMsg.contains("Channel inactive")) err.printStackTrace(); - } else if (workerGroup == null) { - logger.warn("[map-sync] Got disconnected from '" + address + "'." + - " Won't retry (workerGroup=null)"); - err.printStackTrace(); - } else { - workerGroup.schedule(this::connect, retrySec, TimeUnit.SECONDS); - - if (!errMsg.startsWith("Connection refused: ")) { // reduce spam - logger.warn("[map-sync] Got disconnected from '" + address + "'." 
+ - " Retrying in " + retrySec + " sec"); - if (!errMsg.contains("Channel inactive")) err.printStackTrace(); - } + public synchronized void connect() { + if (this.isShutDown) { + return; } + if (this.connection.getReadyState() == ReadyState.OPEN) { + this.connection.close(); + } + this.connection.connect(); } - public synchronized void handleEncryptionSuccess() { - if (channel == null) return; + private void handleDisconnect( + final int code, + final @UnknownNullability String reason, + final boolean remote + ) { + this.isEstablished = false; + + if (Minecraft.getInstance().level == null) { + this.isShutDown = true; + } + + if (StringUtils.isNotEmpty(reason)) { + this.lastError = reason; + } - lastError = null; - isEncrypted = true; - getMod().handleSyncServerEncryptionSuccess(); + LOGGER.warn("[map-sync] Got disconnected from '{}': {}", this.syncAddress, this.lastError); - for (Packet packet : queue) { - channel.write(packet); + if (!this.isShutDown && this.autoReconnect && !remote) { + // TODO: Readd auto-reconnect + // workerGroup.schedule(this::connect, retrySec, TimeUnit.SECONDS); } - queue.clear(); - channel.flush(); } - public boolean isEncrypted() { - return isEncrypted; + public boolean isEstablished() { + return this.isEstablished; } public String getError() { @@ -200,92 +236,90 @@ public String getError() { /** * Send if encrypted, or queue and send once encryption is set up. */ - public void send(Packet packet) { - send(packet, true); - } - - /** - * Send if encrypted, or queue and send once encryption is set up. - */ - public synchronized void send(Packet packet, boolean flush) { - try { - if (isEncrypted() && channel != null && channel.isActive()) { - if (flush) channel.writeAndFlush(packet); - else channel.write(packet); - } else { - queue.add(packet); - // don't let the queue occupy too much memory - if (queue.size() > 200) { - logger.warn("[map-sync] Dropping 100 oldest packets from queue"); - queue = queue.stream() - .skip(100) - .collect(Collectors.toCollection(ArrayList::new)); - } + public synchronized void send(Packet packet) { + if (this.connection == null || this.connection.getReadyState() != ReadyState.OPEN) { + this.queue.add(packet); + final int queueSize = this.queue.size(); + if (queueSize > 200) { + final List slice = List.copyOf(this.queue.subList(100, queueSize)); + this.queue.clear(); + this.queue.addAll(slice); } - } catch (Throwable e) { - e.printStackTrace(); + return; } + INTERNAL_send(packet); + } + + private void INTERNAL_send( + final @NotNull Packet packet + ) { + final ByteBuf buf = Unpooled.buffer(); + buf.writeByte(getClientPacketId(packet)); + packet.write(buf); + + final byte[] bytes = new byte[buf.readableBytes()]; + buf.readBytes(bytes); + + this.connection.send(bytes); + } + + private static int getClientPacketId(Packet packet) { + if (packet instanceof ChunkTilePacket) return ChunkTilePacket.PACKET_ID; + if (packet instanceof ServerboundHandshakePacket) return ServerboundHandshakePacket.PACKET_ID; + if (packet instanceof ServerboundAuthResponsePacket) return ServerboundAuthResponsePacket.PACKET_ID; + if (packet instanceof ServerboundCatchupRequestPacket) return ServerboundCatchupRequestPacket.PACKET_ID; + if (packet instanceof ServerboundChunkTimestampsRequestPacket) return ServerboundChunkTimestampsRequestPacket.PACKET_ID; + throw new IllegalArgumentException("Unknown client packet class " + packet); } public synchronized void shutDown() { - isShutDown = true; - if (channel != null) { - channel.disconnect(); - 
channel.eventLoop().shutdownGracefully(); - channel = null; - } - if (workerGroup != null && !workerGroup.isShuttingDown()) { - // this also stops any ongoing reconnect timeout - workerGroup.shutdownGracefully(); - workerGroup = null; - } + this.isShutDown = true; + this.isEstablished = false; + this.connection.close(); } - void setUpEncryption(ChannelHandlerContext ctx, ClientboundEncryptionRequestPacket packet) { + private void handleAuthRequest( + final @NotNull WebSocketClient connection, + final @NotNull ClientboundAuthRequestPacket packet + ) { + final var clientSecret = new byte[Long.BYTES]; + ThreadLocalRandom.current().nextBytes(clientSecret); + + // note that this is different from minecraft (we get no negative hashes) + final String shaHex = HexFormat.of().formatHex(Hasher.sha1() + .update(clientSecret) + .update(packet.serverSecret()) + .generateHash() + ); + + final User session = Minecraft.getInstance().getUser(); try { - byte[] sharedSecret = new byte[16]; - ThreadLocalRandom.current().nextBytes(sharedSecret); - - if (!MapSyncMod.getMod().isDevMode()) { - // note that this is different from minecraft (we get no negative hashes) - final String shaHex = HexFormat.of().formatHex(Hasher.sha1() - .update(sharedSecret) - .update(packet.publicKey.getEncoded()) - .generateHash() - ); - - final User session = Minecraft.getInstance().getUser(); - Minecraft.getInstance().getMinecraftSessionService().joinServer( - session.getGameProfile(), - session.getAccessToken(), - shaHex - ); - } + Minecraft.getInstance().getMinecraftSessionService().joinServer( + session.getGameProfile(), + session.getAccessToken(), + shaHex + ); + } + catch (final AuthenticationException authenticationFailure) { + LOGGER.warn("Failed authentication check!"); + connection.close(); + return; + } - try { - ctx.channel().writeAndFlush(new ServerboundEncryptionResponsePacket( - encrypt(packet.publicKey, sharedSecret), - encrypt(packet.publicKey, packet.verifyToken))); - } catch (NoSuchAlgorithmException | InvalidKeyException | NoSuchPaddingException | BadPaddingException | - IllegalBlockSizeException e) { - shutDown(); - throw new RuntimeException(e); - } + INTERNAL_send(new ServerboundAuthResponsePacket( + clientSecret + )); + } - SecretKey secretKey = new SecretKeySpec(sharedSecret, "AES"); - ctx.pipeline() - .addFirst("encrypt", new EncryptionEncoder(secretKey)) - .addFirst("decrypt", new EncryptionDecoder(secretKey)); + private synchronized void handleWelcome( + final @NotNull ClientboundWelcomePacket packet + ) { + this.isEstablished = true; + this.lastError = null; - handleEncryptionSuccess(); - } catch (AuthenticationException e) { - SyncClient.logger.warn("Auth error: " + e.getMessage(), e); + for (final Packet pendingPacket : List.copyOf(this.queue)) { + INTERNAL_send(pendingPacket); } - } - - private static byte[] encrypt(PublicKey key, byte[] data) throws NoSuchPaddingException, NoSuchAlgorithmException, BadPaddingException, IllegalBlockSizeException, InvalidKeyException { - Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); - cipher.init(Cipher.ENCRYPT_MODE, key); - return cipher.doFinal(data); + this.queue.clear(); } } diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java deleted file mode 100644 index d456051a..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionDecoder.java +++ /dev/null @@ -1,31 +0,0 @@ 
-package gjum.minecraft.mapsync.common.net.encryption; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.MessageToMessageDecoder; - -import javax.crypto.Cipher; -import javax.crypto.ShortBufferException; -import javax.crypto.spec.IvParameterSpec; -import java.security.GeneralSecurityException; -import java.security.Key; -import java.util.List; - -public class EncryptionDecoder extends MessageToMessageDecoder { - private final EncryptionTranslator decryptionCodec; - - public EncryptionDecoder(Key key) { - try { - Cipher cipher = Cipher.getInstance("AES/CFB8/NoPadding"); - cipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(key.getEncoded())); - decryptionCodec = new EncryptionTranslator(cipher); - } catch (GeneralSecurityException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws ShortBufferException { - out.add(decryptionCodec.decipher(ctx, in)); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java deleted file mode 100644 index ef59d71f..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionEncoder.java +++ /dev/null @@ -1,30 +0,0 @@ -package gjum.minecraft.mapsync.common.net.encryption; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; -import io.netty.handler.codec.MessageToByteEncoder; - -import javax.crypto.Cipher; -import javax.crypto.ShortBufferException; -import javax.crypto.spec.IvParameterSpec; -import java.security.GeneralSecurityException; -import java.security.Key; - -public class EncryptionEncoder extends MessageToByteEncoder { - private final EncryptionTranslator encryptionCodec; - - public EncryptionEncoder(Key key) { - try { - Cipher cipher = Cipher.getInstance("AES/CFB8/NoPadding"); - cipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(key.getEncoded())); - encryptionCodec = new EncryptionTranslator(cipher); - } catch (GeneralSecurityException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) throws ShortBufferException { - encryptionCodec.encipher(in, out); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java deleted file mode 100644 index 080afeca..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/encryption/EncryptionTranslator.java +++ /dev/null @@ -1,48 +0,0 @@ -package gjum.minecraft.mapsync.common.net.encryption; - -import io.netty.buffer.ByteBuf; -import io.netty.channel.ChannelHandlerContext; - -import javax.crypto.Cipher; -import javax.crypto.ShortBufferException; - -public class EncryptionTranslator { - private final Cipher cipher; - private byte[] inputBuffer = new byte[0]; - private byte[] outputBuffer = new byte[0]; - - protected EncryptionTranslator(Cipher cipher) { - this.cipher = cipher; - } - - private byte[] bufToBytes(ByteBuf buf) { - int i = buf.readableBytes(); - - if (this.inputBuffer.length < i) { - this.inputBuffer = new byte[i]; - } - - buf.readBytes(this.inputBuffer, 0, i); - return this.inputBuffer; - } - - protected ByteBuf decipher(ChannelHandlerContext ctx, ByteBuf buffer) throws 
ShortBufferException { - int i = buffer.readableBytes(); - byte[] bytes = this.bufToBytes(buffer); - ByteBuf bytebuf = ctx.alloc().heapBuffer(this.cipher.getOutputSize(i)); - bytebuf.writerIndex(this.cipher.update(bytes, 0, i, bytebuf.array(), bytebuf.arrayOffset())); - return bytebuf; - } - - protected void encipher(ByteBuf in, ByteBuf out) throws ShortBufferException { - int i = in.readableBytes(); - byte[] bytes = this.bufToBytes(in); - int j = this.cipher.getOutputSize(i); - - if (this.outputBuffer.length < j) { - this.outputBuffer = new byte[j]; - } - - out.writeBytes(this.outputBuffer, 0, this.cipher.update(bytes, 0, i, this.outputBuffer)); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java new file mode 100644 index 00000000..bb5c5170 --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundAuthRequestPacket.java @@ -0,0 +1,28 @@ +package gjum.minecraft.mapsync.common.net.packet; + +import gjum.minecraft.mapsync.common.net.Packet; +import io.netty.buffer.ByteBuf; +import java.util.Objects; +import org.jetbrains.annotations.NotNull; + +/** + * You will receive this in response to {@link ServerboundHandshakePacket}, and + * will expect a {@link ServerboundAuthResponsePacket} in response. + */ +public record ClientboundAuthRequestPacket( + byte @NotNull [] serverSecret +) implements Packet { + public static final int PACKET_ID = 2; + + public ClientboundAuthRequestPacket { + Objects.requireNonNull(serverSecret); + } + + public static ClientboundAuthRequestPacket read( + final @NotNull ByteBuf buf + ) { + return new ClientboundAuthRequestPacket( + Packet.readIntLengthByteArray(buf) + ); + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java deleted file mode 100644 index 67ff10cc..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundEncryptionRequestPacket.java +++ /dev/null @@ -1,42 +0,0 @@ -package gjum.minecraft.mapsync.common.net.packet; - -import gjum.minecraft.mapsync.common.net.Packet; -import io.netty.buffer.ByteBuf; -import org.jetbrains.annotations.NotNull; - -import java.security.*; -import java.security.spec.InvalidKeySpecException; -import java.security.spec.X509EncodedKeySpec; - -/** - * You will receive this in response to {@link ServerboundHandshakePacket}, and - * will expect a {@link ServerboundEncryptionResponsePacket} in response. 
- */ -public class ClientboundEncryptionRequestPacket implements Packet { - public static final int PACKET_ID = 2; - - public final @NotNull PublicKey publicKey; - public final byte @NotNull [] verifyToken; - - public ClientboundEncryptionRequestPacket(@NotNull PublicKey publicKey, byte @NotNull [] verifyToken) { - this.publicKey = publicKey; - this.verifyToken = verifyToken; - } - - public static Packet read(ByteBuf buf) { - return new ClientboundEncryptionRequestPacket( - readKey(buf), - Packet.readIntLengthByteArray(buf)); - } - - protected static PublicKey readKey(ByteBuf in) { - try { - byte[] encodedKey = Packet.readIntLengthByteArray(in); - X509EncodedKeySpec keySpec = new X509EncodedKeySpec(encodedKey); - KeyFactory keyFactory = KeyFactory.getInstance("RSA"); - return keyFactory.generatePublic(keySpec); - } catch (NoSuchAlgorithmException | InvalidKeySpecException e) { - throw new RuntimeException(e); - } - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java new file mode 100644 index 00000000..1c533bbb --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ClientboundWelcomePacket.java @@ -0,0 +1,19 @@ +package gjum.minecraft.mapsync.common.net.packet; + +import gjum.minecraft.mapsync.common.net.Packet; +import io.netty.buffer.ByteBuf; +import org.jetbrains.annotations.NotNull; + +/** + * You will receive this in response to {@link ServerboundHandshakePacket}, and + * will expect a {@link ServerboundAuthResponsePacket} in response. + */ +public record ClientboundWelcomePacket() implements Packet { + public static final int PACKET_ID = 9; + + public static ClientboundWelcomePacket read( + final @NotNull ByteBuf buf + ) { + return new ClientboundWelcomePacket(); + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java new file mode 100644 index 00000000..7850d022 --- /dev/null +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundAuthResponsePacket.java @@ -0,0 +1,30 @@ +package gjum.minecraft.mapsync.common.net.packet; + +import gjum.minecraft.mapsync.common.net.Packet; +import io.netty.buffer.ByteBuf; +import java.util.Objects; +import org.jetbrains.annotations.NotNull; + +/** + * This is sent to the server in response to a {@link ClientboundAuthRequestPacket}, + * after which, if the connection persists, you are considered authenticated + * with the server. You should then receive a {@link ClientboundRegionTimestampsPacket}. 
+ * + * @param clientSecret encrypted with server's public key + */ +public record ServerboundAuthResponsePacket( + byte @NotNull [] clientSecret +) implements Packet { + public static final int PACKET_ID = 3; + + public ServerboundAuthResponsePacket { + Objects.requireNonNull(clientSecret); + } + + @Override + public void write( + final @NotNull ByteBuf out + ) { + Packet.writeIntLengthByteArray(out, clientSecret()); + } +} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java deleted file mode 100644 index e769f4c4..00000000 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundEncryptionResponsePacket.java +++ /dev/null @@ -1,34 +0,0 @@ -package gjum.minecraft.mapsync.common.net.packet; - -import gjum.minecraft.mapsync.common.net.Packet; -import io.netty.buffer.ByteBuf; -import org.jetbrains.annotations.NotNull; - -/** - * This is sent to the server in response to a {@link ClientboundEncryptionRequestPacket}, - * after which, if the connection persists, you are considered authenticated - * with the server. You should then receive a {@link ClientboundRegionTimestampsPacket}. - */ -public class ServerboundEncryptionResponsePacket implements Packet { - public static final int PACKET_ID = 3; - - /** - * encrypted with server's public key - */ - public final byte[] sharedSecret; - /** - * encrypted with server's public key - */ - public final byte[] verifyToken; - - public ServerboundEncryptionResponsePacket(byte[] sharedSecret, byte[] verifyToken) { - this.sharedSecret = sharedSecret; - this.verifyToken = verifyToken; - } - - @Override - public void write(@NotNull ByteBuf out) { - Packet.writeIntLengthByteArray(out, sharedSecret); - Packet.writeIntLengthByteArray(out, verifyToken); - } -} diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java index e1bbc895..a5bccfb4 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/packet/ServerboundHandshakePacket.java @@ -6,7 +6,7 @@ /** * This should be sent to the server IMMEDIATELY upon connection. If the - * server accepts the connection, you will receive a {@link ClientboundEncryptionRequestPacket}. + * server accepts the connection, you will receive a {@link ClientboundAuthRequestPacket}. 
*/ public class ServerboundHandshakePacket implements Packet { public static final int PACKET_ID = 1; diff --git a/mod/common/src/main/resources/default-config.json b/mod/common/src/main/resources/default-config.json index c86f8511..054e88e0 100644 --- a/mod/common/src/main/resources/default-config.json +++ b/mod/common/src/main/resources/default-config.json @@ -2,7 +2,7 @@ "servers": { "localhost:25565": { "syncServerAddresses": [ - "localhost:12312" + "ws://localhost:12312" ] } } diff --git a/mod/fabric/build.gradle b/mod/fabric/build.gradle index 955500fd..57aea303 100644 --- a/mod/fabric/build.gradle +++ b/mod/fabric/build.gradle @@ -26,6 +26,9 @@ dependencies { // https://modrinth.com/mod/modmenu/version/3.2.5 (3.2.5 fabric) modCompileOnly("maven.modrinth:modmenu:nVxObSbX") + + // https://github.com/TooTallNate/Java-WebSocket + include("org.java-websocket:Java-WebSocket:1.6.0") } processResources { diff --git a/mod/forge/build.gradle b/mod/forge/build.gradle index 24ccefbd..82bf3ab9 100644 --- a/mod/forge/build.gradle +++ b/mod/forge/build.gradle @@ -26,6 +26,9 @@ dependencies { common(project(path: ":common", configuration: "namedElements")) { transitive false } shadowCommon(project(path: ":common", configuration: "transformProductionForge")) { transitive false } + + // https://github.com/TooTallNate/Java-WebSocket + include("org.java-websocket:Java-WebSocket:1.6.0") } processResources { diff --git a/server/bun.lock b/server/bun.lock index c3774d68..45e719e0 100644 --- a/server/bun.lock +++ b/server/bun.lock @@ -6,8 +6,7 @@ "dependencies": { "async-mutex": "^0.4.0", "kysely": "^0.26.1", - "zod": "^3.21.4", - "zod-validation-error": "^1.3.1", + "zod": "^3.25.57", }, "devDependencies": { "@types/bun": "^1.2.15", @@ -35,8 +34,6 @@ "undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], - "zod": ["zod@3.25.56", "", {}, "sha512-rd6eEF3BTNvQnR2e2wwolfTmUTnp70aUTqr0oaGbHifzC3BKJsoV+Gat8vxUMR1hwOKBs6El+qWehrHbCpW6SQ=="], - - "zod-validation-error": ["zod-validation-error@1.5.0", "", { "peerDependencies": { "zod": "^3.18.0" } }, "sha512-/7eFkAI4qV0tcxMBB/3+d2c1P6jzzZYdYSlBuAklzMuCrJu5bzJfHS0yVAS87dRHVlhftd6RFJDIvv03JgkSbw=="], + "zod": ["zod@3.25.57", "", {}, "sha512-6tgzLuwVST5oLUxXTmBqoinKMd3JeesgbgseXeFasKKj8Q1FCZrHnbqJOyiEvr4cVAlbug+CgIsmJ8cl/pU5FA=="], } } diff --git a/server/package.json b/server/package.json index 6a80faa1..6051c376 100644 --- a/server/package.json +++ b/server/package.json @@ -17,8 +17,7 @@ "dependencies": { "async-mutex": "^0.4.0", "kysely": "^0.26.1", - "zod": "^3.21.4", - "zod-validation-error": "^1.3.1" + "zod": "^3.25.57" }, "devDependencies": { "@types/bun": "^1.2.15", diff --git a/server/src/constants.ts b/server/src/constants.ts index 94161821..6fc5aab6 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -6,3 +6,6 @@ export const SUPPORTED_VERSIONS = new Set([ // SHA1 produces 160-bit (20-byte) hashes // https://en.wikipedia.org/wiki/SHA-1 export const SHA1_HASH_LENGTH = 20; + +export const UUID_REGEX = + /^(........)-?(....)-?(....)-?(....)-?(............)$/; diff --git a/server/src/main.ts b/server/src/main.ts index ca9ec3e3..270d6e9a 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,7 +1,11 @@ import "./cli.ts"; import * as database from "./database.ts"; import * as metadata from "./metadata.ts"; -import { type ClientPacket, UnexpectedPacket } from "./net/protocol.ts"; +import { + type ClientPacket, + encodePacketToBytes, + 
UnexpectedPacket, +} from "./net/protocol.ts"; import { type ProtocolHandler, TcpClient, TcpServer } from "./net/server.ts"; import { ChunkTilePacket, @@ -43,7 +47,9 @@ Promise.resolve().then(async () => { config.whitelist && !metadata.whitelist.has(client.auth.uuid) ) { - client.kick(`Not whitelisted!`); + client.kick( + `Not whitelisted! [${Bun.inspect(client.auth)}]`, + ); return; } } @@ -110,10 +116,12 @@ Promise.resolve().then(async () => { } // TODO small timeout, then skip if other client already has it + const packetRaw = encodePacketToBytes(packet); await Promise.allSettled( - Object.values(server.clients) + server.clients + .values() .filter((other) => other !== client && isAuthed(other)) - .map((other) => other.send(packet)), + .map((other) => other.sendRaw(packet.type, packetRaw)), ); // TODO queue tile render for web map @@ -172,7 +180,7 @@ Promise.resolve().then(async () => { z: region.regionZ, })), ); - if (chunks.length) { + if (chunks.length > 0) { await client.send( new ClientboundChunkTimestampsResponsePacket( packet.dimension, diff --git a/server/src/net/auth.ts b/server/src/net/auth.ts index 73737653..f0632569 100644 --- a/server/src/net/auth.ts +++ b/server/src/net/auth.ts @@ -1,24 +1,17 @@ import node_crypto from "node:crypto"; -import { z } from "zod"; -import { fromZodError } from "zod-validation-error"; +import { z } from "zod/v4"; import { type TcpClient } from "./server.ts"; import { - ClientboundEncryptionRequestPacket, - ServerboundEncryptionResponsePacket, + ClientboundAuthRequestPacket, + ClientboundWelcomePacket, + ServerboundAuthResponsePacket, type ServerboundHandshakePacket, } from "./packets.ts"; import { UnexpectedPacket } from "./protocol.ts"; -import { SUPPORTED_VERSIONS } from "../constants.ts"; - -const KEY_PAIR = node_crypto.generateKeyPairSync("rsa", { - modulusLength: 1024, -}); -const PUBLIC_KEY = KEY_PAIR.publicKey.export({ - type: "spki", - format: "der", -}); +import { SUPPORTED_VERSIONS, UUID_REGEX } from "../constants.ts"; +import { INT64_SIZE } from "../lang.ts"; // ============================================================ // Handshake @@ -45,89 +38,58 @@ export async function handleHandshake( return; } - client.claimedMojangUsername = packet.mojangName; client.gameAddress = packet.gameAddress; client.dimension = packet.dimension; - const verifyToken = node_crypto.randomBytes(4); + if (Bun.env["MAPSYNC_DISABLE_AUTH"] === "true") { + client.auth = new OfflineAuth(packet.mojangName); + client.name += "?:" + packet.mojangName; + await client.send(new ClientboundWelcomePacket()); + return; + } - client.auth = new AwaitingEncryptionResponse(verifyToken); - await client.send( - new ClientboundEncryptionRequestPacket(PUBLIC_KEY, verifyToken), - ); + const serverSecret = node_crypto.randomBytes(INT64_SIZE); + client.auth = new AwaitingAuthResponse(serverSecret, packet.mojangName); + await client.send(new ClientboundAuthRequestPacket(serverSecret)); } // ============================================================ // Encryption Response // ============================================================ -export function decrypt(buf: Buffer): Buffer { - return node_crypto.privateDecrypt( - { - key: KEY_PAIR.privateKey, - padding: node_crypto.constants.RSA_PKCS1_PADDING, - }, - buf, - ); -} - -class AwaitingEncryptionResponse { - public constructor(public readonly verifyToken: Buffer) {} +class AwaitingAuthResponse { + public constructor( + public readonly serverSecret: Buffer, + public readonly claimedMojangUsername: string, + ) {} } -export async 
function handleEncryptionResponse( +export async function handleAuthResponse( client: TcpClient, - packet: ServerboundEncryptionResponsePacket, + packet: ServerboundAuthResponsePacket, ) { - if (!(client.auth instanceof AwaitingEncryptionResponse)) { + if (!(client.auth instanceof AwaitingAuthResponse)) { throw new UnexpectedPacket(packet.type.toString()); } - const decryptedVerifyToken = decrypt(packet.verifyToken); - if (!client.auth.verifyToken.equals(decryptedVerifyToken)) { - client.kick("verifyToken does not match!"); - client.debug( - `Expected [${client.auth.verifyToken.toHex()}], received [${decryptedVerifyToken.toHex()}]`, - ); + const auth = await fetchHasJoined( + client, + client.auth.claimedMojangUsername, + node_crypto + .createHash("sha1") + .update(packet.clientSecret) + .update(client.auth.serverSecret) + .digest() + .toString("hex"), + ); + if (auth === null) { + client.kick("Not authenticated!"); return; } - const decryptedSharedSecret = decrypt(packet.sharedSecret); - client.ciphers = { - encipher: node_crypto.createCipheriv( - "aes-128-cfb8", - decryptedSharedSecret, - decryptedSharedSecret, - ), - decipher: node_crypto.createDecipheriv( - "aes-128-cfb8", - decryptedSharedSecret, - decryptedSharedSecret, - ), - }; - client.debug("Connection is now encrypted!"); - - if (Bun.env["MAPSYNC_DISABLE_AUTH"] === "true") { - client.auth = new OfflineAuth(client.claimedMojangUsername!); - client.name += "?:" + client.claimedMojangUsername!; - } else { - const auth = await fetchHasJoined( - client, - node_crypto - .createHash("sha1") - .update(decryptedSharedSecret) - .update(PUBLIC_KEY) - .digest() - .toString("hex"), - ); - if (auth === null) { - client.kick("Not authenticated!"); - return; - } - - client.auth = new OnlineAuth(auth.name, auth.uuid); - client.name += ":" + auth.name; - } + client.auth = new OnlineAuth(auth.name, auth.uuid); + client.name += ":" + auth.name; + await client.send(new ClientboundWelcomePacket()); await client.handlers.handleClientAuthenticated(client); } @@ -160,18 +122,19 @@ export function requireAuth(client: TcpClient) { } const MOJANG_AUTH_RESPONSE_SCHEMA = z.object({ - id: z.string().uuid(), + id: z.string().regex(UUID_REGEX), name: z.string(), }); async function fetchHasJoined( client: TcpClient, + username: string, shaHex: string, ): Promise<{ name: string; uuid: string; } | null> { - let url = `https://sessionserver.mojang.com/session/minecraft/hasJoined?username=${client.claimedMojangUsername!}&serverId=${shaHex}`; + let url = `https://sessionserver.mojang.com/session/minecraft/hasJoined?username=${username}&serverId=${shaHex}`; let response: Response; try { @@ -196,18 +159,13 @@ async function fetchHasJoined( try { auth = MOJANG_AUTH_RESPONSE_SCHEMA.parse(raw); } catch (error) { - client.warn( - "Could not validate auth response!", - fromZodError(error as z.ZodError), - ); + client.warn("Could not validate auth response!"); + client.warn(z.prettifyError(error as z.ZodError)); return null; } return { name: auth.name, - uuid: auth.id.replace( - /^(........)-?(....)-?(....)-?(....)-?(............)$/, - "$1-$2-$3-$4-$5", - ), + uuid: auth.id.replace(UUID_REGEX, "$1-$2-$3-$4-$5"), }; } diff --git a/server/src/net/buffers.ts b/server/src/net/buffers.ts index d822902f..6cc39e26 100644 --- a/server/src/net/buffers.ts +++ b/server/src/net/buffers.ts @@ -87,6 +87,10 @@ export class BufferReader { public constructor(private readonly buffer: Buffer) {} + public get remainder(): number { + return this.buffer.length - this.offset; + } + public 
readUnt8(): number { const val = this.buffer.readUInt8(this.offset); this.offset += 1; @@ -164,6 +168,6 @@ export class BufferReader { /** any reads after this will fail */ public readRemainder(): Buffer { - return this.readBufLen(this.buffer.length - this.offset); + return this.readBufLen(this.remainder); } } diff --git a/server/src/net/packets.ts b/server/src/net/packets.ts index 7e824bc5..842d97f9 100644 --- a/server/src/net/packets.ts +++ b/server/src/net/packets.ts @@ -35,42 +35,38 @@ export class ServerboundHandshakePacket implements Packet { } } -export class ClientboundEncryptionRequestPacket implements Packet { - public static readonly TYPE = Symbol("ClientboundEncryptionRequestPacket"); +export class ClientboundAuthRequestPacket implements Packet { + public static readonly TYPE = Symbol("ClientboundAuthRequestPacket"); - public readonly type = ClientboundEncryptionRequestPacket.TYPE; + public readonly type = ClientboundAuthRequestPacket.TYPE; - public constructor( - public readonly publicKey: Buffer, - public readonly verifyToken: Buffer, - ) {} + public constructor(public readonly serverSecret: Buffer) {} public encode(writer: BufferWriter) { - writer.writeBufWithLen(this.publicKey); - writer.writeBufWithLen(this.verifyToken); + writer.writeBufWithLen(this.serverSecret); } } -export class ServerboundEncryptionResponsePacket implements Packet { - public static readonly TYPE = Symbol("ServerboundEncryptionResponsePacket"); +export class ServerboundAuthResponsePacket implements Packet { + public static readonly TYPE = Symbol("ServerboundAuthResponsePacket"); - public readonly type = ServerboundEncryptionResponsePacket.TYPE; + public readonly type = ServerboundAuthResponsePacket.TYPE; - public constructor( - public readonly sharedSecret: Buffer, - public readonly verifyToken: Buffer, - ) {} + public constructor(public readonly clientSecret: Buffer) {} - public static decode( - reader: BufferReader, - ): ServerboundEncryptionResponsePacket { - return new ServerboundEncryptionResponsePacket( - reader.readBufWithLen(), - reader.readBufWithLen(), - ); + public static decode(reader: BufferReader): ServerboundAuthResponsePacket { + return new ServerboundAuthResponsePacket(reader.readBufWithLen()); } } +export class ClientboundWelcomePacket implements Packet { + public static readonly TYPE = Symbol("Welcome"); + + public readonly type = ClientboundWelcomePacket.TYPE; + + public encode(writer: BufferWriter) {} +} + export class ClientboundRegionTimestampsPacket implements Packet { public static readonly TYPE = Symbol("ClientboundRegionTimestampsPacket"); diff --git a/server/src/net/protocol.ts b/server/src/net/protocol.ts index 50f91a17..9de378ee 100644 --- a/server/src/net/protocol.ts +++ b/server/src/net/protocol.ts @@ -1,43 +1,46 @@ import { BufferWriter, BufferReader } from "./buffers.ts"; import { ChunkTilePacket, - ClientboundEncryptionRequestPacket, + ClientboundAuthRequestPacket, ClientboundRegionTimestampsPacket, ServerboundChunkTimestampsRequestPacket, - ServerboundEncryptionResponsePacket, + ServerboundAuthResponsePacket, ServerboundHandshakePacket, ClientboundChunkTimestampsResponsePacket, ServerboundCatchupRequestPacket, + ClientboundWelcomePacket, } from "./packets.ts"; export type ClientPacket = | ChunkTilePacket - | ServerboundEncryptionResponsePacket + | ServerboundAuthResponsePacket | ServerboundHandshakePacket | ServerboundCatchupRequestPacket | ServerboundChunkTimestampsRequestPacket; export type ServerPacket = | ChunkTilePacket - | ClientboundEncryptionRequestPacket + | 
ClientboundAuthRequestPacket | ClientboundChunkTimestampsResponsePacket - | ClientboundRegionTimestampsPacket; + | ClientboundRegionTimestampsPacket + | ClientboundWelcomePacket; export const packetIds = [ "ERROR:pkt0", ServerboundHandshakePacket.TYPE, - ClientboundEncryptionRequestPacket.TYPE, - ServerboundEncryptionResponsePacket.TYPE, + ClientboundAuthRequestPacket.TYPE, + ServerboundAuthResponsePacket.TYPE, ChunkTilePacket.TYPE, ClientboundChunkTimestampsResponsePacket.TYPE, ServerboundCatchupRequestPacket.TYPE, ClientboundRegionTimestampsPacket.TYPE, ServerboundChunkTimestampsRequestPacket.TYPE, + ClientboundWelcomePacket.TYPE, ]; export function getPacketId(type: ServerPacket["type"]) { const id = packetIds.indexOf(type); - if (id === -1) throw new Error(`Unknown packet type ${type.toString()}`); + if (id <= 0) throw new Error(`Unknown packet type ${type.toString()}`); return id; } @@ -48,8 +51,8 @@ export function decodePacket(reader: BufferReader): ClientPacket { return ChunkTilePacket.decode(reader); case ServerboundHandshakePacket.TYPE: return ServerboundHandshakePacket.decode(reader); - case ServerboundEncryptionResponsePacket.TYPE: - return ServerboundEncryptionResponsePacket.decode(reader); + case ServerboundAuthResponsePacket.TYPE: + return ServerboundAuthResponsePacket.decode(reader); case ServerboundCatchupRequestPacket.TYPE: return ServerboundCatchupRequestPacket.decode(reader); case ServerboundChunkTimestampsRequestPacket.TYPE: @@ -59,24 +62,32 @@ export function decodePacket(reader: BufferReader): ClientPacket { } } -export function encodePacket(pkt: ServerPacket, writer: BufferWriter): void { - writer.writeUnt8(getPacketId(pkt.type)); - switch (pkt.type) { +export function encodePacket(packet: ServerPacket, writer: BufferWriter): void { + writer.writeUnt8(getPacketId(packet.type)); + switch (packet.type) { case ChunkTilePacket.TYPE: - return (pkt as ChunkTilePacket).encode(writer); + return (packet as ChunkTilePacket).encode(writer); case ClientboundChunkTimestampsResponsePacket.TYPE: - return (pkt as ClientboundChunkTimestampsResponsePacket).encode( + return (packet as ClientboundChunkTimestampsResponsePacket).encode( writer, ); - case ClientboundEncryptionRequestPacket.TYPE: - return (pkt as ClientboundEncryptionRequestPacket).encode(writer); + case ClientboundAuthRequestPacket.TYPE: + return (packet as ClientboundAuthRequestPacket).encode(writer); + case ClientboundWelcomePacket.TYPE: + return (packet as ClientboundWelcomePacket).encode(writer); case ClientboundRegionTimestampsPacket.TYPE: - return (pkt as ClientboundRegionTimestampsPacket).encode(writer); + return (packet as ClientboundRegionTimestampsPacket).encode(writer); default: - throw new Error(`Unknown packet type ${(pkt as any).type}`); + throw new Error(`Unknown packet type ${(packet as any).type}`); } } +export function encodePacketToBytes(packet: ServerPacket): Buffer { + const writer = new BufferWriter(); + encodePacket(packet, writer); + return writer.getBuffer(); +} + export class UnexpectedPacket extends Error { public constructor(message?: string) { super(message); diff --git a/server/src/net/server.ts b/server/src/net/server.ts index 753d71a2..e21df3c2 100644 --- a/server/src/net/server.ts +++ b/server/src/net/server.ts @@ -1,26 +1,24 @@ -import { listen, type Socket, type TCPSocketListener } from "bun"; +import { serve, type Server, type ServerWebSocket } from "bun"; -import node_crypto from "node:crypto"; - -import { exists, INT32_SIZE } from "../lang.ts"; +import { exists } from "../lang.ts"; 
import { type ClientPacket, decodePacket, - encodePacket, + encodePacketToBytes, type ServerPacket, UnexpectedPacket, } from "./protocol.ts"; -import { BufferReader, BufferWriter } from "./buffers.ts"; +import { BufferReader } from "./buffers.ts"; import { ChunkTilePacket, ServerboundCatchupRequestPacket, ServerboundChunkTimestampsRequestPacket, - ServerboundEncryptionResponsePacket, + ServerboundAuthResponsePacket, ServerboundHandshakePacket, } from "./packets.ts"; import { handleConnected, - handleEncryptionResponse, + handleAuthResponse, handleHandshake, } from "./auth.ts"; @@ -38,8 +36,8 @@ export interface ProtocolHandler { } export class TcpServer { - public readonly server: TCPSocketListener; - public readonly clients: Record = {}; + public readonly server: Server; + public readonly clients = new Map(); public constructor( host: string, @@ -47,37 +45,76 @@ export class TcpServer { public readonly handlers: ProtocolHandler, ) { const self = this; - this.server = listen({ + + this.server = serve({ hostname: host, port: port, - socket: { - binaryType: "buffer", + async fetch(req, server) { + const url = URL.parse(req.url); + if (url === null) { + return new Response(null, { + status: 400, + }); + } + if (url.pathname !== "/") { + return new Response(null, { + status: 404, + }); + } + if (!server.upgrade(req)) { + return new Response(null, { + status: 426, + }); + } + // Bun automatically returns a 101 Switching Protocols + return undefined; + }, + websocket: { async open(socket) { const client = new TcpClient(socket, self.handlers); - self.clients[client.id] = socket.data = client; + self.clients.set(client.id, (socket.data = client)); await handleConnected(client); await self.handlers.handleClientConnected(client); + client.log("Connected"); }, async close(socket, err) { const client: TcpClient = socket.data; - delete self.clients[client.id]; + self.clients.delete(client.id); if (exists(err)) { client.warn(`Closed due to an error!`, err); } await self.handlers.handleClientDisconnected(client); + client.log("Disconnected"); }, - async data(socket, data) { + async message(socket, message) { const client: TcpClient = socket.data; - await client.handleReceivedData(data); + if (typeof message === "string") { + socket.close(1003, "String messages are not supported"); + return; + } + try { + const reader = new BufferReader(message); + const packet = decodePacket(reader); + const remainder = reader.remainder; + if (remainder > 0) { + throw new Error( + `Packet did not consume all data! 
Remainder: [${remainder}]`, + ); + } + await client.handlePacketReceived(packet); + } catch (err) { + client.warn(err); + client.kick("Error in packet handler"); + return; + } }, }, }); - console.log("[TcpServer] Listening on", host, port); + console.log("[WsServer] Listening on", host, port); } } let nextClientId = 1; -const MAX_FRAME_SIZE = 2 ** 15; /** Prefixes packets with their length (UInt32BE); * handles Mojang authentication */ @@ -86,70 +123,18 @@ export class TcpClient { /** contains mojang name once logged in */ public name = "Client" + this.id; - public claimedMojangUsername: string | null = null; public gameAddress: string | null = null; public dimension: string | null = null; /** sent by client during handshake */ public auth: any; - public ciphers: { - encipher: node_crypto.Cipheriv; - decipher: node_crypto.Decipheriv; - } | null = null; public constructor( - private socket: Socket, + private socket: ServerWebSocket, public handlers: ProtocolHandler, - ) { - this.log("Connected from", socket.remoteAddress); - } - - static readonly #EMPTY_BUFFER = Buffer.allocUnsafe(0); - #receivedBuffer: Buffer = TcpClient.#EMPTY_BUFFER; - public async handleReceivedData(data: Buffer) { - if (exists(this.ciphers)) { - data = this.ciphers.decipher.update(data); - } + ) {} - // creating a new buffer every time is fine in our case, because we expect most frames to be large - this.#receivedBuffer = Buffer.concat([this.#receivedBuffer, data]); - - // we may receive multiple frames in one call - while (true) { - if (this.#receivedBuffer.byteLength <= INT32_SIZE) return; // wait for more data - const frameSize = this.#receivedBuffer.readUInt32BE(); - - // prevent Out of Memory - if (frameSize > MAX_FRAME_SIZE) { - return this.kick( - "Frame too large: " + - frameSize + - " have " + - this.#receivedBuffer.byteLength, - ); - } - - if (this.#receivedBuffer.byteLength < INT32_SIZE + frameSize) - return; // wait for more data - - const frameReader = new BufferReader( - this.#receivedBuffer.subarray(INT32_SIZE), - ); - const packetBuffer = frameReader.readBufLen(frameSize); - this.#receivedBuffer = frameReader.readRemainder(); - - try { - const packet = decodePacket(new BufferReader(packetBuffer)); - await this.handlePacketReceived(packet); - } catch (err) { - this.warn(err); - this.kick("Error in packet handler"); - return; - } - } - } - - private async handlePacketReceived(packet: ClientPacket) { + async handlePacketReceived(packet: ClientPacket) { this.debug("Received packet: " + packet.type.toString()); switch (packet.type) { case ServerboundHandshakePacket.TYPE: @@ -158,10 +143,10 @@ export class TcpClient { packet as ServerboundHandshakePacket, ); return; - case ServerboundEncryptionResponsePacket.TYPE: - await handleEncryptionResponse( + case ServerboundAuthResponsePacket.TYPE: + await handleAuthResponse( this, - packet as ServerboundEncryptionResponsePacket, + packet as ServerboundAuthResponsePacket, ); return; case ServerboundChunkTimestampsRequestPacket.TYPE: @@ -176,22 +161,16 @@ export class TcpClient { public kick(internalReason: string) { this.log(`Kicking:`, internalReason); - this.socket.end(); + this.socket.close(); } public async send(packet: ServerPacket) { - const writer = new BufferWriter(); - writer.writeUnt32(0); // Placeholder for frame length, will write later - encodePacket(packet, writer); - - let buffer = writer.getBuffer(); - buffer.writeUInt32BE(buffer.byteLength - INT32_SIZE, 0); - - if (exists(this.ciphers)) { - buffer = this.ciphers.encipher.update(buffer); - } + await 
this.sendRaw(packet.type, encodePacketToBytes(packet)); + } - this.socket.write(buffer); + public async sendRaw(type: Symbol, raw: Buffer) { + this.debug("Sending packet: " + type.toString()); + this.socket.sendBinary(raw); } public debug(...args: any[]) { From b0f377f5133993997a7cc2c0501df5c4adb3b8d9 Mon Sep 17 00:00:00 2001 From: Alexander Date: Wed, 11 Jun 2025 06:03:35 +0100 Subject: [PATCH 18/27] Drastically improve database performance The fact that `getChunkTimestamps` would create an entire disposable shadow-table is wonder to behold. Also fixed a bug caused by my naivety Bun's sqlite type could just be cast to kysely's since they're both based off of better-sqlite3. Given that there weren't any errors, my guess is that additional parameters were just being silently swallowed? There's currently no migration for pre-existing MapSync databases. --- server/bun.lock | 7 ++- server/package.json | 3 +- server/src/database.ts | 106 ++++++++++++++++++++--------------------- server/src/main.ts | 91 +++++++++++++++++++---------------- 4 files changed, 110 insertions(+), 97 deletions(-) diff --git a/server/bun.lock b/server/bun.lock index 45e719e0..664dd14b 100644 --- a/server/bun.lock +++ b/server/bun.lock @@ -5,7 +5,8 @@ "name": "civmap-server", "dependencies": { "async-mutex": "^0.4.0", - "kysely": "^0.26.1", + "kysely": "^0.28.2", + "kysely-bun-sqlite": "^0.4.0", "zod": "^3.25.57", }, "devDependencies": { @@ -24,7 +25,9 @@ "bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="], - "kysely": ["kysely@0.26.3", "", {}, "sha512-yWSgGi9bY13b/W06DD2OCDDHQmq1kwTGYlQ4wpZkMOJqMGCstVCFIvxCCVG4KfY1/3G0MhDAcZsip/Lw8/vJWw=="], + "kysely": ["kysely@0.28.2", "", {}, "sha512-4YAVLoF0Sf0UTqlhgQMFU9iQECdah7n+13ANkiuVfRvlK+uI0Etbgd7bVP36dKlG+NXWbhGua8vnGt+sdhvT7A=="], + + "kysely-bun-sqlite": ["kysely-bun-sqlite@0.4.0", "", { "dependencies": { "bun-types": "^1.1.31" }, "peerDependencies": { "kysely": "^0.28.2" } }, "sha512-2EkQE5sT4ewiw7IWfJsAkpxJ/QPVKXKO5sRYI/xjjJIJlECuOdtG+ssYM0twZJySrdrmuildNPFYVreyu1EdZg=="], "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], diff --git a/server/package.json b/server/package.json index 6051c376..ddcd5a88 100644 --- a/server/package.json +++ b/server/package.json @@ -16,7 +16,8 @@ }, "dependencies": { "async-mutex": "^0.4.0", - "kysely": "^0.26.1", + "kysely": "^0.28.2", + "kysely-bun-sqlite": "^0.4.0", "zod": "^3.25.57" }, "devDependencies": { diff --git a/server/src/database.ts b/server/src/database.ts index 80bab862..52dbc555 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -1,5 +1,8 @@ -import * as kysely from "kysely"; import { Database as BunSqliteDatabase } from "bun:sqlite"; + +import * as kysely from "kysely"; +import { BunSqliteDialect } from "kysely-bun-sqlite"; + import { DATA_FOLDER } from "./metadata.ts"; import { type Pos2D } from "./model.ts"; @@ -15,6 +18,8 @@ export interface Database { world: string; chunk_x: number; chunk_z: number; + region_x: kysely.Generated; + region_z: kysely.Generated; uuid: string; ts: number; hash: Buffer; @@ -22,22 +27,17 @@ export interface Database { } export function get() { - if (!database) { - database = new kysely.Kysely({ - dialect: new kysely.SqliteDialect({ - database: async () => { - return new BunSqliteDatabase( - Bun.env["SQLITE_PATH"] ?? 
`${DATA_FOLDER}/db.sqlite`, - { - create: true, - readwrite: true, - }, - ) as unknown as kysely.SqliteDatabase; + return (database ??= new kysely.Kysely({ + dialect: new BunSqliteDialect({ + database: new BunSqliteDatabase( + Bun.env["SQLITE_PATH"] ?? `${DATA_FOLDER}/db.sqlite`, + { + create: true, + readwrite: true, }, - }), - }); - } - return database; + ), + }), + })); } export async function setup() { @@ -54,6 +54,16 @@ export async function setup() { .addColumn("world", "text", (col) => col.notNull()) .addColumn("chunk_x", "integer", (col) => col.notNull()) .addColumn("chunk_z", "integer", (col) => col.notNull()) + .addColumn("region_x", "integer", (col) => + col + .generatedAlwaysAs(kysely.sql`floor(chunk_x / 32.0)`) + .notNull(), + ) + .addColumn("region_z", "integer", (col) => + col + .generatedAlwaysAs(kysely.sql`floor(chunk_z / 32.0)`) + .notNull(), + ) .addColumn("uuid", "text", (col) => col.notNull()) .addColumn("ts", "bigint", (col) => col.notNull()) .addColumn("hash", "blob", (col) => col.notNull()) @@ -77,24 +87,17 @@ export async function setup() { * Converts the entire database of player chunks into regions, with each region * having the highest (aka newest) timestamp. */ -export function getRegionTimestamps(dimension: string) { - // computing region coordinates in SQL requires truncating, not rounding - return get() +export async function getRegionTimestamps(dimension: string) { + return await get() .selectFrom("player_chunk") .select([ - (eb) => - kysely.sql`floor(${eb.ref("chunk_x")} / 32.0)`.as( - "regionX", - ), - (eb) => - kysely.sql`floor(${eb.ref("chunk_z")} / 32.0)`.as( - "regionZ", - ), + "region_x as regionX", + "region_z as regionZ", (eb) => eb.fn.max("ts").as("timestamp"), ]) .where("world", "=", dimension) .groupBy(["regionX", "regionZ"]) - .orderBy("regionX", "desc") + .orderBy("timestamp", "asc") .execute(); } @@ -102,31 +105,25 @@ export function getRegionTimestamps(dimension: string) { * Converts an array of region coords into an array of timestamped chunk coords. 
*/ export async function getChunkTimestamps(dimension: string, regions: Pos2D[]) { - return get() - .with("regions", (db) => - db - .selectFrom("player_chunk") - .select([ - (eb) => - kysely.sql`(cast(floor(${eb.ref( - "chunk_x", - )} / 32.0) as int) || '_' || cast(floor(${eb.ref( - "chunk_z", - )} / 32.0) as int))`.as("region"), - "chunk_x as x", - "chunk_z as z", - (eb) => eb.fn.max("ts").as("timestamp"), - ]) - .where("world", "=", dimension) - .groupBy(["x", "z"]), - ) - .selectFrom("regions") - .select(["x as chunkX", "z as chunkZ", "timestamp"]) - .where( - "region", - "in", - regions.map((region) => region.x + "_" + region.z), + return await get() + .selectFrom("player_chunk") + .select([ + "chunk_x as chunkX", + "chunk_z as chunkZ", + (eb) => eb.fn.max("ts").as("timestamp"), + ]) + .where((eb) => + eb.or( + regions.map((region) => + eb.and([ + eb("region_x", "=", region.x), + eb("region_z", "=", region.z), + ]), + ), + ), ) + .where("world", "=", dimension) + .groupBy(["chunkX", "chunkZ"]) .orderBy("timestamp", "desc") .execute(); } @@ -142,7 +139,7 @@ export async function getChunkData( chunkX: number, chunkZ: number, ) { - return get() + return await get() .selectFrom("player_chunk") .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") .select([ @@ -202,7 +199,7 @@ export async function getRegionChunks( maxChunkX = minChunkX + 16; const minChunkZ = regionZ << 4, maxChunkZ = minChunkZ + 16; - return get() + return await get() .selectFrom("player_chunk") .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") .select([ @@ -217,6 +214,7 @@ export async function getRegionChunks( .where("player_chunk.chunk_x", "<", maxChunkX) .where("player_chunk.chunk_z", ">=", minChunkZ) .where("player_chunk.chunk_z", "<", maxChunkZ) + .groupBy(["chunk_x", "chunk_z", "version", "data"]) .orderBy("player_chunk.ts", "desc") .execute(); } diff --git a/server/src/main.ts b/server/src/main.ts index 270d6e9a..f23a335f 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -92,39 +92,40 @@ Promise.resolve().then(async () => { } } - private async handleChunkTilePacket( + private async handleRegionCatchupPacket( client: TcpClient, - packet: ChunkTilePacket, + packet: ServerboundChunkTimestampsRequestPacket, ) { requireAuth(client); - // TODO ignore if same chunk hash exists in db - - if (client.auth instanceof OnlineAuth) { - await database - .storeChunkData( - packet.dimension, - packet.chunkX, - packet.chunkZ, - client.auth.uuid, - packet.timestamp, - packet.version, - packet.hash, - packet.data, - ) - .catch(client.warn); + if (packet.regions.length < 1) { + client.warn( + "Client requested chunk-timestamps without specifying any regions.", + ); + return; } - // TODO small timeout, then skip if other client already has it - const packetRaw = encodePacketToBytes(packet); - await Promise.allSettled( - server.clients - .values() - .filter((other) => other !== client && isAuthed(other)) - .map((other) => other.sendRaw(packet.type, packetRaw)), + const chunks = await database.getChunkTimestamps( + packet.dimension, + packet.regions.map((region) => ({ + x: region.regionX, + z: region.regionZ, + })), ); - // TODO queue tile render for web map + if (chunks.length < 1) { + client.warn( + `Client's request chunk-timestamps for [${packet.regions.length}] regions has no results.`, + ); + return; + } + + await client.send( + new ClientboundChunkTimestampsResponsePacket( + packet.dimension, + chunks, + ), + ); } private async handleCatchupRequest( @@ -167,27 +168,37 @@ 
Promise.resolve().then(async () => { } } - private async handleRegionCatchupPacket( + private async handleChunkTilePacket( client: TcpClient, - packet: ServerboundChunkTimestampsRequestPacket, + packet: ChunkTilePacket, ) { requireAuth(client); - const chunks = await database.getChunkTimestamps( - packet.dimension, - packet.regions.map((region) => ({ - x: region.regionX, - z: region.regionZ, - })), - ); - if (chunks.length > 0) { - await client.send( - new ClientboundChunkTimestampsResponsePacket( + if (client.auth instanceof OnlineAuth) { + await database + .storeChunkData( packet.dimension, - chunks, - ), - ); + packet.chunkX, + packet.chunkZ, + client.auth.uuid, + packet.timestamp, + packet.version, + packet.hash, + packet.data, + ) + .catch(client.warn); } + + // TODO small timeout, then skip if other client already has it + const packetRaw = encodePacketToBytes(packet); + await Promise.allSettled( + server.clients + .values() + .filter((other) => other !== client && isAuthed(other)) + .map((other) => other.sendRaw(packet.type, packetRaw)), + ); + + // TODO queue tile render for web map } })(), ); From 1810c8778092fc98b8f93b7a2edc8d9e55a49b52 Mon Sep 17 00:00:00 2001 From: Alexander Date: Wed, 11 Jun 2025 17:11:17 +0100 Subject: [PATCH 19/27] Set max frame/payload length to max u16 value --- .../gjum/minecraft/mapsync/common/net/SyncClient.java | 11 ++++++++++- .../minecraft/mapsync/common/utils/MagicValues.java | 4 ++++ server/src/constants.ts | 4 ++++ server/src/net/server.ts | 9 +++++++++ 4 files changed, 27 insertions(+), 1 deletion(-) diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java index 350f065f..5e380a87 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/net/SyncClient.java @@ -14,6 +14,7 @@ import gjum.minecraft.mapsync.common.net.packet.ServerboundAuthResponsePacket; import gjum.minecraft.mapsync.common.net.packet.ServerboundHandshakePacket; import gjum.minecraft.mapsync.common.utils.Hasher; +import gjum.minecraft.mapsync.common.utils.MagicValues; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import java.nio.ByteBuffer; @@ -29,6 +30,7 @@ import net.minecraft.world.level.ChunkPos; import org.apache.commons.lang3.StringUtils; import org.java_websocket.client.WebSocketClient; +import org.java_websocket.drafts.Draft_6455; import org.java_websocket.enums.ReadyState; import org.java_websocket.handshake.ServerHandshake; import org.jetbrains.annotations.NotNull; @@ -105,7 +107,14 @@ private class SyncConnection extends WebSocketClient { public SyncConnection( final @NotNull SyncAddress serverUri ) { - super(serverUri.address()); + super( + serverUri.address(), + new Draft_6455( + List.of(), // plugins + List.of(), // protocols + MagicValues.MAX_WS_FRAME_SIZE + ) + ); } @Override public void onOpen( diff --git a/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java b/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java index d365ba6a..66109e38 100644 --- a/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java +++ b/mod/common/src/main/java/gjum/minecraft/mapsync/common/utils/MagicValues.java @@ -4,4 +4,8 @@ public final class MagicValues { // SHA1 produces 160-bit (20-byte) hashes // https://en.wikipedia.org/wiki/SHA-1 public static final int SHA1_HASH_LENGTH = 20; + + // Sets the 
maximum frame length as the maximum 16-bit unsigned int value + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + public static final int MAX_WS_FRAME_SIZE = (1 << 16) - 1; } diff --git a/server/src/constants.ts b/server/src/constants.ts index 6fc5aab6..1b872b93 100644 --- a/server/src/constants.ts +++ b/server/src/constants.ts @@ -9,3 +9,7 @@ export const SHA1_HASH_LENGTH = 20; export const UUID_REGEX = /^(........)-?(....)-?(....)-?(....)-?(............)$/; + +// Sets the maximum frame length as the maximum 16-bit unsigned int value +// https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 +export const MAX_WS_FRAME_LENGTH = (1 << 16) - 1; diff --git a/server/src/net/server.ts b/server/src/net/server.ts index e21df3c2..3d7f623d 100644 --- a/server/src/net/server.ts +++ b/server/src/net/server.ts @@ -21,6 +21,7 @@ import { handleAuthResponse, handleHandshake, } from "./auth.ts"; +import { MAX_WS_FRAME_LENGTH } from "../constants.ts"; export interface ProtocolHandler { handleClientConnected(client: TcpClient): Promise; @@ -70,6 +71,14 @@ export class TcpServer { return undefined; }, websocket: { + maxPayloadLength: MAX_WS_FRAME_LENGTH, + // Allow 20 full frames of data of backpressure. Keep in mind + // that this is still >12x less than the default backpressure + // of 16MB. + backpressureLimit: MAX_WS_FRAME_LENGTH * 20, + closeOnBackpressureLimit: true, + idleTimeout: 60, // 60 seconds + async open(socket) { const client = new TcpClient(socket, self.handlers); self.clients.set(client.id, (socket.data = client)); From e7e5700985d5c5d674c95d0da2c7e0593337177f Mon Sep 17 00:00:00 2001 From: Alexander Date: Wed, 11 Jun 2025 20:32:46 +0100 Subject: [PATCH 20/27] Turns out where-in is WAY more efficient than where-and-or-chaining Tested this out and where-in took around 2.9 seconds using an example 4.3GB database, whereas the where-and-or-chain version took ***44.2*** seconds. 
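For context, the two query shapes being compared look roughly like
this (a condensed sketch of the kysely calls in getChunkTimestamps;
the exact change is in the diff below):

    // where-and-or-chaining: one OR'd pair of ANDs per requested region
    .where((eb) =>
        eb.or(
            regions.map((region) =>
                eb.and([
                    eb("region_x", "=", region.x),
                    eb("region_z", "=", region.z),
                ]),
            ),
        ),
    )

    // where-in: a single IN over the precomputed "x_z" region key
    .where(
        "region_coord",
        "in",
        regions.map((region) => region.x + "_" + region.z),
    )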
--- server/src/database.ts | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/server/src/database.ts b/server/src/database.ts index 52dbc555..44a5438e 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -20,6 +20,7 @@ export interface Database { chunk_z: number; region_x: kysely.Generated; region_z: kysely.Generated; + region_coord: kysely.Generated; uuid: string; ts: number; hash: Buffer; @@ -64,6 +65,11 @@ export async function setup() { .generatedAlwaysAs(kysely.sql`floor(chunk_z / 32.0)`) .notNull(), ) + .addColumn("region_coord", "text", (col) => { + return col + .generatedAlwaysAs(kysely.sql`cast(floor(chunk_x / 32.0) as int) || '_' || cast(floor(chunk_z / 32.0) as int)`) + .notNull(); + }) .addColumn("uuid", "text", (col) => col.notNull()) .addColumn("ts", "bigint", (col) => col.notNull()) .addColumn("hash", "blob", (col) => col.notNull()) @@ -112,15 +118,10 @@ export async function getChunkTimestamps(dimension: string, regions: Pos2D[]) { "chunk_z as chunkZ", (eb) => eb.fn.max("ts").as("timestamp"), ]) - .where((eb) => - eb.or( - regions.map((region) => - eb.and([ - eb("region_x", "=", region.x), - eb("region_z", "=", region.z), - ]), - ), - ), + .where( + "region_coord", + "in", + regions.map((region) => region.x + "_" + region.z), ) .where("world", "=", dimension) .groupBy(["chunkX", "chunkZ"]) From 26bae6bb85b975b3d51b107eebaed31a6eadc838 Mon Sep 17 00:00:00 2001 From: Alexander Date: Wed, 11 Jun 2025 23:28:41 +0100 Subject: [PATCH 21/27] Fix Zod dependency error This came from removing zod-validation-error as a dependency in d429342bb710e84c9ea316f55189413157c644b7 since Zod v4 provides its own error pretty-printer. But removing it apparently didn't remove the files from my node_modules. Alas. --- server/src/metadata.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/server/src/metadata.ts b/server/src/metadata.ts index 16e5d23e..f0d40dbf 100644 --- a/server/src/metadata.ts +++ b/server/src/metadata.ts @@ -1,8 +1,7 @@ import node_fs from "node:fs"; import node_path from "node:path"; import { Mutex } from "async-mutex"; -import * as z from "zod"; -import { fromZodError } from "zod-validation-error"; +import z, { prettifyError } from "zod/v4"; import { Errors, type JSONValue, parseJson } from "./lang.ts"; export const DATA_FOLDER = process.env["MAPSYNC_DATA_DIR"] ?? "./mapsync"; @@ -48,7 +47,7 @@ function parseConfigFile( return parser(parseJson(fileContents)); } catch (e) { if (e instanceof z.ZodError) { - throw "Could not parse " + file + ": " + fromZodError(e); + throw "Could not parse " + file + ": " + prettifyError(e); } throw e; } @@ -124,7 +123,7 @@ export async function saveWhitelist() { const UUID_CACHE_FILE = "uuid_cache.json"; const UUID_CACHE_MUTEX = new Mutex(); -const UUID_CACHE_SCHEMA = z.record(z.string().uuid()); +const UUID_CACHE_SCHEMA = z.record(z.string(), z.uuid()); // IGN UUID const uuid_cache = new Map(); From 632bca36d490b0bada3fdf6ad94034a49cc8f802 Mon Sep 17 00:00:00 2001 From: Alexander Date: Wed, 11 Jun 2025 23:45:50 +0100 Subject: [PATCH 22/27] Add gen prefixes to columns and add transactions Added the "gen_" prefix to generated columns to convey that these are basically readonly values. Also, being able to make generated columns from generated columns is amazing. And switching setup() and storeChunkData() to transactions. 
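To illustrate the chaining (a condensed sketch; the real schema
builder calls are in the diff below), gen_region_coord is defined
purely in terms of the other two generated columns:

    gen_region_x:     floor(chunk_x / 32.0)
    gen_region_z:     floor(chunk_z / 32.0)
    gen_region_coord: gen_region_x || '_' || gen_region_z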
--- server/src/database.ts | 160 +++++++++++++++++++++-------------------- 1 file changed, 82 insertions(+), 78 deletions(-) diff --git a/server/src/database.ts b/server/src/database.ts index 44a5438e..4eb0eefd 100644 --- a/server/src/database.ts +++ b/server/src/database.ts @@ -18,9 +18,9 @@ export interface Database { world: string; chunk_x: number; chunk_z: number; - region_x: kysely.Generated; - region_z: kysely.Generated; - region_coord: kysely.Generated; + gen_region_x: kysely.Generated; + gen_region_z: kysely.Generated; + gen_region_coord: kysely.Generated; uuid: string; ts: number; hash: Buffer; @@ -43,62 +43,71 @@ export function get() { export async function setup() { await get() - .schema.createTable("chunk_data") - .ifNotExists() - .addColumn("hash", "blob", (col) => col.notNull().primaryKey()) - .addColumn("version", "integer", (col) => col.notNull()) - .addColumn("data", "blob", (col) => col.notNull()) - .execute(); - await get() - .schema.createTable("player_chunk") - .ifNotExists() - .addColumn("world", "text", (col) => col.notNull()) - .addColumn("chunk_x", "integer", (col) => col.notNull()) - .addColumn("chunk_z", "integer", (col) => col.notNull()) - .addColumn("region_x", "integer", (col) => - col - .generatedAlwaysAs(kysely.sql`floor(chunk_x / 32.0)`) - .notNull(), - ) - .addColumn("region_z", "integer", (col) => - col - .generatedAlwaysAs(kysely.sql`floor(chunk_z / 32.0)`) - .notNull(), - ) - .addColumn("region_coord", "text", (col) => { - return col - .generatedAlwaysAs(kysely.sql`cast(floor(chunk_x / 32.0) as int) || '_' || cast(floor(chunk_z / 32.0) as int)`) - .notNull(); - }) - .addColumn("uuid", "text", (col) => col.notNull()) - .addColumn("ts", "bigint", (col) => col.notNull()) - .addColumn("hash", "blob", (col) => col.notNull()) - .addPrimaryKeyConstraint("PK_coords_and_player", [ - "world", - "chunk_x", - "chunk_z", - "uuid", - ]) - .addForeignKeyConstraint( - "FK_chunk_ref", - ["hash"], - "chunk_data", - ["hash"], - (fk) => fk.onUpdate("no action").onDelete("no action"), - ) - .execute(); + .transaction() + .execute(async (db) => { + await db.schema + .createTable("chunk_data") + .ifNotExists() + .addColumn("hash", "blob", (col) => col.notNull().primaryKey()) + .addColumn("version", "integer", (col) => col.notNull()) + .addColumn("data", "blob", (col) => col.notNull()) + .execute(); + await db.schema + .createTable("player_chunk") + .ifNotExists() + .addColumn("world", "text", (col) => col.notNull()) + .addColumn("chunk_x", "integer", (col) => col.notNull()) + .addColumn("chunk_z", "integer", (col) => col.notNull()) + .addColumn("gen_region_x", "integer", (col) => + col + .generatedAlwaysAs( + kysely.sql`floor(chunk_x / 32.0)`, + ) + .notNull(), + ) + .addColumn("gen_region_z", "integer", (col) => + col + .generatedAlwaysAs( + kysely.sql`floor(chunk_z / 32.0)`, + ) + .notNull(), + ) + .addColumn("gen_region_coord", "text", (col) => { + return col + .generatedAlwaysAs( + kysely.sql`gen_region_x || '_' || gen_region_z`, + ) + .notNull(); + }) + .addColumn("uuid", "text", (col) => col.notNull()) + .addColumn("ts", "bigint", (col) => col.notNull()) + .addColumn("hash", "blob", (col) => col.notNull()) + .addPrimaryKeyConstraint("PK_coords_and_player", [ + "world", + "chunk_x", + "chunk_z", + "uuid", + ]) + .addForeignKeyConstraint( + "FK_chunk_ref", + ["hash"], + "chunk_data", + ["hash"], + (fk) => fk.onUpdate("no action").onDelete("no action"), + ) + .execute(); + }); } /** - * Converts the entire database of player chunks into regions, with each region - * 
having the highest (aka newest) timestamp. + * Gets the timestamps for ALL regions stored. */ export async function getRegionTimestamps(dimension: string) { return await get() .selectFrom("player_chunk") .select([ - "region_x as regionX", - "region_z as regionZ", + "gen_region_x as regionX", + "gen_region_z as regionZ", (eb) => eb.fn.max("ts").as("timestamp"), ]) .where("world", "=", dimension) @@ -107,9 +116,6 @@ export async function getRegionTimestamps(dimension: string) { .execute(); } -/** - * Converts an array of region coords into an array of timestamped chunk coords. - */ export async function getChunkTimestamps(dimension: string, regions: Pos2D[]) { return await get() .selectFrom("player_chunk") @@ -119,7 +125,7 @@ export async function getChunkTimestamps(dimension: string, regions: Pos2D[]) { (eb) => eb.fn.max("ts").as("timestamp"), ]) .where( - "region_coord", + "gen_region_coord", "in", regions.map((region) => region.x + "_" + region.z), ) @@ -171,21 +177,25 @@ export async function storeChunkData( data: Buffer, ) { await get() - .insertInto("chunk_data") - .values({ hash, version, data }) - .onConflict((oc) => oc.column("hash").doNothing()) - .execute(); - await get() - .replaceInto("player_chunk") - .values({ - world: dimension, - chunk_x: chunkX, - chunk_z: chunkZ, - uuid, - ts: timestamp, - hash, - }) - .execute(); + .transaction() + .execute(async (db) => { + await db + .insertInto("chunk_data") + .values({ hash, version, data }) + .onConflict((oc) => oc.column("hash").doNothing()) + .execute(); + await db + .replaceInto("player_chunk") + .values({ + world: dimension, + chunk_x: chunkX, + chunk_z: chunkZ, + uuid, + ts: timestamp, + hash, + }) + .execute(); + }); } /** @@ -196,10 +206,6 @@ export async function getRegionChunks( regionX: number, regionZ: number, ) { - const minChunkX = regionX << 4, - maxChunkX = minChunkX + 16; - const minChunkZ = regionZ << 4, - maxChunkZ = minChunkZ + 16; return await get() .selectFrom("player_chunk") .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") @@ -211,10 +217,8 @@ export async function getRegionChunks( "chunk_data.data as data", ]) .where("player_chunk.world", "=", dimension) - .where("player_chunk.chunk_x", ">=", minChunkX) - .where("player_chunk.chunk_x", "<", maxChunkX) - .where("player_chunk.chunk_z", ">=", minChunkZ) - .where("player_chunk.chunk_z", "<", maxChunkZ) + .where("player_chunk.gen_region_x", "=", regionX) + .where("player_chunk.gen_region_z", "=", regionZ) .groupBy(["chunk_x", "chunk_z", "version", "data"]) .orderBy("player_chunk.ts", "desc") .execute(); From 003df4707399519b9945c71d3d872adb8947ad29 Mon Sep 17 00:00:00 2001 From: Alexander Date: Thu, 12 Jun 2025 00:40:00 +0100 Subject: [PATCH 23/27] Add database migrations This'll mean that any existing MapSync database (like the example 4.6GB database) will be migrated to have generated columns. 
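Adding a later schema change should then just be a matter of
appending another migration class to the provider's registry,
e.g. (hypothetical class name):

    return this.generateMigrationRegistry([
        Migration_0001_InitialSetup,
        Migration_0002_GenerateRegionCoordColumns,
        Migration_0003_SomeFutureChange, // hypothetical
    ]);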
--- server/src/Renderer.ts | 6 +- server/src/{ => db}/database.ts | 99 ++++++++------------------- server/src/db/migrations.ts | 117 ++++++++++++++++++++++++++++++++ server/src/main.ts | 20 +++++- 4 files changed, 167 insertions(+), 75 deletions(-) rename server/src/{ => db}/database.ts (58%) create mode 100644 server/src/db/migrations.ts diff --git a/server/src/Renderer.ts b/server/src/Renderer.ts index 295126a9..9e90221b 100644 --- a/server/src/Renderer.ts +++ b/server/src/Renderer.ts @@ -1,6 +1,6 @@ import { spawn } from "node:child_process"; import { promisify } from "node:util"; -import * as database from "./database.ts"; +import * as database from "./db/database.ts"; export async function renderTile( dimension: string, @@ -25,8 +25,8 @@ export async function renderTile( const chunkHeaderBuf = Buffer.allocUnsafe(4 + 4 + 2); // reused. 32+32+16 bit for (const chunk of allChunks) { - chunkHeaderBuf.writeInt32BE(chunk.chunk_x, 0); - chunkHeaderBuf.writeInt32BE(chunk.chunk_z, 4); + chunkHeaderBuf.writeInt32BE(chunk.chunkX, 0); + chunkHeaderBuf.writeInt32BE(chunk.chunkZ, 4); chunkHeaderBuf.writeUInt16BE(chunk.version, 8); await write(chunkHeaderBuf); await write(chunk.data); diff --git a/server/src/database.ts b/server/src/db/database.ts similarity index 58% rename from server/src/database.ts rename to server/src/db/database.ts index 4eb0eefd..95b8c1f9 100644 --- a/server/src/database.ts +++ b/server/src/db/database.ts @@ -1,12 +1,13 @@ import { Database as BunSqliteDatabase } from "bun:sqlite"; -import * as kysely from "kysely"; +import { Kysely, type Generated, Migrator } from "kysely"; import { BunSqliteDialect } from "kysely-bun-sqlite"; -import { DATA_FOLDER } from "./metadata.ts"; -import { type Pos2D } from "./model.ts"; +import { DATA_FOLDER } from "../metadata.ts"; +import Migrations from "./migrations.ts"; +import { type Pos2D } from "../model.ts"; -let database: kysely.Kysely | null = null; +let database: Kysely | null = null; export interface Database { chunk_data: { @@ -18,9 +19,9 @@ export interface Database { world: string; chunk_x: number; chunk_z: number; - gen_region_x: kysely.Generated; - gen_region_z: kysely.Generated; - gen_region_coord: kysely.Generated; + gen_region_x: Generated; + gen_region_z: Generated; + gen_region_coord: Generated; uuid: string; ts: number; hash: Buffer; @@ -28,7 +29,7 @@ export interface Database { } export function get() { - return (database ??= new kysely.Kysely({ + return (database ??= new Kysely({ dialect: new BunSqliteDialect({ database: new BunSqliteDatabase( Bun.env["SQLITE_PATH"] ?? 
`${DATA_FOLDER}/db.sqlite`, @@ -41,62 +42,20 @@ export function get() { })); } +export function getMigrations(): Migrator { + return new Migrator({ + db: get(), + provider: new Migrations(), + }); +} + +/** Convenience function to migrate to latest */ export async function setup() { - await get() - .transaction() - .execute(async (db) => { - await db.schema - .createTable("chunk_data") - .ifNotExists() - .addColumn("hash", "blob", (col) => col.notNull().primaryKey()) - .addColumn("version", "integer", (col) => col.notNull()) - .addColumn("data", "blob", (col) => col.notNull()) - .execute(); - await db.schema - .createTable("player_chunk") - .ifNotExists() - .addColumn("world", "text", (col) => col.notNull()) - .addColumn("chunk_x", "integer", (col) => col.notNull()) - .addColumn("chunk_z", "integer", (col) => col.notNull()) - .addColumn("gen_region_x", "integer", (col) => - col - .generatedAlwaysAs( - kysely.sql`floor(chunk_x / 32.0)`, - ) - .notNull(), - ) - .addColumn("gen_region_z", "integer", (col) => - col - .generatedAlwaysAs( - kysely.sql`floor(chunk_z / 32.0)`, - ) - .notNull(), - ) - .addColumn("gen_region_coord", "text", (col) => { - return col - .generatedAlwaysAs( - kysely.sql`gen_region_x || '_' || gen_region_z`, - ) - .notNull(); - }) - .addColumn("uuid", "text", (col) => col.notNull()) - .addColumn("ts", "bigint", (col) => col.notNull()) - .addColumn("hash", "blob", (col) => col.notNull()) - .addPrimaryKeyConstraint("PK_coords_and_player", [ - "world", - "chunk_x", - "chunk_z", - "uuid", - ]) - .addForeignKeyConstraint( - "FK_chunk_ref", - ["hash"], - "chunk_data", - ["hash"], - (fk) => fk.onUpdate("no action").onDelete("no action"), - ) - .execute(); - }); + const results = await getMigrations().migrateToLatest(); + if (results.error) { + throw results.error; + } + return results.results ?? 
[]; } /** @@ -178,13 +137,13 @@ export async function storeChunkData( ) { await get() .transaction() - .execute(async (db) => { - await db + .execute(async (transaction) => { + await transaction .insertInto("chunk_data") .values({ hash, version, data }) .onConflict((oc) => oc.column("hash").doNothing()) .execute(); - await db + await transaction .replaceInto("player_chunk") .values({ world: dimension, @@ -210,8 +169,8 @@ export async function getRegionChunks( .selectFrom("player_chunk") .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") .select([ - "player_chunk.chunk_x as chunk_x", - "player_chunk.chunk_z as chunk_z", + "player_chunk.chunk_x as chunkX", + "player_chunk.chunk_z as chunkZ", (eb) => eb.fn.max("player_chunk.ts").as("timestamp"), "chunk_data.version as version", "chunk_data.data as data", @@ -219,7 +178,7 @@ export async function getRegionChunks( .where("player_chunk.world", "=", dimension) .where("player_chunk.gen_region_x", "=", regionX) .where("player_chunk.gen_region_z", "=", regionZ) - .groupBy(["chunk_x", "chunk_z", "version", "data"]) - .orderBy("player_chunk.ts", "desc") + .groupBy(["chunkX", "chunkZ", "version", "data"]) + .orderBy("timestamp", "desc") .execute(); } diff --git a/server/src/db/migrations.ts b/server/src/db/migrations.ts new file mode 100644 index 00000000..076d097b --- /dev/null +++ b/server/src/db/migrations.ts @@ -0,0 +1,117 @@ +import { Kysely, sql, type Migration, type MigrationProvider } from "kysely"; + +type MigrationRegistry = Record; +type MigrationClass = { name: string } & (new () => Migration); + +export default class Migrations implements MigrationProvider { + public async getMigrations(): Promise { + return this.generateMigrationRegistry([ + Migration_0001_InitialSetup, + Migration_0002_GenerateRegionCoordColumns, + ]); + } + + private generateMigrationRegistry( + migrations: Array, + ): MigrationRegistry { + const registry: MigrationRegistry = {}; + for (const clazz of migrations) { + registry[clazz.name] = new clazz(); + } + return registry; + } +} + +// ============================================================ +// WARNING FOR WRITING MIGRATIONS! +// +// Kysely does not respect class functions: your "up" and "down" methods MUST +// be fields, not class functions, otherwise your migration will fail! 
+// ============================================================ + +export class Migration_0001_InitialSetup implements Migration { + public up = async (db: Kysely) => { + await db.transaction().execute(async (transaction) => { + await transaction.schema + .createTable("chunk_data") + .ifNotExists() + .addColumn("hash", "blob", (col) => col.notNull().primaryKey()) + .addColumn("version", "integer", (col) => col.notNull()) + .addColumn("data", "blob", (col) => col.notNull()) + .execute(); + await transaction.schema + .createTable("player_chunk") + .ifNotExists() + .addColumn("world", "text", (col) => col.notNull()) + .addColumn("chunk_x", "integer", (col) => col.notNull()) + .addColumn("chunk_z", "integer", (col) => col.notNull()) + .addColumn("uuid", "text", (col) => col.notNull()) + .addColumn("ts", "bigint", (col) => col.notNull()) + .addColumn("hash", "blob", (col) => col.notNull()) + .addPrimaryKeyConstraint("PK_coords_and_player", [ + "world", + "chunk_x", + "chunk_z", + "uuid", + ]) + .addForeignKeyConstraint( + "FK_chunk_ref", + ["hash"], + "chunk_data", + ["hash"], + (fk) => fk.onUpdate("no action").onDelete("no action"), + ) + .execute(); + }); + }; + // Probably shouldn't define a "down" since that just means an empty db +} + +export class Migration_0002_GenerateRegionCoordColumns implements Migration { + public up = async (db: Kysely) => { + await db.transaction().execute(async (transaction) => { + await transaction.schema + .alterTable("player_chunk") + .addColumn("gen_region_x", "integer", (col) => { + return col + .generatedAlwaysAs(sql`floor(chunk_x / 32.0)`) + .notNull(); + }) + .execute(); + await transaction.schema + .alterTable("player_chunk") + .addColumn("gen_region_z", "integer", (col) => { + return col + .generatedAlwaysAs(sql`floor(chunk_z / 32.0)`) + .notNull(); + }) + .execute(); + await transaction.schema + .alterTable("player_chunk") + .addColumn("gen_region_coord", "text", (col) => { + return col + .generatedAlwaysAs( + sql`gen_region_x || '_' || gen_region_z`, + ) + .notNull(); + }) + .execute(); + }); + }; + public down = async (db: Kysely) => { + await db.transaction().execute(async (transaction) => { + await transaction.schema + .alterTable("player_chunk") + .dropColumn("gen_region_coord") + .execute(); + await transaction.schema + .alterTable("player_chunk") + .dropColumn("gen_region_x") + .execute(); + await transaction.schema + .alterTable("player_chunk") + .dropColumn("gen_region_z") + .execute(); + }); + }; +} diff --git a/server/src/main.ts b/server/src/main.ts index f23a335f..f9d46903 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,5 +1,5 @@ import "./cli.ts"; -import * as database from "./database.ts"; +import * as database from "./db/database.ts"; import * as metadata from "./metadata.ts"; import { type ClientPacket, @@ -18,7 +18,23 @@ import { isAuthed, OnlineAuth, requireAuth } from "./net/auth.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { - await database.setup(); + for (const result of await database.setup()) { + switch (result.status) { + case "Success": + console.info(`Migration [${result.migrationName}] applied!`); + break; + case "Error": + console.error( + `Migration [${result.migrationName}] failed to apply!`, + ); + break; + case "NotExecuted": + console.warn( + `Migration [${result.migrationName}] was not applied!`, + ); + break; + } + } config = metadata.getConfig(); From 71ba39712d6cc982e1379968521ae64e3ea52731 Mon Sep 17 00:00:00 2001 From: Alexander Date: Sat, 14 Jun 2025 16:18:25 
+0100 Subject: [PATCH 24/27] Update workflows This *should* fix the workflows. The build-* workflows no longer publish a new release, instead there's a new release workflow that builds the mod (and later the server, assuming that's wanted). Also updated the Dockerfile with direction from Husky, but some additional work is needed. --- .github/workflows/build-mod.yml | 39 ++++++++---------------------- .github/workflows/build-server.yml | 37 ++++++++++++++++++---------- .github/workflows/release.yml | 35 +++++++++++++++++++++++++++ Dockerfile | 22 ++++++++--------- server/package.json | 5 ++-- 5 files changed, 83 insertions(+), 55 deletions(-) create mode 100644 .github/workflows/release.yml diff --git a/.github/workflows/build-mod.yml b/.github/workflows/build-mod.yml index ff102b50..ca4d9c51 100644 --- a/.github/workflows/build-mod.yml +++ b/.github/workflows/build-mod.yml @@ -1,4 +1,4 @@ -name: Build Mod +name: "Build Mods" on: push: @@ -7,39 +7,20 @@ on: pull_request: paths: - "mod/**/*" + workflow_call: jobs: build: - runs-on: ubuntu-latest + runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@v3 - - name: Set up JDK 17 - uses: actions/setup-java@v3 + - uses: "actions/checkout@v4" + + - name: "Setting up JDK 17" + uses: "actions/setup-java@v4" with: java-version: "17" distribution: "adopt" - - run: ./gradlew build - working-directory: ./mod - - - name: Upload Forge Build - uses: actions/upload-artifact@v3 - with: - name: Forge - path: mod/dist/*-forge.jar - - name: Upload Fabric Build - uses: actions/upload-artifact@v3 - with: - name: Fabric - path: mod/dist/*-fabric.jar - - - name: Release Tag - if: startsWith(github.ref, 'refs/tags/v') - uses: softprops/action-gh-release@v1 - with: - prerelease: true - fail_on_unmatched_files: true - files: | - mod/dist/*.jar - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Building mods" + working-directory: "./mod" + run: "./gradlew build" diff --git a/.github/workflows/build-server.yml b/.github/workflows/build-server.yml index be351377..3648d6fa 100644 --- a/.github/workflows/build-server.yml +++ b/.github/workflows/build-server.yml @@ -1,4 +1,4 @@ -name: Build+Test Server +name: "Build+Test Server" on: push: @@ -7,20 +7,31 @@ on: pull_request: paths: - "server/**/*" + workflow_call: jobs: build: - runs-on: ubuntu-latest - strategy: - matrix: - version: ["1.2.15", "latest"] + runs-on: "ubuntu-latest" steps: - - uses: actions/checkout@v3 - - name: Setup bun.sh - uses: oven-sh/setup-bun@v1 + - uses: "actions/checkout@v4" + + - name: "Setting up Bun" + uses: oven-sh/setup-bun@v2 with: - bun-version: ${{ matrix.version }} - - run: bun install - working-directory: ./server - - run: bun test - working-directory: ./server + bun-version: latest + + - name: "Installing dependencies" + working-directory: "./server" + run: "bun install" + + - name: "Checking types" + working-directory: "./server" + run: "bun run check:types" + + - name: "Checking style" + working-directory: "./server" + run: "bun run check:style" + + - name: "Testing server" + working-directory: "./server" + run: "bun run test" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..1566bcc5 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,35 @@ +name: "Publishing to release" + +on: + release: + types: + - "published" + +permissions: + contents: "write" + +jobs: + release-mod: + runs-on: "ubuntu-latest" + steps: + - uses: "actions/checkout@v4" + + - name: "Setting up JDK 17" + uses: 
"actions/setup-java@v4" + with: + java-version: "17" + distribution: "adopt" + + - name: "Building mods" + working-directory: "./mod" + run: "./gradlew build" + + - name: "Publishing mods" + working-directory: "./mod" + run: | + for file in $(find "dist/" -maxdepth 1 -type f -name "*.jar"); do + echo "Uploading $file" + gh release upload ${{ github.event.release.tag_name }} "$file" --clobber + done + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/Dockerfile b/Dockerfile index 7714d197..77e4b138 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,6 @@ # base is shared between build/test and deploy -FROM node:18-alpine AS base +# See options at: https://hub.docker.com/r/oven/bun +FROM oven/bun:latest AS base WORKDIR /usr/src/app/ @@ -8,29 +9,28 @@ COPY ./server/package.json /usr/src/app/package.json FROM base AS build -COPY ./server/yarn.lock /usr/src/app/yarn.lock -RUN yarn +COPY ./server/bun.lock /usr/src/app/bun.lock +COPY ./server/bunfig.toml /usr/src/app/bunfig.toml +RUN bun install # copy source as late as possible, to reuse docker cache with node_modules COPY ./server /usr/src/app -RUN yarn build - -FROM build AS test -RUN yarn test # final image only includes minimal files FROM base AS deploy +COPY --from=build /usr/src/app/bun.lock /usr/src/app/bun.lock +COPY --from=build /usr/src/app/bunfig.toml /usr/src/app/bunfig.toml COPY --from=build /usr/src/app/node_modules /usr/src/app/node_modules -COPY --from=build /usr/src/app/dist /usr/src/app/dist +COPY --from=build /usr/src/app/src /usr/src/app/src ENV NODE_ENV=production ENV HOST=0.0.0.0 #Mount your FS or volume or whatnot to this folder -RUN mkdir /data +# TODO: Fix env override of config data ENV MAPSYNC_DATA_DIR=/data -EXPOSE 12312/tcp +# EXPOSE 12312/tcp -CMD [ "yarn", "start" ] +CMD [ "bun", "run", "start" ] diff --git a/server/package.json b/server/package.json index ddcd5a88..f3135819 100644 --- a/server/package.json +++ b/server/package.json @@ -7,11 +7,12 @@ "type": "module", "module": "src/main.ts", "scripts": { - "format": "bunx prettier -w .", + "check:types": "bunx --bun tsc --noEmit --checkJs", + "check:style": "bunx --bun prettier --check .", + "format": "bunx --bun prettier -w .", "test": "bun test ./src/*.test.ts", "start": "bun src/main.ts", "start:dev": "bun --inspect src/main.ts", - "check": "bunx tsc", "compile": "bun build --compile . --outfile out/mapsync-server" }, "dependencies": { From de077a4912b932f049147e702e03908bdfc860ef Mon Sep 17 00:00:00 2001 From: Alexander Date: Sat, 14 Jun 2025 16:46:58 +0100 Subject: [PATCH 25/27] Create database test Turns out Bun doesn't like it if you try to run tests and there aren't any tests yet. Easy fix, we should probably have a test for migrations anyway. 
--- .github/workflows/build-server.yml | 2 +- server/package.json | 2 +- server/src/db/database.test.ts | 8 ++++++++ server/src/db/database.ts | 18 +++++++++++++++++- server/src/main.ts | 18 +----------------- 5 files changed, 28 insertions(+), 20 deletions(-) create mode 100644 server/src/db/database.test.ts diff --git a/.github/workflows/build-server.yml b/.github/workflows/build-server.yml index 3648d6fa..4b961e9b 100644 --- a/.github/workflows/build-server.yml +++ b/.github/workflows/build-server.yml @@ -32,6 +32,6 @@ jobs: working-directory: "./server" run: "bun run check:style" - - name: "Testing server" + - name: "Running tests" working-directory: "./server" run: "bun run test" diff --git a/server/package.json b/server/package.json index f3135819..d2b39488 100644 --- a/server/package.json +++ b/server/package.json @@ -10,7 +10,7 @@ "check:types": "bunx --bun tsc --noEmit --checkJs", "check:style": "bunx --bun prettier --check .", "format": "bunx --bun prettier -w .", - "test": "bun test ./src/*.test.ts", + "test": "bun test ./src/**/*.test.ts", "start": "bun src/main.ts", "start:dev": "bun --inspect src/main.ts", "compile": "bun build --compile . --outfile out/mapsync-server" diff --git a/server/src/db/database.test.ts b/server/src/db/database.test.ts new file mode 100644 index 00000000..ad326f81 --- /dev/null +++ b/server/src/db/database.test.ts @@ -0,0 +1,8 @@ +import { test } from "bun:test"; + +test("testMigrations", async () => { + process.env["SQLITE_PATH"] = ":memory:"; // Ensure an in-memory database + + const { setup } = await require("./database.ts"); + await setup(); +}); diff --git a/server/src/db/database.ts b/server/src/db/database.ts index 95b8c1f9..c89f9b3d 100644 --- a/server/src/db/database.ts +++ b/server/src/db/database.ts @@ -52,10 +52,26 @@ export function getMigrations(): Migrator { /** Convenience function to migrate to latest */ export async function setup() { const results = await getMigrations().migrateToLatest(); + for (const result of results.results ?? []) { + switch (result.status) { + case "Success": + console.info(`Migration [${result.migrationName}] applied!`); + break; + case "Error": + console.error( + `Migration [${result.migrationName}] failed to apply!`, + ); + break; + case "NotExecuted": + console.warn( + `Migration [${result.migrationName}] was not applied!`, + ); + break; + } + } if (results.error) { throw results.error; } - return results.results ?? []; } /** diff --git a/server/src/main.ts b/server/src/main.ts index f9d46903..1d8a6451 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -18,23 +18,7 @@ import { isAuthed, OnlineAuth, requireAuth } from "./net/auth.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { - for (const result of await database.setup()) { - switch (result.status) { - case "Success": - console.info(`Migration [${result.migrationName}] applied!`); - break; - case "Error": - console.error( - `Migration [${result.migrationName}] failed to apply!`, - ); - break; - case "NotExecuted": - console.warn( - `Migration [${result.migrationName}] was not applied!`, - ); - break; - } - } + await database.setup(); config = metadata.getConfig(); From 8de3cb4a1b90ea0911c550f3d51145498ad237b7 Mon Sep 17 00:00:00 2001 From: Alexander Date: Sat, 14 Jun 2025 17:48:28 +0100 Subject: [PATCH 26/27] Destaticify database connection Converts the database connection into an instanced-class. This is to avoid the creation of the "DATA_FOLDER" within the database test since tests should probably be pure. 
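
With the connection instanced, the caller decides where the data lives. The
two call sites introduced in this patch boil down to the following sketch
(DATA_FOLDER comes from metadata.ts, as in main.ts below):

    import DatabaseConnection from "./db/database.ts";

    // Production (main.ts): file-backed database inside the data folder
    const db = new DatabaseConnection(
        Bun.env["SQLITE_PATH"] ?? `${DATA_FOLDER}/db.sqlite`,
        { create: true, readwrite: true },
    );
    await db.setup();

    // Tests (database.test.ts): purely in-memory, no data folder required
    const testDb = new DatabaseConnection(":memory:");
    await testDb.setup();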
--- server/src/Renderer.ts | 3 +- server/src/db/database.test.ts | 8 +- server/src/db/database.ts | 325 ++++++++++++++++----------------- server/src/main.ts | 10 +- 4 files changed, 176 insertions(+), 170 deletions(-) diff --git a/server/src/Renderer.ts b/server/src/Renderer.ts index 9e90221b..00d4f6e7 100644 --- a/server/src/Renderer.ts +++ b/server/src/Renderer.ts @@ -1,8 +1,9 @@ import { spawn } from "node:child_process"; import { promisify } from "node:util"; -import * as database from "./db/database.ts"; +import type DatabaseConnection from "./db/database.ts"; export async function renderTile( + database: DatabaseConnection, dimension: string, tileX: number, tileZ: number, diff --git a/server/src/db/database.test.ts b/server/src/db/database.test.ts index ad326f81..dc3620a5 100644 --- a/server/src/db/database.test.ts +++ b/server/src/db/database.test.ts @@ -1,8 +1,8 @@ import { test } from "bun:test"; -test("testMigrations", async () => { - process.env["SQLITE_PATH"] = ":memory:"; // Ensure an in-memory database +import DatabaseConnection from "./database.ts"; - const { setup } = await require("./database.ts"); - await setup(); +test("testMigrations", async () => { + const database = new DatabaseConnection(":memory:"); + await database.setup(); }); diff --git a/server/src/db/database.ts b/server/src/db/database.ts index c89f9b3d..22d2c1cc 100644 --- a/server/src/db/database.ts +++ b/server/src/db/database.ts @@ -3,157 +3,153 @@ import { Database as BunSqliteDatabase } from "bun:sqlite"; import { Kysely, type Generated, Migrator } from "kysely"; import { BunSqliteDialect } from "kysely-bun-sqlite"; -import { DATA_FOLDER } from "../metadata.ts"; import Migrations from "./migrations.ts"; import { type Pos2D } from "../model.ts"; -let database: Kysely | null = null; +export default class DatabaseConnection { + public readonly internal: Kysely<{ + chunk_data: { + hash: Buffer; + version: number; + data: Buffer; + }; + player_chunk: { + world: string; + chunk_x: number; + chunk_z: number; + gen_region_x: Generated; + gen_region_z: Generated; + gen_region_coord: Generated; + uuid: string; + ts: number; + hash: Buffer; + }; + }>; -export interface Database { - chunk_data: { - hash: Buffer; - version: number; - data: Buffer; - }; - player_chunk: { - world: string; - chunk_x: number; - chunk_z: number; - gen_region_x: Generated; - gen_region_z: Generated; - gen_region_coord: Generated; - uuid: string; - ts: number; - hash: Buffer; - }; -} - -export function get() { - return (database ??= new Kysely({ - dialect: new BunSqliteDialect({ - database: new BunSqliteDatabase( - Bun.env["SQLITE_PATH"] ?? `${DATA_FOLDER}/db.sqlite`, - { - create: true, - readwrite: true, - }, - ), - }), - })); -} + /** + * See {@link BunSqliteDatabase}'s constructor and {@link https://bun.sh/docs/api/sqlite Bun SQLite} for + * documentation on what this function can accept. + */ + public constructor( + ...args: ConstructorParameters + ) { + this.internal = new Kysely({ + dialect: new BunSqliteDialect({ + database: new BunSqliteDatabase(...args), + }), + }); + } -export function getMigrations(): Migrator { - return new Migrator({ - db: get(), - provider: new Migrations(), - }); -} + public getMigrations(): Migrator { + return new Migrator({ + db: this.internal, + provider: new Migrations(), + }); + } -/** Convenience function to migrate to latest */ -export async function setup() { - const results = await getMigrations().migrateToLatest(); - for (const result of results.results ?? 
[]) { - switch (result.status) { - case "Success": - console.info(`Migration [${result.migrationName}] applied!`); - break; - case "Error": - console.error( - `Migration [${result.migrationName}] failed to apply!`, - ); - break; - case "NotExecuted": - console.warn( - `Migration [${result.migrationName}] was not applied!`, - ); - break; + /** Convenience function to migrate to latest */ + public async setup() { + const results = await this.getMigrations().migrateToLatest(); + for (const result of results.results ?? []) { + switch (result.status) { + case "Success": + console.info( + `Migration [${result.migrationName}] applied!`, + ); + break; + case "Error": + console.error( + `Migration [${result.migrationName}] failed to apply!`, + ); + break; + case "NotExecuted": + console.warn( + `Migration [${result.migrationName}] was not applied!`, + ); + break; + } + } + if (results.error) { + throw results.error; } } - if (results.error) { - throw results.error; - } -} -/** - * Gets the timestamps for ALL regions stored. - */ -export async function getRegionTimestamps(dimension: string) { - return await get() - .selectFrom("player_chunk") - .select([ - "gen_region_x as regionX", - "gen_region_z as regionZ", - (eb) => eb.fn.max("ts").as("timestamp"), - ]) - .where("world", "=", dimension) - .groupBy(["regionX", "regionZ"]) - .orderBy("timestamp", "asc") - .execute(); -} + /** Gets the timestamps for ALL regions stored. */ + async getRegionTimestamps(dimension: string) { + return await this.internal + .selectFrom("player_chunk") + .select([ + "gen_region_x as regionX", + "gen_region_z as regionZ", + (eb) => eb.fn.max("ts").as("timestamp"), + ]) + .where("world", "=", dimension) + .groupBy(["regionX", "regionZ"]) + .orderBy("timestamp", "asc") + .execute(); + } -export async function getChunkTimestamps(dimension: string, regions: Pos2D[]) { - return await get() - .selectFrom("player_chunk") - .select([ - "chunk_x as chunkX", - "chunk_z as chunkZ", - (eb) => eb.fn.max("ts").as("timestamp"), - ]) - .where( - "gen_region_coord", - "in", - regions.map((region) => region.x + "_" + region.z), - ) - .where("world", "=", dimension) - .groupBy(["chunkX", "chunkZ"]) - .orderBy("timestamp", "desc") - .execute(); -} + public async getChunkTimestamps(dimension: string, regions: Pos2D[]) { + return await this.internal + .selectFrom("player_chunk") + .select([ + "chunk_x as chunkX", + "chunk_z as chunkZ", + (eb) => eb.fn.max("ts").as("timestamp"), + ]) + .where( + "gen_region_coord", + "in", + regions.map((region) => region.x + "_" + region.z), + ) + .where("world", "=", dimension) + .groupBy(["chunkX", "chunkZ"]) + .orderBy("timestamp", "desc") + .execute(); + } -/** - * Retrieves the data for a given chunk's world, x, z, and timestamp. - * - * TODO: May want to consider making world, x, z, and timestamp a unique in the - * database table... may help performance. - */ -export async function getChunkData( - dimension: string, - chunkX: number, - chunkZ: number, -) { - return await get() - .selectFrom("player_chunk") - .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") - .select([ - "chunk_data.hash as hash", - "chunk_data.version as version", - "chunk_data.data as data", - "player_chunk.ts as ts", - ]) - .where("player_chunk.world", "=", dimension) - .where("player_chunk.chunk_x", "=", chunkX) - .where("player_chunk.chunk_z", "=", chunkZ) - .orderBy("player_chunk.ts", "desc") - .limit(1) - .executeTakeFirst(); -} + /** + * Retrieves the data for a given chunk's world, x, z, and timestamp. 
+ * + * TODO: May want to consider making world, x, z, and timestamp a unique in + * the database table... may help performance. + */ + public async getChunkData( + dimension: string, + chunkX: number, + chunkZ: number, + ) { + return await this.internal + .selectFrom("player_chunk") + .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") + .select([ + "chunk_data.hash as hash", + "chunk_data.version as version", + "chunk_data.data as data", + "player_chunk.ts as ts", + ]) + .where("player_chunk.world", "=", dimension) + .where("player_chunk.chunk_x", "=", chunkX) + .where("player_chunk.chunk_z", "=", chunkZ) + .orderBy("player_chunk.ts", "desc") + .limit(1) + .executeTakeFirst(); + } -/** - * Stores a player's chunk data. - */ -export async function storeChunkData( - dimension: string, - chunkX: number, - chunkZ: number, - uuid: string, - timestamp: number, - version: number, - hash: Buffer, - data: Buffer, -) { - await get() - .transaction() - .execute(async (transaction) => { + /** + * Stores a player's chunk data. + */ + public async storeChunkData( + dimension: string, + chunkX: number, + chunkZ: number, + uuid: string, + timestamp: number, + version: number, + hash: Buffer, + data: Buffer, + ) { + await this.internal.transaction().execute(async (transaction) => { await transaction .insertInto("chunk_data") .values({ hash, version, data }) @@ -171,30 +167,31 @@ export async function storeChunkData( }) .execute(); }); -} + } -/** - * Gets all the [latest] chunks within a region. - */ -export async function getRegionChunks( - dimension: string, - regionX: number, - regionZ: number, -) { - return await get() - .selectFrom("player_chunk") - .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") - .select([ - "player_chunk.chunk_x as chunkX", - "player_chunk.chunk_z as chunkZ", - (eb) => eb.fn.max("player_chunk.ts").as("timestamp"), - "chunk_data.version as version", - "chunk_data.data as data", - ]) - .where("player_chunk.world", "=", dimension) - .where("player_chunk.gen_region_x", "=", regionX) - .where("player_chunk.gen_region_z", "=", regionZ) - .groupBy(["chunkX", "chunkZ", "version", "data"]) - .orderBy("timestamp", "desc") - .execute(); + /** + * Gets all the [latest] chunks within a region. 
+ */ + public async getRegionChunks( + dimension: string, + regionX: number, + regionZ: number, + ) { + return await this.internal + .selectFrom("player_chunk") + .innerJoin("chunk_data", "chunk_data.hash", "player_chunk.hash") + .select([ + "player_chunk.chunk_x as chunkX", + "player_chunk.chunk_z as chunkZ", + (eb) => eb.fn.max("player_chunk.ts").as("timestamp"), + "chunk_data.version as version", + "chunk_data.data as data", + ]) + .where("player_chunk.world", "=", dimension) + .where("player_chunk.gen_region_x", "=", regionX) + .where("player_chunk.gen_region_z", "=", regionZ) + .groupBy(["chunkX", "chunkZ", "version", "data"]) + .orderBy("timestamp", "desc") + .execute(); + } } diff --git a/server/src/main.ts b/server/src/main.ts index 1d8a6451..3244871e 100644 --- a/server/src/main.ts +++ b/server/src/main.ts @@ -1,5 +1,5 @@ import "./cli.ts"; -import * as database from "./db/database.ts"; +import DatabaseConnection from "./db/database.ts"; import * as metadata from "./metadata.ts"; import { type ClientPacket, @@ -15,9 +15,17 @@ import { ServerboundChunkTimestampsRequestPacket, } from "./net/packets.ts"; import { isAuthed, OnlineAuth, requireAuth } from "./net/auth.ts"; +import { DATA_FOLDER } from "./metadata.ts"; let config: metadata.Config = null!; Promise.resolve().then(async () => { + const database = new DatabaseConnection( + Bun.env["SQLITE_PATH"] ?? `${DATA_FOLDER}/db.sqlite`, + { + create: true, + readwrite: true, + }, + ); await database.setup(); config = metadata.getConfig(); From 5261d2bb680e746c97bfdd5194206b1af80bb1e5 Mon Sep 17 00:00:00 2001 From: Alexander Date: Sat, 14 Jun 2025 19:40:12 +0100 Subject: [PATCH 27/27] Fix websocket close handler The websocket server does not provide an error, but rather a close code and reason. --- server/src/lang.ts | 4 ---- server/src/net/server.ts | 10 ++++------ 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/server/src/lang.ts b/server/src/lang.ts index 8337bcf8..70759493 100644 --- a/server/src/lang.ts +++ b/server/src/lang.ts @@ -1,10 +1,6 @@ import node_os from "node:os"; import node_utils from "node:util"; -export function exists(obj: T): obj is NonNullable { - return (obj ?? null) !== null; -} - export const INT8_SIZE = 1; export const INT16_SIZE = 2; export const INT32_SIZE = 4; diff --git a/server/src/net/server.ts b/server/src/net/server.ts index 3d7f623d..c834ea64 100644 --- a/server/src/net/server.ts +++ b/server/src/net/server.ts @@ -1,6 +1,5 @@ import { serve, type Server, type ServerWebSocket } from "bun"; -import { exists } from "../lang.ts"; import { type ClientPacket, decodePacket, @@ -86,14 +85,13 @@ export class TcpServer { await self.handlers.handleClientConnected(client); client.log("Connected"); }, - async close(socket, err) { + async close(socket, code, reason) { const client: TcpClient = socket.data; self.clients.delete(client.id); - if (exists(err)) { - client.warn(`Closed due to an error!`, err); - } await self.handlers.handleClientDisconnected(client); - client.log("Disconnected"); + client.log( + `Disconnected (Code: ${code}) (Reason: ${reason})`, + ); }, async message(socket, message) { const client: TcpClient = socket.data;