diff --git a/.gitignore b/.gitignore index 2b0aaee..099122f 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,5 @@ logs .env* dist .nx/cache -.nx/workspace-data \ No newline at end of file +.nx/workspace-data +.tsc-e2e diff --git a/DOCS.md b/DOCS.md index 379d13e..5c5615f 100644 --- a/DOCS.md +++ b/DOCS.md @@ -316,3 +316,20 @@ This ensures isolation and allows access to useful resources during execution. - `context.http`: Ability to make outgoing HTTP requests - `context.values`: Read application-wide configuration variables +--- + +## 🧪 End-to-end rule validation + +Le suite end-to-end risiedono in `tests/e2e` e vengono ora eseguite insieme agli unit test con `npm test`. Il root `jest.config.ts` contiene due progetti (`packages/flowerbase` + `tests`), perciò il comando fa partire: + +- le suite `packages/flowerbase/src/**` (unit) +- i test `tests/e2e/**/*.test.ts` (E2E) + +Per gli E2E viene caricato automaticamente `.env.e2e` (se presente) tramite `dotenv`, quindi il file può contenere `DB_CONNECTION_STRING` e altre variabili custom. Se preferisci non salvare le credenziali nel repo, basta esportare `DB_CONNECTION_STRING` prima di `npm test`: + +```bash +export DB_CONNECTION_STRING="mongodb+srv://user:pass@cluster.mongodb.net/dbname" +npm test +``` + +In mancanza di un valore esplicito, il test fallirà perché il server remoto non sarà raggiungibile: assicurati che la stringa punti a un cluster che esegue le regole attese (per esempio `flowerbase-e2e`). Non serve più avviare Docker o Replica Set locali. 
diff --git a/jest.config.ts b/jest.config.ts index 6b3f2d6..81c7dc6 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -1,5 +1,6 @@ -import { getJestProjectsAsync } from '@nx/jest'; - -export default async () => ({ - projects: await getJestProjectsAsync(), -}); +export default { + projects: [ + '<rootDir>/packages/flowerbase/jest.config.ts', + '<rootDir>/tests/jest.e2e.config.ts' + ] +}; diff --git a/package-lock.json b/package-lock.json index addf702..1d06c25 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,6 +11,18 @@ "workspaces": [ "packages/*" ], + "dependencies": { + "@fastify/cors": "^11.2.0", + "@fastify/jwt": "^10.0.0", + "@fastify/mongodb": "^9.0.2", + "@fastify/swagger": "^9.6.1", + "@fastify/swagger-ui": "^5.2.4", + "aws-sdk": "^2.1692.0", + "bson": "^6.10.4", + "fastify-raw-body": "^5.0.0", + "node-cron": "^4.2.1", + "undici": "^7.18.2" + }, "devDependencies": { "@babel/core": "^7.24.5", "@babel/preset-react": "^7.24.1", @@ -29,9 +41,11 @@ "@swc/helpers": "~0.5.11", "@swc/jest": "0.2.36", "@types/jest": "^29.4.0", + "@types/mongodb": "^4.0.7", "@types/node": "18.16.9", "@typescript-eslint/eslint-plugin": "^7.3.0", "@typescript-eslint/parser": "^7.3.0", + "dotenv": "^17.2.3", "eslint": "~8.57.0", "eslint-config-prettier": "^9.0.0", "eslint-plugin-import": "^2.29.1", @@ -40,10 +54,12 @@ "eslint-plugin-prettier": "^5.1.3", "eslint-plugin-react": "^7.34.1", "eslint-plugin-react-hooks": "^4.6.2", + "fastify": "^5.6.2", "jest": "^29.4.1", "jest-environment-jsdom": "^29.4.1", "jest-environment-node": "^29.4.1", "markdownlint-cli2": "^0.15.0", + "mongodb": "^6.21.0", "nx": "19.4.1", "prettier": "^3.2.5", "rimraf": "^5.0.7", @@ -2243,9 +2259,9 @@ } }, "node_modules/@fastify/cors": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/@fastify/cors/-/cors-10.1.0.tgz", - "integrity": "sha512-MZyBCBJtII60CU9Xme/iE4aEy8G7QpzGR8zkdXZkDFt7ElEMachbE61tfhAG/bvSaULlqlf0huMT12T7iqEmdQ==", + "version": "11.2.0", + "resolved": 
"https://registry.npmjs.org/@fastify/cors/-/cors-11.2.0.tgz", + "integrity": "sha512-LbLHBuSAdGdSFZYTLVA3+Ch2t+sA6nq3Ejc6XLAKiQ6ViS2qFnvicpj0htsx03FyYeLs04HfRNBsz/a8SvbcUw==", "funding": [ { "type": "github", @@ -2258,7 +2274,7 @@ ], "dependencies": { "fastify-plugin": "^5.0.0", - "mnemonist": "0.40.0" + "toad-cache": "^3.7.0" } }, "node_modules/@fastify/error": { @@ -2300,9 +2316,9 @@ "integrity": "sha512-kJExsp4JCms7ipzg7SJ3y8DwmePaELHxKYtg+tZow+k0znUTf3cb+npgyqm8+ATZOdmfgfydIebPDWM172wfyA==" }, "node_modules/@fastify/jwt": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/@fastify/jwt/-/jwt-9.1.0.tgz", - "integrity": "sha512-CiGHCnS5cPMdb004c70sUWhQTfzrJHAeTywt7nVw6dAiI0z1o4WRvU94xfijhkaId4bIxTCOjFgn4sU+Gvk43w==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@fastify/jwt/-/jwt-10.0.0.tgz", + "integrity": "sha512-2Qka3NiyNNcsfejMUvyzot1T4UYIzzcbkFGDdVyrl344fRZ/WkD6VFXOoXhxe2Pzf3LpJNkoSxUM4Ru4DVgkYA==", "funding": [ { "type": "github", @@ -2314,10 +2330,10 @@ } ], "dependencies": { - "@fastify/error": "^4.0.0", + "@fastify/error": "^4.2.0", "@lukeed/ms": "^2.0.2", - "fast-jwt": "^5.0.0", - "fastify-plugin": "^5.0.0", + "fast-jwt": "^6.0.2", + "fastify-plugin": "^5.0.1", "steed": "^1.1.3" } }, @@ -2390,9 +2406,9 @@ } }, "node_modules/@fastify/static": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@fastify/static/-/static-8.2.0.tgz", - "integrity": "sha512-PejC/DtT7p1yo3p+W7LiUtLMsV8fEvxAK15sozHy9t8kwo5r0uLYmhV/inURmGz1SkHZFz/8CNtHLPyhKcx4SQ==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@fastify/static/-/static-9.0.0.tgz", + "integrity": "sha512-r64H8Woe/vfilg5RTy7lwWlE8ZZcTrc3kebYFMEUBrMqlydhQyoiExQXdYAy2REVpST/G35+stAM8WYp1WGmMA==", "funding": [ { "type": "github", @@ -2406,27 +2422,33 @@ "dependencies": { "@fastify/accept-negotiator": "^2.0.0", "@fastify/send": "^4.0.0", - "content-disposition": "^0.5.4", + "content-disposition": "^1.0.1", "fastify-plugin": "^5.0.0", "fastq": "^1.17.1", 
- "glob": "^11.0.0" + "glob": "^13.0.0" + } + }, + "node_modules/@fastify/static/node_modules/content-disposition": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.1.tgz", + "integrity": "sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/@fastify/static/node_modules/glob": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz", - "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==", + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.0.tgz", + "integrity": "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA==", "dependencies": { - "foreground-child": "^3.3.1", - "jackspeak": "^4.1.1", - "minimatch": "^10.0.3", + "minimatch": "^10.1.1", "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, "engines": { "node": "20 || >=22" }, @@ -2435,9 +2457,9 @@ } }, "node_modules/@fastify/static/node_modules/minimatch": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz", - "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==", + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", + "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", "dependencies": { "@isaacs/brace-expansion": "^5.0.0" }, @@ -2449,9 +2471,9 @@ } }, "node_modules/@fastify/swagger": { - "version": "9.5.1", - "resolved": "https://registry.npmjs.org/@fastify/swagger/-/swagger-9.5.1.tgz", - "integrity": 
"sha512-EGjYLA7vDmCPK7XViAYMF6y4+K3XUy5soVTVxsyXolNe/Svb4nFQxvtuQvvoQb2Gzc9pxiF3+ZQN/iZDHhKtTg==", + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/@fastify/swagger/-/swagger-9.6.1.tgz", + "integrity": "sha512-fKlpJqFMWoi4H3EdUkDaMteEYRCfQMEkK0HJJ0eaf4aRlKd8cbq0pVkOfXDXmtvMTXYcnx3E+l023eFDBsA1HA==", "funding": [ { "type": "github", @@ -2471,9 +2493,9 @@ } }, "node_modules/@fastify/swagger-ui": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/@fastify/swagger-ui/-/swagger-ui-5.2.3.tgz", - "integrity": "sha512-e7ivEJi9EpFcxTONqICx4llbpB2jmlI+LI1NQ/mR7QGQnyDOqZybPK572zJtcdHZW4YyYTBHcP3a03f1pOh0SA==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/@fastify/swagger-ui/-/swagger-ui-5.2.4.tgz", + "integrity": "sha512-Maw8OYPUDxlOzKQd3VMv7T/fmjf2h6BWR3XHkhk3dD3rIfzO7C/UPnzGuTpOGMqw1HCUnctADBbeTNAzAwzUqA==", "funding": [ { "type": "github", @@ -2485,7 +2507,7 @@ } ], "dependencies": { - "@fastify/static": "^8.0.0", + "@fastify/static": "^9.0.0", "fastify-plugin": "^5.0.0", "openapi-types": "^12.1.3", "rfdc": "^1.3.1", @@ -2624,6 +2646,7 @@ "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", @@ -2640,6 +2663,7 @@ "version": "6.1.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true, "engines": { "node": ">=12" }, @@ -2651,6 +2675,7 @@ "version": "6.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, "engines": { "node": ">=12" }, @@ -2662,6 +2687,7 @@ "version": 
"5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", @@ -2678,6 +2704,7 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, "dependencies": { "ansi-regex": "^6.0.1" }, @@ -2692,6 +2719,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", @@ -4361,6 +4389,11 @@ "typescript": "^3 || ^4 || ^5" } }, + "node_modules/@pinojs/redact": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz", + "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==" + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", @@ -4866,41 +4899,6 @@ "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", "dev": true }, - "node_modules/@sendgrid/client": { - "version": "8.1.5", - "resolved": "https://registry.npmjs.org/@sendgrid/client/-/client-8.1.5.tgz", - "integrity": "sha512-Jqt8aAuGIpWGa15ZorTWI46q9gbaIdQFA21HIPQQl60rCjzAko75l3D1z7EyjFrNr4MfQ0StusivWh8Rjh10Cg==", - "dependencies": { - "@sendgrid/helpers": "^8.0.0", - "axios": "^1.8.2" - }, - "engines": { - "node": ">=12.*" - } - }, - "node_modules/@sendgrid/helpers": { - "version": "8.0.0", - "resolved": 
"https://registry.npmjs.org/@sendgrid/helpers/-/helpers-8.0.0.tgz", - "integrity": "sha512-Ze7WuW2Xzy5GT5WRx+yEv89fsg/pgy3T1E3FS0QEx0/VvRmigMZ5qyVGhJz4SxomegDkzXv/i0aFPpHKN8qdAA==", - "dependencies": { - "deepmerge": "^4.2.2" - }, - "engines": { - "node": ">= 12.0.0" - } - }, - "node_modules/@sendgrid/mail": { - "version": "8.1.5", - "resolved": "https://registry.npmjs.org/@sendgrid/mail/-/mail-8.1.5.tgz", - "integrity": "sha512-W+YuMnkVs4+HA/bgfto4VHKcPKLc7NiZ50/NH2pzO6UHCCFuq8/GNB98YJlLEr/ESDyzAaDr7lVE7hoBwFTT3Q==", - "dependencies": { - "@sendgrid/client": "^8.1.5", - "@sendgrid/helpers": "^8.0.0" - }, - "engines": { - "node": ">=12.*" - } - }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -5951,12 +5949,13 @@ } }, "node_modules/@types/mongodb": { - "version": "3.6.20", - "resolved": "https://registry.npmjs.org/@types/mongodb/-/mongodb-3.6.20.tgz", - "integrity": "sha512-WcdpPJCakFzcWWD9juKoZbRtQxKIMYF/JIAM4JrNHrMcnJL6/a2NWjXxW7fo9hxboxxkg+icff8d7+WIEvKgYQ==", + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/@types/mongodb/-/mongodb-4.0.7.tgz", + "integrity": "sha512-lPUYPpzA43baXqnd36cZ9xxorprybxXDzteVKCPAdp14ppHtFJHnXYvNpmBvtMUTb5fKXVv6sVbzo1LHkWhJlw==", + "deprecated": "mongodb provides its own types. 
@types/mongodb is no longer needed.", + "dev": true, "dependencies": { - "@types/bson": "*", - "@types/node": "*" + "mongodb": "*" } }, "node_modules/@types/node": { @@ -7125,7 +7124,8 @@ "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true }, "node_modules/atomic-sleep": { "version": "1.0.0", @@ -7229,6 +7229,7 @@ "version": "1.11.0", "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + "dev": true, "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", @@ -8246,6 +8247,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -8626,6 +8628,7 @@ "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -9054,6 +9057,7 @@ "version": "4.3.1", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -9184,6 +9188,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, "engines": { "node": ">=0.4.0" } @@ -9399,9 +9404,10 @@ } }, "node_modules/dotenv": { - "version": "16.6.1", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", - "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "version": "17.2.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz", + "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==", + "dev": true, "engines": { "node": ">=12" }, @@ -9424,6 +9430,18 @@ "url": "https://dotenvx.com" } }, + "node_modules/dotenv-expand/node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -9446,7 +9464,8 @@ "node_modules/eastasianwidth": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true }, "node_modules/ecdsa-sig-formatter": { "version": "1.0.11", @@ -9496,7 +9515,8 @@ "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "integrity": 
"sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true }, "node_modules/emojis-list": { "version": "3.0.0", @@ -9736,6 +9756,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", @@ -10849,9 +10870,9 @@ } }, "node_modules/fast-jwt": { - "version": "5.0.6", - "resolved": "https://registry.npmjs.org/fast-jwt/-/fast-jwt-5.0.6.tgz", - "integrity": "sha512-LPE7OCGUl11q3ZgW681cEU2d0d2JZ37hhJAmetCgNyW8waVaJVZXhyFF6U2so1Iim58Yc7pfxJe2P7MNetQH2g==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/fast-jwt/-/fast-jwt-6.1.0.tgz", + "integrity": "sha512-cGK/TXlud8INL49Iv7yRtZy0PHzNJId1shfqNCqdF0gOlWiy+1FPgjxX+ZHp/CYxFYDaoNnxeYEGzcXSkahUEQ==", "dependencies": { "@lukeed/ms": "^2.0.2", "asn1.js": "^5.4.1", @@ -10876,14 +10897,6 @@ "fast-decode-uri-component": "^1.0.1" } }, - "node_modules/fast-redact": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.5.0.tgz", - "integrity": "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==", - "engines": { - "node": ">=6" - } - }, "node_modules/fast-uri": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", @@ -10920,9 +10933,9 @@ } }, "node_modules/fastify": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/fastify/-/fastify-5.4.0.tgz", - "integrity": "sha512-I4dVlUe+WNQAhKSyv15w+dwUh2EPiEl4X2lGYMmNSgF83WzTMAPKGdWEv5tPsCQOb+SOZwz8Vlta2vF+OeDgRw==", + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/fastify/-/fastify-5.6.2.tgz", + "integrity": "sha512-dPugdGnsvYkBlENLhCgX8yhyGCsCPrpA8lFWbTNU428l+YOnLgYHR69hzV8HWPC79n536EqzqQtvhtdaCE0dKg==", "funding": [ { 
"type": "github", @@ -10943,7 +10956,7 @@ "fast-json-stringify": "^6.0.0", "find-my-way": "^9.0.0", "light-my-request": "^6.0.0", - "pino": "^9.0.0", + "pino": "^10.1.0", "process-warning": "^5.0.0", "rfdc": "^1.3.1", "secure-json-parse": "^4.0.0", @@ -11402,6 +11415,7 @@ "version": "1.15.9", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "dev": true, "funding": [ { "type": "individual", @@ -11435,6 +11449,7 @@ "version": "3.3.1", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, "dependencies": { "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" @@ -11450,6 +11465,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, "engines": { "node": ">=14" }, @@ -11617,6 +11633,7 @@ "version": "4.0.4", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -13158,7 +13175,8 @@ "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true }, "node_modules/isobject": { "version": "3.0.1", @@ -13300,20 +13318,6 @@ "node": ">= 0.4" } }, - "node_modules/jackspeak": { - "version": "4.1.1", - 
"resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz", - "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/jake": { "version": "10.9.2", "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", @@ -13469,6 +13473,41 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/jest-circus/node_modules/babel-plugin-macros": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", + "integrity": "sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@babel/runtime": "^7.12.5", + "cosmiconfig": "^7.0.0", + "resolve": "^1.19.0" + }, + "engines": { + "node": ">=10", + "npm": ">=6" + } + }, + "node_modules/jest-circus/node_modules/cosmiconfig": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", + "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "@types/parse-json": "^4.0.0", + "import-fresh": "^3.2.1", + "parse-json": "^5.0.0", + "path-type": "^4.0.0", + "yaml": "^1.10.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/jest-circus/node_modules/dedent": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz", @@ -13483,6 +13522,17 @@ } } }, + "node_modules/jest-circus/node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + 
"dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/jest-cli": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", @@ -15108,13 +15158,13 @@ "link": true }, "node_modules/mongodb": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.18.0.tgz", - "integrity": "sha512-fO5ttN9VC8P0F5fqtQmclAkgXZxbIkYRTUi1j8JO6IYwvamkhtYDilJr35jOPELR49zqCJgXZWwCtW7B+TM8vQ==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.21.0.tgz", + "integrity": "sha512-URyb/VXMjJ4da46OeSXg+puO39XH9DeQpWCslifrRn9JWugy0D+DvvBvkm2WxmHe61O/H19JM66p1z7RHVkZ6A==", "dependencies": { - "@mongodb-js/saslprep": "^1.1.9", + "@mongodb-js/saslprep": "^1.3.0", "bson": "^6.10.4", - "mongodb-connection-string-url": "^3.0.0" + "mongodb-connection-string-url": "^3.0.2" }, "engines": { "node": ">=16.20.1" @@ -15125,7 +15175,7 @@ "gcp-metadata": "^5.2.0", "kerberos": "^2.0.1", "mongodb-client-encryption": ">=6.0.0 <7", - "snappy": "^7.2.2", + "snappy": "^7.3.2", "socks": "^2.7.1" }, "peerDependenciesMeta": { @@ -15220,6 +15270,15 @@ "mongoose": "*" } }, + "node_modules/mongoose/node_modules/@types/mongodb": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/@types/mongodb/-/mongodb-3.6.20.tgz", + "integrity": "sha512-WcdpPJCakFzcWWD9juKoZbRtQxKIMYF/JIAM4JrNHrMcnJL6/a2NWjXxW7fo9hxboxxkg+icff8d7+WIEvKgYQ==", + "dependencies": { + "@types/bson": "*", + "@types/node": "*" + } + }, "node_modules/mongoose/node_modules/bl": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", @@ -15469,24 +15528,13 @@ "optional": true }, "node_modules/node-cron": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.3.tgz", - "integrity": "sha512-dOal67//nohNgYWb+nWmg5dkFdIwDm8EpeGYMekPMrngV3637lqnX0lbUcCtgibHTz6SEz7DAIjKvKDFYCnO1A==", - "dependencies": { - "uuid": "8.3.2" - }, + "version": 
"4.2.1", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-4.2.1.tgz", + "integrity": "sha512-lgimEHPE/QDgFlywTd8yTR61ptugX3Qer29efeyWw2rv259HtGBNn1vZVmp8lB9uo9wC0t/AT4iGqXxia+CJFg==", "engines": { "node": ">=6.0.0" } }, - "node_modules/node-cron/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "node_modules/node-forge": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", @@ -16175,7 +16223,8 @@ "node_modules/package-json-from-dist": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==" + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", + "dev": true }, "node_modules/parent-module": { "version": "1.0.1", @@ -16258,6 +16307,7 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, "engines": { "node": ">=8" } @@ -16269,9 +16319,9 @@ "dev": true }, "node_modules/path-scurry": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz", - "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz", + "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==", "dependencies": { "lru-cache": "^11.0.0", "minipass": "^7.1.2" @@ -16284,9 +16334,9 @@ } 
}, "node_modules/path-scurry/node_modules/lru-cache": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.1.0.tgz", - "integrity": "sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==", + "version": "11.2.4", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", "engines": { "node": "20 || >=22" } @@ -16346,30 +16396,30 @@ } }, "node_modules/pino": { - "version": "9.7.0", - "resolved": "https://registry.npmjs.org/pino/-/pino-9.7.0.tgz", - "integrity": "sha512-vnMCM6xZTb1WDmLvtG2lE/2p+t9hDEIvTWJsu6FejkE62vB7gDhvzrpFR4Cw2to+9JNQxVnkAKVPA1KPB98vWg==", + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/pino/-/pino-10.1.1.tgz", + "integrity": "sha512-3qqVfpJtRQUCAOs4rTOEwLH6mwJJ/CSAlbis8fKOiMzTtXh0HN/VLsn3UWVTJ7U8DsWmxeNon2IpGb+wORXH4g==", "dependencies": { + "@pinojs/redact": "^0.4.0", "atomic-sleep": "^1.0.0", - "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", - "pino-abstract-transport": "^2.0.0", + "pino-abstract-transport": "^3.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", - "thread-stream": "^3.0.0" + "thread-stream": "^4.0.0" }, "bin": { "pino": "bin.js" } }, "node_modules/pino-abstract-transport": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz", - "integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz", + "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==", 
"dependencies": { "split2": "^4.0.0" } @@ -17358,7 +17408,8 @@ "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true }, "node_modules/prr": { "version": "1.0.1", @@ -17548,6 +17599,16 @@ "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", "dev": true }, + "node_modules/react-refresh": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", + "integrity": "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/read-cache": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", @@ -19619,6 +19680,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, "dependencies": { "shebang-regex": "^3.0.0" }, @@ -19630,6 +19692,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, "engines": { "node": ">=8" } @@ -20050,6 +20113,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "dependencies": { "emoji-regex": "^8.0.0", 
"is-fullwidth-code-point": "^3.0.0", @@ -20062,7 +20126,8 @@ "node_modules/string-width-cjs/node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/string-width/node_modules/emoji-regex": { "version": "8.0.0", @@ -20192,6 +20257,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "dependencies": { "ansi-regex": "^5.0.1" }, @@ -20657,11 +20723,14 @@ "dev": true }, "node_modules/thread-stream": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", - "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-4.0.0.tgz", + "integrity": "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA==", "dependencies": { "real-require": "^0.2.0" + }, + "engines": { + "node": ">=20" } }, "node_modules/through": { @@ -21490,9 +21559,9 @@ "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==" }, "node_modules/undici": { - "version": "7.12.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-7.12.0.tgz", - "integrity": "sha512-GrKEsc3ughskmGA9jevVlIOPMiiAHJ4OFUtaAH+NhfTUSiZ1wMPIQqQvAJUrJspFXJt3EBWgpAeoHEDVT1IBug==", + "version": "7.18.2", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.18.2.tgz", + "integrity": 
"sha512-y+8YjDFzWdQlSE9N5nzKMT3g4a5UBX1HKowfdXh0uvAnTaqqwqB92Jt4UXBAeKekDs5IaDKyJFR4X1gYVCgXcw==", "engines": { "node": ">=20.18.1" } @@ -22122,6 +22191,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, "dependencies": { "isexe": "^2.0.0" }, @@ -22258,6 +22328,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -22442,7 +22513,6 @@ "@fastify/mongodb": "^9.0.1", "@fastify/swagger": "^9.5.1", "@fastify/swagger-ui": "^5.2.3", - "@sendgrid/mail": "^8.1.4", "aws-sdk": "^2.1692.0", "bson": "^6.8.0", "dotenv": "^16.4.7", @@ -22547,6 +22617,47 @@ "url": "https://eslint.org/donate" } }, + "packages/flowerbase/node_modules/@fastify/cors": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/@fastify/cors/-/cors-10.1.0.tgz", + "integrity": "sha512-MZyBCBJtII60CU9Xme/iE4aEy8G7QpzGR8zkdXZkDFt7ElEMachbE61tfhAG/bvSaULlqlf0huMT12T7iqEmdQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "dependencies": { + "fastify-plugin": "^5.0.0", + "mnemonist": "0.40.0" + } + }, + "packages/flowerbase/node_modules/@fastify/jwt": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@fastify/jwt/-/jwt-9.1.0.tgz", + "integrity": "sha512-CiGHCnS5cPMdb004c70sUWhQTfzrJHAeTywt7nVw6dAiI0z1o4WRvU94xfijhkaId4bIxTCOjFgn4sU+Gvk43w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "dependencies": { + 
"@fastify/error": "^4.0.0", + "@lukeed/ms": "^2.0.2", + "fast-jwt": "^5.0.0", + "fastify-plugin": "^5.0.0", + "steed": "^1.1.3" + } + }, "packages/flowerbase/node_modules/@types/node": { "version": "22.6.1", "dev": true, @@ -22802,6 +22913,17 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, + "packages/flowerbase/node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "packages/flowerbase/node_modules/eslint": { "version": "9.28.0", "dev": true, @@ -22943,6 +23065,20 @@ "node": ">= 6" } }, + "packages/flowerbase/node_modules/fast-jwt": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/fast-jwt/-/fast-jwt-5.0.6.tgz", + "integrity": "sha512-LPE7OCGUl11q3ZgW681cEU2d0d2JZ37hhJAmetCgNyW8waVaJVZXhyFF6U2so1Iim58Yc7pfxJe2P7MNetQH2g==", + "dependencies": { + "@lukeed/ms": "^2.0.2", + "asn1.js": "^5.4.1", + "ecdsa-sig-formatter": "^1.0.11", + "mnemonist": "^0.40.0" + }, + "engines": { + "node": ">=20" + } + }, "packages/flowerbase/node_modules/file-entry-cache": { "version": "8.0.0", "dev": true, @@ -23047,6 +23183,17 @@ "concat-map": "0.0.1" } }, + "packages/flowerbase/node_modules/node-cron": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.3.tgz", + "integrity": "sha512-dOal67//nohNgYWb+nWmg5dkFdIwDm8EpeGYMekPMrngV3637lqnX0lbUcCtgibHTz6SEz7DAIjKvKDFYCnO1A==", + "dependencies": { + "uuid": "8.3.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, "packages/flowerbase/node_modules/p-locate": { "version": "5.0.0", "dev": true, @@ -23095,6 +23242,14 @@ "node": ">=14.17" } }, + "packages/flowerbase/node_modules/uuid": { + "version": "8.3.2", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "packages/users-exporter": { "name": "mongo-server-migration-tool", "version": "1.0.0", diff --git a/package.json b/package.json index e7c0726..0e62238 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "private": true, "scripts": { "test": "npx jest", + "test:e2e": "npx jest --config tests/jest.e2e.config.ts", "tsc:noemit": "nx run-many -t tsc:noemit", "lint:check": "nx run-many -t lint --fix --quiet --exclude=demo", "coverage": "npm run test -- --coverage", @@ -34,7 +35,6 @@ "url": "https://github.com/flowerforce/flowerbase/issues" }, "homepage": "https://github.com/flowerforce/flowerbase#readme", - "dependencies": {}, "devDependencies": { "@babel/core": "^7.24.5", "@babel/preset-react": "^7.24.1", @@ -53,9 +53,11 @@ "@swc/helpers": "~0.5.11", "@swc/jest": "0.2.36", "@types/jest": "^29.4.0", + "@types/mongodb": "^4.0.7", "@types/node": "18.16.9", "@typescript-eslint/eslint-plugin": "^7.3.0", "@typescript-eslint/parser": "^7.3.0", + "dotenv": "^17.2.3", "eslint": "~8.57.0", "eslint-config-prettier": "^9.0.0", "eslint-plugin-import": "^2.29.1", @@ -64,10 +66,12 @@ "eslint-plugin-prettier": "^5.1.3", "eslint-plugin-react": "^7.34.1", "eslint-plugin-react-hooks": "^4.6.2", + "fastify": "^5.6.2", "jest": "^29.4.1", "jest-environment-jsdom": "^29.4.1", "jest-environment-node": "^29.4.1", "markdownlint-cli2": "^0.15.0", + "mongodb": "^6.21.0", "nx": "19.4.1", "prettier": "^3.2.5", "rimraf": "^5.0.7", @@ -83,5 +87,17 @@ "@nx/nx-linux-x64-gnu": "19.4.1", "@nx/nx-win32-x64-msvc": "19.4.1", "@rollup/rollup-linux-x64-gnu": "4.18.0" + }, + "dependencies": { + "@fastify/cors": "^11.2.0", + "@fastify/jwt": "^10.0.0", + "@fastify/mongodb": "^9.0.2", + "@fastify/swagger": "^9.6.1", + "@fastify/swagger-ui": "^5.2.4", + "aws-sdk": "^2.1692.0", + "bson": "^6.10.4", 
+ "fastify-raw-body": "^5.0.0", + "node-cron": "^4.2.1", + "undici": "^7.18.2" } } \ No newline at end of file diff --git a/packages/demo/packages/backend/data_sources/mongodb-atlas/flowerbase-demo/users/rules.json b/packages/demo/packages/backend/data_sources/mongodb-atlas/flowerbase-demo/users/rules.json index fa9b974..76df483 100644 --- a/packages/demo/packages/backend/data_sources/mongodb-atlas/flowerbase-demo/users/rules.json +++ b/packages/demo/packages/backend/data_sources/mongodb-atlas/flowerbase-demo/users/rules.json @@ -9,7 +9,12 @@ "delete": false, "search": false, "read": false, - "write": false + "write": false, + "fields": { + "password": { + "read": false + } + } } ] -} +} \ No newline at end of file diff --git a/packages/flowerbase/README.md b/packages/flowerbase/README.md index bfc5702..411cb9b 100644 --- a/packages/flowerbase/README.md +++ b/packages/flowerbase/README.md @@ -96,6 +96,13 @@ Ensure the following environment variables are set in your .env file or deployme | `APP_SECRET` | Secret used to sign and verify JWT tokens (choose a strong secret). | `supersecretkey123!` | | `HOST` | The host address the server binds to (usually `0.0.0.0` for public access). | `0.0.0.0` | | `HTTPS_SCHEMA` | The schema for your server requests (usually `https` or `http`). | `http` | +| `RESET_PASSWORD_TTL_SECONDS` | Time-to-live for password reset tokens (in seconds). | `3600` | +| `AUTH_RATE_LIMIT_WINDOW_MS` | Rate limit window for auth endpoints (in ms). | `900000` | +| `AUTH_LOGIN_MAX_ATTEMPTS` | Max login attempts per window. | `10` | +| `AUTH_RESET_MAX_ATTEMPTS` | Max reset requests per window. | `5` | +| `REFRESH_TOKEN_TTL_DAYS` | Refresh token time-to-live (in days). | `60` | +| `SWAGGER_UI_USER` | Basic Auth username for Swagger UI (optional). | `admin` | +| `SWAGGER_UI_PASSWORD` | Basic Auth password for Swagger UI (optional). 
| `change-me` | Example: @@ -106,6 +113,13 @@ DB_CONNECTION_STRING=mongodb+srv://username:password@cluster.mongodb.net/dbname APP_SECRET=your-jwt-secret HOST=0.0.0.0 HTTPS_SCHEMA=http +RESET_PASSWORD_TTL_SECONDS=3600 +AUTH_RATE_LIMIT_WINDOW_MS=900000 +AUTH_LOGIN_MAX_ATTEMPTS=10 +AUTH_RESET_MAX_ATTEMPTS=5 +REFRESH_TOKEN_TTL_DAYS=60 +SWAGGER_UI_USER=admin +SWAGGER_UI_PASSWORD=change-me ``` 🛡️ Note: Never commit .env files to source control. Use a .gitignore file to exclude it. @@ -406,6 +420,13 @@ Ensure the following environment variables are set in your .env file or deployme | `APP_SECRET` | Secret used to sign and verify JWT tokens (choose a strong secret). | `supersecretkey123!` | | `HOST` | The host address the server binds to (usually `0.0.0.0` for public access). | `0.0.0.0` | | `HTTPS_SCHEMA` | The schema for your server requests (usually `https` or `http`). | `http` | +| `RESET_PASSWORD_TTL_SECONDS` | Time-to-live for password reset tokens (in seconds). | `3600` | +| `AUTH_RATE_LIMIT_WINDOW_MS` | Rate limit window for auth endpoints (in ms). | `900000` | +| `AUTH_LOGIN_MAX_ATTEMPTS` | Max login attempts per window. | `10` | +| `AUTH_RESET_MAX_ATTEMPTS` | Max reset requests per window. | `5` | +| `REFRESH_TOKEN_TTL_DAYS` | Refresh token time-to-live (in days). | `60` | +| `SWAGGER_UI_USER` | Basic Auth username for Swagger UI (optional). | `admin` | +| `SWAGGER_UI_PASSWORD` | Basic Auth password for Swagger UI (optional). | `change-me` | Example: @@ -416,6 +437,13 @@ DB_CONNECTION_STRING=mongodb+srv://username:password@cluster.mongodb.net/dbname APP_SECRET=your-jwt-secret HOST=0.0.0.0 HTTPS_SCHEMA=http +RESET_PASSWORD_TTL_SECONDS=3600 +AUTH_RATE_LIMIT_WINDOW_MS=900000 +AUTH_LOGIN_MAX_ATTEMPTS=10 +AUTH_RESET_MAX_ATTEMPTS=5 +REFRESH_TOKEN_TTL_DAYS=60 +SWAGGER_UI_USER=admin +SWAGGER_UI_PASSWORD=change-me ``` 🛡️ Note: Never commit .env files to source control. Use a .gitignore file to exclude it. 
@@ -472,6 +500,3 @@ export default app; >🔗 The baseUrl should point to the backend URL you deployed earlier using Flowerbase. This tells the frontend SDK where to send authentication and data requests. - - - diff --git a/packages/flowerbase/jest.config.ts b/packages/flowerbase/jest.config.ts index 23bc4a4..8314e09 100644 --- a/packages/flowerbase/jest.config.ts +++ b/packages/flowerbase/jest.config.ts @@ -4,21 +4,11 @@ module.exports = { '^.+\\.[tj]s$': [ 'ts-jest', { - tsconfig: './tsconfig.json' + tsconfig: '/tsconfig.json' } ] }, - collectCoverage: false, - collectCoverageFrom: ['./**/*.ts'], - coverageDirectory: 'coverage', - coverageThreshold: { - global: { - branches: 50, - functions: 90, - lines: 90, - statements: 90 - } - }, + setupFilesAfterEnv: ['/jest.setup.ts'], testEnvironment: 'node', testMatch: ['./**/*.test.ts'] } diff --git a/packages/flowerbase/jest.setup.ts b/packages/flowerbase/jest.setup.ts new file mode 100644 index 0000000..5e88c9b --- /dev/null +++ b/packages/flowerbase/jest.setup.ts @@ -0,0 +1,28 @@ +import { Blob as NodeBlob } from 'buffer' +import path from 'node:path' + +if (!process.env.FLOWERBASE_APP_PATH) { + process.env.FLOWERBASE_APP_PATH = path.resolve(__dirname, '../../tests/e2e/app') +} + +const BaseBlob = typeof globalThis.Blob !== 'undefined' ? globalThis.Blob : NodeBlob + +type PolyfillFilePropertyBag = FilePropertyBag & { + name?: string +} + +class FilePolyfill extends BaseBlob { + lastModified: number + name: string + + constructor(bits?: Iterable, options?: FilePropertyBag) { + super(bits, options as FilePropertyBag) + const fileOptions = options as PolyfillFilePropertyBag + this.name = fileOptions?.name ?? '' + this.lastModified = fileOptions?.lastModified ?? 
Date.now() + } +} + +if (typeof globalThis.File === 'undefined') { + globalThis.File = FilePolyfill as unknown as typeof File +} diff --git a/packages/flowerbase/package.json b/packages/flowerbase/package.json index bb4685d..30364f4 100644 --- a/packages/flowerbase/package.json +++ b/packages/flowerbase/package.json @@ -25,7 +25,6 @@ "@fastify/mongodb": "^9.0.1", "@fastify/swagger": "^9.5.1", "@fastify/swagger-ui": "^5.2.3", - "@sendgrid/mail": "^8.1.4", "aws-sdk": "^2.1692.0", "bson": "^6.8.0", "dotenv": "^16.4.7", @@ -69,4 +68,4 @@ "exports": { ".": "./dist/index.js" } -} +} \ No newline at end of file diff --git a/packages/flowerbase/src/auth/controller.ts b/packages/flowerbase/src/auth/controller.ts index d1181e6..49d1088 100644 --- a/packages/flowerbase/src/auth/controller.ts +++ b/packages/flowerbase/src/auth/controller.ts @@ -1,6 +1,7 @@ import { ObjectId } from 'bson' import { FastifyInstance } from 'fastify' -import { AUTH_CONFIG, DB_NAME } from '../constants' +import { AUTH_CONFIG, DB_NAME, DEFAULT_CONFIG } from '../constants' +import { hashToken } from '../utils/crypto' import { SessionCreatedDto } from './dtos' import { AUTH_ENDPOINTS, AUTH_ERRORS } from './utils' @@ -12,9 +13,19 @@ const HANDLER_TYPE = 'preHandler' * @param {FastifyInstance} app - The Fastify instance. 
*/ export async function authController(app: FastifyInstance) { - const { authCollection, userCollection } = AUTH_CONFIG + const { authCollection, userCollection, refreshTokensCollection } = AUTH_CONFIG const db = app.mongo.client.db(DB_NAME) + const refreshTokenTtlMs = DEFAULT_CONFIG.REFRESH_TOKEN_TTL_DAYS * 24 * 60 * 60 * 1000 + + try { + await db.collection(refreshTokensCollection).createIndex( + { expiresAt: 1 }, + { expireAfterSeconds: 0 } + ) + } catch (error) { + console.error('Failed to ensure refresh token TTL index', error) + } app.addHook(HANDLER_TYPE, app.jwtAuthentication) @@ -59,6 +70,21 @@ export async function authController(app: FastifyInstance) { throw new Error(AUTH_ERRORS.INVALID_TOKEN) } + const authHeader = req.headers.authorization + if (!authHeader?.startsWith('Bearer ')) { + throw new Error(AUTH_ERRORS.INVALID_TOKEN) + } + const refreshToken = authHeader.slice('Bearer '.length).trim() + const refreshTokenHash = hashToken(refreshToken) + const storedToken = await db.collection(refreshTokensCollection).findOne({ + tokenHash: refreshTokenHash, + revokedAt: null, + expiresAt: { $gt: new Date() } + }) + if (!storedToken) { + throw new Error(AUTH_ERRORS.INVALID_TOKEN) + } + const auth_user = await db ?.collection(authCollection) .findOne({ _id: new this.mongo.ObjectId(req.user.sub) }) @@ -75,7 +101,10 @@ export async function authController(app: FastifyInstance) { return { access_token: this.createAccessToken({ ...auth_user, - user_data: user + user_data: { + ...user, + id: req.user.sub + } }) } } @@ -85,8 +114,45 @@ export async function authController(app: FastifyInstance) { */ app.delete( AUTH_ENDPOINTS.SESSION, - async function () { - return { status: "ok" } + async function (req, res) { + const authHeader = req.headers.authorization + if (!authHeader?.startsWith('Bearer ')) { + res.status(204) + return + } + const refreshToken = authHeader.slice('Bearer '.length).trim() + const refreshTokenHash = hashToken(refreshToken) + const now = new 
Date() + const expiresAt = new Date(Date.now() + refreshTokenTtlMs) + const updateResult = await db.collection(refreshTokensCollection).findOneAndUpdate( + { tokenHash: refreshTokenHash }, + { + $set: { + revokedAt: now, + expiresAt + } + }, + { returnDocument: 'after' } + ) + + const fromToken = req.user?.sub + let userId = updateResult?.value?.userId + if (!userId && fromToken) { + try { + userId = new ObjectId(fromToken) + } catch { + userId = fromToken + } + } + + if (userId && authCollection) { + await db.collection(authCollection).updateOne( + { _id: userId }, + { $set: { lastLogoutAt: now } } + ) + } + + return { status: 'ok' } } ) } diff --git a/packages/flowerbase/src/auth/plugins/jwt.test.ts b/packages/flowerbase/src/auth/plugins/jwt.test.ts new file mode 100644 index 0000000..4fa5aa9 --- /dev/null +++ b/packages/flowerbase/src/auth/plugins/jwt.test.ts @@ -0,0 +1,93 @@ +jest.mock('node:diagnostics_channel', () => { + const createChannel = () => ({ + publish: jest.fn(), + subscribe: jest.fn() + }) + return { + channel: jest.fn(createChannel), + tracingChannel: () => ({ + asyncStart: createChannel(), + asyncEnd: createChannel(), + error: createChannel() + }) + } +}) + +import fastify, { FastifyInstance, FastifyReply } from 'fastify' +import jwtAuthPlugin from './jwt' +import { ObjectId } from 'bson' + +const SECRET = 'test-secret' + +const createAccessRequest = (payload: { typ: 'access'; sub: string; iat: number }) => { + const request: Record = {} + request.jwtVerify = jest.fn(async () => { + request.user = payload + }) + return request +} + +describe('jwtAuthentication', () => { + let app: FastifyInstance + + beforeEach(async () => { + app = fastify() + await app.register(jwtAuthPlugin, { secret: SECRET }) + await app.ready() + }) + + afterEach(async () => { + await app.close() + }) + + const setupMongo = (userPayload: { _id: ObjectId; lastLogoutAt?: Date }) => { + const findOneMock = jest.fn().mockResolvedValue(userPayload) + const collectionMock = { 
findOne: findOneMock } + const dbMock = { collection: jest.fn().mockReturnValue(collectionMock) } + const mongoMock = { client: { db: jest.fn().mockReturnValue(dbMock) } } + ;(app as any).mongo = mongoMock + } + + const createReply = () => { + return { + code: jest.fn().mockReturnThis(), + send: jest.fn() + } as unknown as FastifyReply + } + + it('allows access tokens issued after the last logout', async () => { + const userId = new ObjectId() + const nowSeconds = Math.floor(Date.now() / 1000) + setupMongo({ _id: userId, lastLogoutAt: new Date((nowSeconds - 30) * 1000) }) + + const request = createAccessRequest({ + typ: 'access', + sub: userId.toHexString(), + iat: nowSeconds + }) + const reply = createReply() + + await app.jwtAuthentication(request as any, reply) + + expect(reply.code).not.toHaveBeenCalled() + expect(reply.send).not.toHaveBeenCalled() + }) + + it('rejects access tokens issued before the last logout', async () => { + const userId = new ObjectId() + const nowSeconds = Math.floor(Date.now() / 1000) + setupMongo({ _id: userId, lastLogoutAt: new Date((nowSeconds + 30) * 1000) }) + + const request = createAccessRequest({ + typ: 'access', + sub: userId.toHexString(), + iat: nowSeconds + }) + const reply = createReply() + + await app.jwtAuthentication(request as any, reply) + + expect(reply.code).toHaveBeenCalledWith(401) + expect(reply.send).toHaveBeenCalledWith({ message: 'Unauthorized' }) + }) +}) diff --git a/packages/flowerbase/src/auth/plugins/jwt.ts b/packages/flowerbase/src/auth/plugins/jwt.ts index e518358..b7a9745 100644 --- a/packages/flowerbase/src/auth/plugins/jwt.ts +++ b/packages/flowerbase/src/auth/plugins/jwt.ts @@ -1,11 +1,18 @@ import fastifyJwt from '@fastify/jwt' import fp from 'fastify-plugin' import { Document, ObjectId, WithId } from 'mongodb' +import { AUTH_CONFIG, DB_NAME, DEFAULT_CONFIG } from '../../constants' type Options = { secret: string } +type JwtAccessWithTimestamp = { + typ: 'access' + sub: string + iat?: number +} + 
/** * This module is a Fastify plugin that sets up JWT-based authentication and token creation. * It registers JWT authentication, and provides methods to create access and refresh tokens. @@ -25,20 +32,72 @@ export default fp(async function (fastify, opts: Options) { try { await request.jwtVerify() } catch (err) { - // TODO: handle error - reply.send(err) + fastify.log.warn({ err }, 'JWT authentication failed') + reply.code(401).send({ message: 'Unauthorized' }) + return + } + + if (request.user?.typ !== 'access') { + return + } + + const db = fastify.mongo?.client?.db(DB_NAME) + if (!db) { + fastify.log.warn('Mongo client unavailable while checking logout state') + return + } + + if (!request.user.sub) { + reply.code(401).send({ message: 'Unauthorized' }) + return + } + + let authUser + try { + authUser = await db + .collection(AUTH_CONFIG.authCollection) + .findOne({ _id: new ObjectId(request.user.sub) }) + } catch (err) { + fastify.log.warn({ err }, 'Failed to lookup user during JWT authentication') + reply.code(401).send({ message: 'Unauthorized' }) + return + } + + if (!authUser) { + reply.code(401).send({ message: 'Unauthorized' }) + return + } + + const lastLogoutAt = authUser.lastLogoutAt ? new Date(authUser.lastLogoutAt) : null + const accessUser = request.user as JwtAccessWithTimestamp + const rawIssuedAt = accessUser.iat + const issuedAt = + typeof rawIssuedAt === 'number' + ? rawIssuedAt + : typeof rawIssuedAt === 'string' + ? 
Number(rawIssuedAt) + : undefined + if ( + lastLogoutAt && + !Number.isNaN(lastLogoutAt.getTime()) && + typeof issuedAt === 'number' && + !Number.isNaN(issuedAt) && + lastLogoutAt.getTime() >= issuedAt * 1000 + ) { + reply.code(401).send({ message: 'Unauthorized' }) + return } }) fastify.decorate('createAccessToken', function (user: WithId) { const id = user._id.toString() - const userDataId = user.user_data._id.toString() + // const userDataId = user.user_data._id.toString() const user_data = { - _id: userDataId, - id: userDataId, + ...user.user_data, + _id: id, + id: id, email: user.email, - ...user.user_data } return this.jwt.sign( @@ -52,7 +111,7 @@ export default fp(async function (fastify, opts: Options) { { iss: BAAS_ID, jti: BAAS_ID, - sub: user._id.toJSON(), + sub: id, expiresIn: '300m' } ) @@ -66,7 +125,7 @@ export default fp(async function (fastify, opts: Options) { }, { sub: user._id.toJSON(), - expiresIn: '60d' + expiresIn: `${DEFAULT_CONFIG.REFRESH_TOKEN_TTL_DAYS}d` } ) }) diff --git a/packages/flowerbase/src/auth/providers/custom-function/controller.ts b/packages/flowerbase/src/auth/providers/custom-function/controller.ts index 4466011..68f9872 100644 --- a/packages/flowerbase/src/auth/providers/custom-function/controller.ts +++ b/packages/flowerbase/src/auth/providers/custom-function/controller.ts @@ -1,9 +1,10 @@ import { FastifyInstance } from 'fastify' -import { AUTH_CONFIG } from '../../../constants' +import { AUTH_CONFIG, DB_NAME, DEFAULT_CONFIG } from '../../../constants' import handleUserRegistration from '../../../shared/handleUserRegistration' import { PROVIDER } from '../../../shared/models/handleUserRegistration.model' import { StateManager } from '../../../state' import { GenerateContext } from '../../../utils/context' +import { hashToken } from '../../../utils/crypto' import { AUTH_ENDPOINTS, generatePassword, @@ -22,6 +23,9 @@ export async function customFunctionController(app: FastifyInstance) { const functionsList = 
StateManager.select('functions') const services = StateManager.select('services') + const db = app.mongo.client.db(DB_NAME) + const { refreshTokensCollection } = AUTH_CONFIG + const refreshTokenTtlMs = DEFAULT_CONFIG.REFRESH_TOKEN_TTL_DAYS * 24 * 60 * 60 * 1000 /** * Endpoint for user login. @@ -53,6 +57,7 @@ export async function customFunctionController(app: FastifyInstance) { id } = req + type CustomFunctionAuthResult = { id?: string } const res = await GenerateContext({ args: [ req.body @@ -72,23 +77,35 @@ export async function customFunctionController(app: FastifyInstance) { ip, id } - }) + }) as CustomFunctionAuthResult if (res.id) { const user = await handleUserRegistration(app, { run_as_system: true, skipUserCheck: true, provider: PROVIDER.CUSTOM_FUNCTION })({ email: res.id, password: generatePassword() }) + if (!user?.insertedId) { + throw new Error('Failed to register custom user') + } const currentUserData = { _id: user.insertedId, user_data: { - _id: user.insertedId, + _id: user.insertedId } } + const refreshToken = this.createRefreshToken(currentUserData) + const refreshTokenHash = hashToken(refreshToken) + await db.collection(refreshTokensCollection).insertOne({ + userId: user.insertedId, + tokenHash: refreshTokenHash, + createdAt: new Date(), + expiresAt: new Date(Date.now() + refreshTokenTtlMs), + revokedAt: null + }) return { access_token: this.createAccessToken(currentUserData), - refresh_token: this.createRefreshToken(currentUserData), + refresh_token: refreshToken, device_id: '', - user_id: user.insertedId.toString(), + user_id: user.insertedId.toString() } } diff --git a/packages/flowerbase/src/auth/providers/local-userpass/controller.ts b/packages/flowerbase/src/auth/providers/local-userpass/controller.ts index 762b921..c92c3a5 100644 --- a/packages/flowerbase/src/auth/providers/local-userpass/controller.ts +++ b/packages/flowerbase/src/auth/providers/local-userpass/controller.ts @@ -1,42 +1,121 @@ -import sendGrid from '@sendgrid/mail' import 
{ FastifyInstance } from 'fastify' -import { AUTH_CONFIG, DB_NAME } from '../../../constants' -import { services } from '../../../services' +import { ObjectId } from 'mongodb' +import { AUTH_CONFIG, DB_NAME, DEFAULT_CONFIG } from '../../../constants' import handleUserRegistration from '../../../shared/handleUserRegistration' import { PROVIDER } from '../../../shared/models/handleUserRegistration.model' import { StateManager } from '../../../state' import { GenerateContext } from '../../../utils/context' -import { comparePassword, generateToken, hashPassword } from '../../../utils/crypto' +import { comparePassword, generateToken, hashPassword, hashToken } from '../../../utils/crypto' import { AUTH_ENDPOINTS, AUTH_ERRORS, CONFIRM_RESET_SCHEMA, - getMailConfig, + CONFIRM_USER_SCHEMA, LOGIN_SCHEMA, REGISTRATION_SCHEMA, - RESET_SCHEMA + RESET_CALL_SCHEMA, + RESET_SEND_SCHEMA } from '../../utils' import { ConfirmResetPasswordDto, + ConfirmUserDto, LoginDto, RegistrationDto, - ResetPasswordDto + ResetPasswordCallDto, + ResetPasswordSendDto } from './dtos' + +const rateLimitStore = new Map() + +const isRateLimited = (key: string, maxAttempts: number, windowMs: number) => { + const now = Date.now() + const existing = rateLimitStore.get(key) ?? [] + const recent = existing.filter((timestamp) => now - timestamp < windowMs) + recent.push(now) + rateLimitStore.set(key, recent) + return recent.length > maxAttempts +} /** * Controller for handling local user registration and login. * @testable * @param {FastifyInstance} app - The Fastify instance. 
*/ export async function localUserPassController(app: FastifyInstance) { - const functionsList = StateManager.select('functions') - - const { - authCollection, - userCollection, - user_id_field, - on_user_creation_function_name - } = AUTH_CONFIG + const { authCollection, userCollection, user_id_field } = AUTH_CONFIG + const { resetPasswordCollection } = AUTH_CONFIG + const { refreshTokensCollection } = AUTH_CONFIG const db = app.mongo.client.db(DB_NAME) + const resetPasswordTtlSeconds = DEFAULT_CONFIG.RESET_PASSWORD_TTL_SECONDS + const rateLimitWindowMs = DEFAULT_CONFIG.AUTH_RATE_LIMIT_WINDOW_MS + const loginMaxAttempts = DEFAULT_CONFIG.AUTH_LOGIN_MAX_ATTEMPTS + const resetMaxAttempts = DEFAULT_CONFIG.AUTH_RESET_MAX_ATTEMPTS + const refreshTokenTtlMs = DEFAULT_CONFIG.REFRESH_TOKEN_TTL_DAYS * 24 * 60 * 60 * 1000 + + try { + await db.collection(resetPasswordCollection).createIndex( + { createdAt: 1 }, + { expireAfterSeconds: resetPasswordTtlSeconds } + ) + } catch (error) { + console.error('Failed to ensure reset password TTL index', error) + } + + try { + await db.collection(refreshTokensCollection).createIndex( + { expiresAt: 1 }, + { expireAfterSeconds: 0 } + ) + } catch (error) { + console.error('Failed to ensure refresh token TTL index', error) + } + const handleResetPasswordRequest = async ( + email: string, + password?: string, + extraArguments?: unknown[] + ) => { + const { resetPasswordConfig } = AUTH_CONFIG + const authUser = await db.collection(authCollection!).findOne({ + email + }) + + if (!authUser) { + return + } + + const token = generateToken() + const tokenId = generateToken() + + await db + ?.collection(resetPasswordCollection) + .updateOne( + { email }, + { $set: { token, tokenId, email, createdAt: new Date() } }, + { upsert: true } + ) + + if (!resetPasswordConfig.runResetFunction && !resetPasswordConfig.resetFunctionName) { + throw new Error(AUTH_ERRORS.MISSING_RESET_FUNCTION) + } + + if (resetPasswordConfig.runResetFunction && 
resetPasswordConfig.resetFunctionName) { + const functionsList = StateManager.select('functions') + const services = StateManager.select('services') + const currentFunction = functionsList[resetPasswordConfig.resetFunctionName] + const baseArgs = { token, tokenId, email, password, username: email } + const args = Array.isArray(extraArguments) ? [baseArgs, ...extraArguments] : [baseArgs] + await GenerateContext({ + args, + app, + rules: {}, + user: {}, + currentFunction, + functionsList, + services + }) + return + } + + } /** * Endpoint for user registration. @@ -55,8 +134,57 @@ export async function localUserPassController(app: FastifyInstance) { const result = await handleUserRegistration(app, { run_as_system: true, provider: PROVIDER.LOCAL_USERPASS })({ email: req.body.email.toLowerCase(), password: req.body.password }) + if (!result?.insertedId) { + res?.status(500) + throw new Error('Failed to register user') + } + res?.status(201) - return { userId: result?.insertedId.toString() } + return { userId: result.insertedId.toString() } + } + ) + + /** + * Endpoint for confirming a user registration. + * + * @route {POST} /confirm + * @param {ConfirmUserDto} req - The request object with confirmation data. + * @returns {Promise} A promise resolving with confirmation status. 
+ */ + app.post( + AUTH_ENDPOINTS.CONFIRM, + { + schema: CONFIRM_USER_SCHEMA + }, + async (req, res) => { + const key = `confirm:${req.ip}` + if (isRateLimited(key, resetMaxAttempts, rateLimitWindowMs)) { + res.status(429).send({ message: 'Too many requests' }) + return + } + + const existing = await db.collection(authCollection!).findOne({ + confirmationToken: req.body.token, + confirmationTokenId: req.body.tokenId + }) as { _id: ObjectId; status?: string } | null + + if (!existing) { + res.status(500) + throw new Error(AUTH_ERRORS.INVALID_TOKEN) + } + + if (existing.status !== 'confirmed') { + await db.collection(authCollection!).updateOne( + { _id: existing._id }, + { + $set: { status: 'confirmed' }, + $unset: { confirmationToken: '', confirmationTokenId: '' } + } + ) + } + + res.status(200) + return { status: 'confirmed' } } ) @@ -72,7 +200,12 @@ export async function localUserPassController(app: FastifyInstance) { { schema: LOGIN_SCHEMA }, - async function (req) { + async function (req, res) { + const key = `login:${req.ip}` + if (isRateLimited(key, loginMaxAttempts, rateLimitWindowMs)) { + res.status(429).send({ message: 'Too many requests' }) + return + } const authUser = await db.collection(authCollection!).findOne({ email: req.body.username }) @@ -105,45 +238,23 @@ export async function localUserPassController(app: FastifyInstance) { id: authUser._id.toString() } - if (authUser && authUser.status === 'pending') { - try { - await db?.collection(authCollection!).updateOne( - { _id: authUser._id }, - { - $set: { - status: 'confirmed' - } - } - ) - } catch (error) { - console.log('>>> 🚀 ~ localUserPassController ~ error:', error) - } + if (authUser && authUser.status !== 'confirmed') { + throw new Error(AUTH_ERRORS.USER_NOT_CONFIRMED) } - if ( - authUser && - authUser.status === 'pending' && - on_user_creation_function_name && - functionsList[on_user_creation_function_name] - ) { - try { - await GenerateContext({ - args: [userWithCustomData], - app, - rules: 
{}, - user: userWithCustomData, - currentFunction: functionsList[on_user_creation_function_name], - functionsList, - services - }) - } catch (error) { - console.log('localUserPassController - /login - GenerateContext - CATCH:', error) - } - } + const refreshToken = this.createRefreshToken(userWithCustomData) + const refreshTokenHash = hashToken(refreshToken) + await db.collection(refreshTokensCollection).insertOne({ + userId: authUser._id, + tokenHash: refreshTokenHash, + createdAt: new Date(), + expiresAt: new Date(Date.now() + refreshTokenTtlMs), + revokedAt: null + }) return { access_token: this.createAccessToken(userWithCustomData), - refresh_token: this.createRefreshToken(userWithCustomData), + refresh_token: refreshToken, device_id: '', user_id: authUser._id.toString() } @@ -153,65 +264,49 @@ export async function localUserPassController(app: FastifyInstance) { /** * Endpoint for reset password. * - * @route {POST} /reset/call + * @route {POST} /reset/send * @param {ResetPasswordDto} req - The request object with th reset request. 
* @returns {Promise} */ - app.post( + app.post( AUTH_ENDPOINTS.RESET, { - schema: RESET_SCHEMA + schema: RESET_SEND_SCHEMA }, - async function (req) { - const { resetPasswordCollection, resetPasswordConfig } = AUTH_CONFIG - const email = req.body.email - const authUser = await db.collection(authCollection!).findOne({ - email - }) - - if (!authUser) { - throw new Error(AUTH_ERRORS.INVALID_CREDENTIALS) + async function (req, res) { + const key = `reset:${req.ip}` + if (isRateLimited(key, resetMaxAttempts, rateLimitWindowMs)) { + res.status(429) + return { message: 'Too many requests' } } - - const token = generateToken() - const tokenId = generateToken() - - await db - ?.collection(resetPasswordCollection) - .updateOne( - { email }, - { $set: { token, tokenId, email, createdAt: new Date() } }, - { upsert: true } - ) - - if (resetPasswordConfig.runResetFunction && resetPasswordConfig.resetFunctionName) { - const functionsList = StateManager.select('functions') - const services = StateManager.select('services') - const currentFunction = functionsList[resetPasswordConfig.resetFunctionName] - await GenerateContext({ - args: [{ token, tokenId, email }], - app, - rules: {}, - user: {}, - currentFunction, - functionsList, - services - }) - return + await handleResetPasswordRequest(req.body.email) + res.status(202) + return { + status: 'ok' } + } + ) - const { from, subject, mailToken, body } = getMailConfig( - resetPasswordConfig, - token, - tokenId + app.post( + AUTH_ENDPOINTS.RESET_CALL, + { + schema: RESET_CALL_SCHEMA + }, + async function (req, res) { + const key = `reset:${req.ip}` + if (isRateLimited(key, resetMaxAttempts, rateLimitWindowMs)) { + res.status(429) + return { message: 'Too many requests' } + } + await handleResetPasswordRequest( + req.body.email, + req.body.password, + req.body.arguments ) - sendGrid.setApiKey(mailToken) - await sendGrid.send({ - to: email, - from, - subject, - html: body - }) + res.status(202) + return { + status: 'ok' + } } ) @@ -227,8 
+322,12 @@ export async function localUserPassController(app: FastifyInstance) { { schema: CONFIRM_RESET_SCHEMA }, - async function (req) { - const { resetPasswordCollection } = AUTH_CONFIG + async function (req, res) { + const key = `reset-confirm:${req.ip}` + if (isRateLimited(key, resetMaxAttempts, rateLimitWindowMs)) { + res.status(429) + return { message: 'Too many requests' } + } const { token, tokenId, password } = req.body const resetRequest = await db @@ -238,6 +337,16 @@ export async function localUserPassController(app: FastifyInstance) { if (!resetRequest) { throw new Error(AUTH_ERRORS.INVALID_RESET_PARAMS) } + + const createdAt = resetRequest.createdAt ? new Date(resetRequest.createdAt) : null + const isExpired = !createdAt || + Number.isNaN(createdAt.getTime()) || + Date.now() - createdAt.getTime() > resetPasswordTtlSeconds * 1000 + + if (isExpired) { + await db?.collection(resetPasswordCollection).deleteOne({ _id: resetRequest._id }) + throw new Error(AUTH_ERRORS.INVALID_RESET_PARAMS) + } const hashedPassword = await hashPassword(password) await db.collection(authCollection!).updateOne( { email: resetRequest.email }, diff --git a/packages/flowerbase/src/auth/providers/local-userpass/dtos.ts b/packages/flowerbase/src/auth/providers/local-userpass/dtos.ts index f073e53..7a24645 100644 --- a/packages/flowerbase/src/auth/providers/local-userpass/dtos.ts +++ b/packages/flowerbase/src/auth/providers/local-userpass/dtos.ts @@ -15,19 +15,30 @@ export type LoginSuccessDto = { user_id: string } +export type ErrorResponseDto = { + message: string +} + export interface RegistrationDto { Body: RegisterUserDto } export interface LoginDto { Body: LoginUserDto - Reply: LoginSuccessDto + Reply: LoginSuccessDto | ErrorResponseDto +} + +export interface ResetPasswordSendDto { + Body: { + email: string + } } -export interface ResetPasswordDto { +export interface ResetPasswordCallDto { Body: { email: string password: string + arguments?: unknown[] } } @@ -38,3 +49,10 @@ 
export interface ConfirmResetPasswordDto { password: string } } + +export interface ConfirmUserDto { + Body: { + token: string + tokenId: string + } +} diff --git a/packages/flowerbase/src/auth/utils.ts b/packages/flowerbase/src/auth/utils.ts index 12100b9..d95095b 100644 --- a/packages/flowerbase/src/auth/utils.ts +++ b/packages/flowerbase/src/auth/utils.ts @@ -8,19 +8,45 @@ export const LOGIN_SCHEMA = { body: { type: 'object', properties: { - username: { type: 'string' }, - password: { type: 'string' } + username: { + type: 'string', + pattern: '^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$', + minLength: 3, + maxLength: 254 + }, + password: { type: 'string', minLength: 8, maxLength: 128 } }, required: ['username', 'password'] } } -export const RESET_SCHEMA = { +export const RESET_SEND_SCHEMA = { body: { type: 'object', properties: { - email: { type: 'string' }, - password: { type: 'string' } + email: { + type: 'string', + pattern: '^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$', + minLength: 3, + maxLength: 254 + } + }, + required: ['email'] + } +} + +export const RESET_CALL_SCHEMA = { + body: { + type: 'object', + properties: { + email: { + type: 'string', + pattern: '^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$', + minLength: 3, + maxLength: 254 + }, + password: { type: 'string', minLength: 8, maxLength: 128 }, + arguments: { type: 'array' } }, required: ['email', 'password'] } @@ -30,7 +56,7 @@ export const CONFIRM_RESET_SCHEMA = { body: { type: 'object', properties: { - password: { type: 'string' }, + password: { type: 'string', minLength: 8, maxLength: 128 }, token: { type: 'string' }, tokenId: { type: 'string' } }, @@ -38,12 +64,30 @@ export const CONFIRM_RESET_SCHEMA = { } } +export const CONFIRM_USER_SCHEMA = { + body: { + type: 'object', + properties: { + token: { type: 'string' }, + tokenId: { type: 'string' } + }, + required: ['token', 'tokenId'] + } +} + +export const RESET_SCHEMA = RESET_SEND_SCHEMA + export const REGISTRATION_SCHEMA = { body: { type: 'object', properties: { - email: { type: 
'string' }, - password: { type: 'string' } + email: { + type: 'string', + pattern: '^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$', + minLength: 3, + maxLength: 254 + }, + password: { type: 'string', minLength: 8, maxLength: 128 } }, required: ['email', 'password'] } @@ -52,9 +96,11 @@ export const REGISTRATION_SCHEMA = { export enum AUTH_ENDPOINTS { LOGIN = '/login', REGISTRATION = '/register', + CONFIRM = '/confirm', PROFILE = '/profile', SESSION = '/session', - RESET = '/reset/call', + RESET = '/reset/send', + RESET_CALL = '/reset/call', CONFIRM_RESET = "/reset", FIRST_USER = '/setup/first-user' } @@ -62,7 +108,9 @@ export enum AUTH_ENDPOINTS { export enum AUTH_ERRORS { INVALID_CREDENTIALS = 'Invalid credentials', INVALID_TOKEN = 'Invalid refresh token provided', - INVALID_RESET_PARAMS = 'Invalid token or tokenId provided' + INVALID_RESET_PARAMS = 'Invalid token or tokenId provided', + MISSING_RESET_FUNCTION = 'Missing reset function', + USER_NOT_CONFIRMED = 'User not confirmed' } export interface AuthConfig { @@ -95,15 +143,11 @@ interface CustomFunction { export interface Config { autoConfirm: boolean + confirmationFunctionName?: string resetFunctionName: string resetPasswordUrl: string runConfirmationFunction: boolean runResetFunction: boolean - mailConfig: { - from: string - subject: string - mailToken: string - } } export interface CustomUserDataConfig { @@ -115,13 +159,16 @@ export interface CustomUserDataConfig { on_user_creation_function_name: string } +const resolveAppPath = () => + process.env.FLOWERBASE_APP_PATH ?? require.main?.path ?? 
process.cwd() + /** * > Loads the auth config json file * @testable */ export const loadAuthConfig = (): AuthConfig => { - const authPath = path.join(require.main!.path, 'auth/providers.json') + const authPath = path.join(resolveAppPath(), 'auth/providers.json') return JSON.parse(fs.readFileSync(authPath, 'utf-8')) } @@ -130,79 +177,11 @@ export const loadAuthConfig = (): AuthConfig => { * @testable */ export const loadCustomUserData = (): CustomUserDataConfig => { - const userDataPath = path.join(require.main!.path, 'auth/custom_user_data.json') + const userDataPath = path.join(resolveAppPath(), 'auth/custom_user_data.json') return JSON.parse(fs.readFileSync(userDataPath, 'utf-8')) } -export const getMailConfig = ( - resetPasswordConfig: Config, - token: string, - tokenId: string -) => { - const { mailConfig, resetPasswordUrl } = resetPasswordConfig - const ENV_PREFIX = 'ENV' - const { from, subject, mailToken } = mailConfig - - const [fromPrefix, fromPath] = from.split('.') - - if (!fromPath) { - throw new Error(`Invalid fromPath: ${fromPath}`) - } - - const currentSender = (fromPrefix === ENV_PREFIX ? process.env[fromPath] : from) ?? '' - const [subjectPrefix, subjectPath] = subject.split('.') - - if (!subjectPath) { - throw new Error(`Invalid subjectPath: ${subjectPath}`) - } - - const currentSubject = - (subjectPrefix === ENV_PREFIX ? process.env[subjectPath] : subject) ?? '' - const [mailTokenPrefix, mailTokenPath] = mailToken.split('.') - - if (!mailTokenPath) { - throw new Error(`Invalid mailTokenPath: ${mailTokenPath}`) - } - - const currentMailToken = - (mailTokenPrefix === 'ENV' ? process.env[mailTokenPath] : mailToken) ?? '' - - const link = `${resetPasswordUrl}/${token}/${tokenId}` - const body = ` - - - - -
- - - - -
-

Password Reset Request

-

If you requested a password reset, click the button below to reset your password.

-

If you did not request this, please ignore this email.

-

- Reset Password -

-

If the button does not work, copy and paste the following link into your browser:

-

${link}

-
-
-` - return { - from: currentSender ?? '', - subject: currentSubject, - mailToken: currentMailToken, - body - } -} - - - - - export const generatePassword = (length = 20) => { const bytes = crypto.randomBytes(length); return Array.from(bytes, (b) => CHARSET[b % CHARSET.length]).join(""); -} \ No newline at end of file +} diff --git a/packages/flowerbase/src/constants.ts b/packages/flowerbase/src/constants.ts index 2f9b0dc..3859f9b 100644 --- a/packages/flowerbase/src/constants.ts +++ b/packages/flowerbase/src/constants.ts @@ -17,6 +17,13 @@ export const DEFAULT_CONFIG = { HTTPS_SCHEMA: process.env.HTTPS_SCHEMA || 'https', HOST: process.env.HOST || '0.0.0.0', ENABLE_LOGGER: process.env.ENABLE_LOGGER, + RESET_PASSWORD_TTL_SECONDS: Number(process.env.RESET_PASSWORD_TTL_SECONDS) || 3600, + AUTH_RATE_LIMIT_WINDOW_MS: Number(process.env.AUTH_RATE_LIMIT_WINDOW_MS) || 15 * 60 * 1000, + AUTH_LOGIN_MAX_ATTEMPTS: Number(process.env.AUTH_LOGIN_MAX_ATTEMPTS) || 10, + AUTH_RESET_MAX_ATTEMPTS: Number(process.env.AUTH_RESET_MAX_ATTEMPTS) || 5, + REFRESH_TOKEN_TTL_DAYS: Number(process.env.REFRESH_TOKEN_TTL_DAYS) || 60, + SWAGGER_UI_USER: process.env.SWAGGER_UI_USER || '', + SWAGGER_UI_PASSWORD: process.env.SWAGGER_UI_PASSWORD || '', CORS_OPTIONS: { origin: "*", methods: ["GET", "POST", "PUT", "DELETE"] as ALLOWED_METHODS[] @@ -30,8 +37,10 @@ export const DB_NAME = database_name export const AUTH_CONFIG = { authCollection: auth_collection, userCollection: collection_name, - resetPasswordCollection: 'reset-password-requests', + resetPasswordCollection: 'reset_password_requests', + refreshTokensCollection: 'auth_refresh_tokens', resetPasswordConfig: configuration['local-userpass']?.config, + localUserpassConfig: configuration['local-userpass']?.config, user_id_field, on_user_creation_function_name, providers: { @@ -44,4 +53,4 @@ export const AUTH_CONFIG = { export const S3_CONFIG = { ACCESS_KEY_ID: process.env.S3_ACCESS_KEY_ID, SECRET_ACCESS_KEY: process.env.S3_SECRET_ACCESS_KEY -} \ 
No newline at end of file +} diff --git a/packages/flowerbase/src/features/functions/controller.ts b/packages/flowerbase/src/features/functions/controller.ts index 513f489..a3e20ec 100644 --- a/packages/flowerbase/src/features/functions/controller.ts +++ b/packages/flowerbase/src/features/functions/controller.ts @@ -1,5 +1,6 @@ import { ObjectId } from 'bson' import { ChangeStream, Document } from 'mongodb'; +import type { FastifyRequest } from 'fastify' import { services } from '../../services' import { StateManager } from '../../state' import { GenerateContext } from '../../utils/context' @@ -7,7 +8,38 @@ import { Base64Function, FunctionCallBase64Dto, FunctionCallDto } from './dtos' import { FunctionController } from './interface' import { executeQuery } from './utils' +const normalizeUser = (payload: Record | undefined) => { + if (!payload) return undefined + const nestedUser = + payload.data ?? payload.user_data ?? payload.custom_data ?? payload + const flattened = + typeof nestedUser === 'object' && nestedUser !== null ? nestedUser : {} + + return { + ...payload, + ...flattened, + custom_data: payload.custom_data ?? flattened, + user_data: payload.user_data ?? flattened, + data: payload.data ?? flattened + } +} + +const getRequestUser = (req: FastifyRequest) => { + const candidate = req.user as Record | undefined + return normalizeUser(candidate) +} +const logFunctionCall = (method: string, user: Record | undefined, args: unknown[]) => { + if (process.env.DEBUG_FUNCTIONS !== 'true') return + console.log('[functions-debug]', method, user ? { id: user.id, role: user.role, email: user.email } : 'no-user', args) +} + +const formatFunctionExecutionError = (error: unknown) => { + const err = error as { message?: string; name?: string } + const message = typeof err?.message === 'string' ? err.message : String(error) + const name = typeof err?.name === 'string' ? 
err.name : 'Error' + return JSON.stringify({ message, name }) +} /** * > Creates a pre handler for every query @@ -24,10 +56,10 @@ export const functionsController: FunctionController = async ( const streams = {} as Record> app.post<{ Body: FunctionCallDto }>('/call', async (req, res) => { - if (req.user.typ !== 'access') { + const user = getRequestUser(req) + if (!user || user.typ !== 'access') { throw new Error('Access token required') } - const user = req.user const { name: method, arguments: args } = req.body if ('service' in req.body) { @@ -36,16 +68,31 @@ export const functionsController: FunctionController = async ( if (!serviceFn) { throw new Error(`Service "${req.body.service}" does not exist`) } - const [{ database, collection, query, update, document, documents, pipeline = [] }] = args + const [{ + database, + collection, + query, + filter, + update, + options, + returnNewDocument, + document, + documents, + pipeline = [] + }] = args const currentMethod = serviceFn(app, { rules, user }) .db(database) .collection(collection)[method] + logFunctionCall(`service:${req.body.service}:${method}`, user, args) const operatorsByType = await executeQuery({ currentMethod, query, + filter, update, + options, + returnNewDocument, document, documents, pipeline, @@ -64,26 +111,36 @@ export const functionsController: FunctionController = async ( throw new Error(`Function "${req.body.name}" is private`) } - const result = await GenerateContext({ - args: req.body.arguments, - app, - rules, - user: { ...user, _id: new ObjectId(user.id) }, - currentFunction, - functionsList, - services - }) - res.type('application/json') - return JSON.stringify(result) + logFunctionCall(`function:${method}`, user, args) + try { + const result = await GenerateContext({ + args: req.body.arguments, + app, + rules, + user: { ...user, _id: new ObjectId(user.id) }, + currentFunction, + functionsList, + services + }) + res.type('application/json') + return JSON.stringify(result) + } catch (error) { 
+ res.status(500) + res.type('application/json') + return JSON.stringify({ + error: formatFunctionExecutionError(error), + error_code: 'FunctionExecutionError' + }) + } }) app.get<{ Querystring: FunctionCallBase64Dto }>('/call', async (req, res) => { const { query } = req - if (req.user.typ !== 'access') { + const user = getRequestUser(req) + if (!user || user.typ !== 'access') { throw new Error('Access token required') } - const user = req.user const { baas_request, stitch_request } = query const config: Base64Function = JSON.parse( diff --git a/packages/flowerbase/src/features/functions/dtos.ts b/packages/flowerbase/src/features/functions/dtos.ts index 506201c..c7bd487 100644 --- a/packages/flowerbase/src/features/functions/dtos.ts +++ b/packages/flowerbase/src/features/functions/dtos.ts @@ -23,8 +23,11 @@ export type FunctionCallBase64Dto = { type ArgumentsData = Arguments<{ database: string collection: string + filter?: Document query: Parameters update: Document + options?: Document + returnNewDocument?: boolean document: Document documents: Document[] pipeline?: Document[] diff --git a/packages/flowerbase/src/features/functions/interface.ts b/packages/flowerbase/src/features/functions/interface.ts index 412973d..f506720 100644 --- a/packages/flowerbase/src/features/functions/interface.ts +++ b/packages/flowerbase/src/features/functions/interface.ts @@ -24,6 +24,9 @@ export type ExecuteQueryParams = { currentMethod: ReturnType[keyof ReturnType] query: Parameters update: Document + filter?: Document + options?: Document + returnNewDocument?: boolean document: Document documents: Document[] pipeline: Document[] diff --git a/packages/flowerbase/src/features/functions/utils.ts b/packages/flowerbase/src/features/functions/utils.ts index 5438679..6851263 100644 --- a/packages/flowerbase/src/features/functions/utils.ts +++ b/packages/flowerbase/src/features/functions/utils.ts @@ -44,29 +44,51 @@ export const executeQuery = async ({ currentMethod, query, update, + 
filter, + options, + returnNewDocument, document, documents, pipeline, isClient = false }: ExecuteQueryParams) => { + const resolvedQuery = + typeof query !== 'undefined' + ? query + : typeof filter !== 'undefined' + ? filter + : {} + const resolvedUpdate = typeof update !== 'undefined' ? update : {} + const resolvedOptions = + typeof options !== 'undefined' + ? options + : typeof returnNewDocument === 'boolean' + ? { returnDocument: returnNewDocument ? 'after' : 'before' } + : undefined return { find: async () => await (currentMethod as ReturnType['find'])( - EJSON.deserialize(query) + EJSON.deserialize(resolvedQuery) ).toArray(), findOne: () => (currentMethod as ReturnType['findOne'])( - EJSON.deserialize(query) + EJSON.deserialize(resolvedQuery) ), deleteOne: () => (currentMethod as ReturnType['deleteOne'])( - EJSON.deserialize(query) + EJSON.deserialize(resolvedQuery) ), insertOne: () => (currentMethod as ReturnType['insertOne'])( EJSON.deserialize(document) ), - updateOne: () => currentMethod(EJSON.deserialize(query), EJSON.deserialize(update)), + updateOne: () => currentMethod(EJSON.deserialize(resolvedQuery), EJSON.deserialize(resolvedUpdate)), + findOneAndUpdate: () => + (currentMethod as ReturnType['findOneAndUpdate'])( + EJSON.deserialize(resolvedQuery), + EJSON.deserialize(resolvedUpdate), + resolvedOptions ? 
EJSON.deserialize(resolvedOptions) : undefined + ), aggregate: async () => (await (currentMethod as ReturnType['aggregate'])( EJSON.deserialize(pipeline), @@ -79,13 +101,12 @@ export const executeQuery = async ({ ), updateMany: () => (currentMethod as ReturnType['updateMany'])( - EJSON.deserialize(query), - EJSON.deserialize(update) + EJSON.deserialize(resolvedQuery), + EJSON.deserialize(resolvedUpdate) ), deleteMany: () => (currentMethod as ReturnType['deleteMany'])( - EJSON.deserialize(query) + EJSON.deserialize(resolvedQuery) ) } } - diff --git a/packages/flowerbase/src/features/rules/utils.ts b/packages/flowerbase/src/features/rules/utils.ts index 005fe7c..7ebac0b 100644 --- a/packages/flowerbase/src/features/rules/utils.ts +++ b/packages/flowerbase/src/features/rules/utils.ts @@ -5,11 +5,20 @@ import { Rules, RulesConfig } from './interface' export const loadRules = async (rootDir = process.cwd()): Promise => { const rulesRoot = path.join(rootDir, 'data_sources', 'mongodb-atlas') - const files = fs.readdirSync(rulesRoot, { recursive: true }) as string[] + const recursivelyCollectFiles = (dir: string): string[] => { + return fs.readdirSync(dir, { withFileTypes: true }).flatMap((entry) => { + const fullPath = path.join(dir, entry.name) + if (entry.isDirectory()) { + return recursivelyCollectFiles(fullPath) + } + return entry.isFile() ? 
[fullPath] : [] + }) + } + const files = recursivelyCollectFiles(rulesRoot) const rulesFiles = files.filter((x) => (x as string).endsWith('rules.json')) const rulesByCollection = rulesFiles.reduce((acc, rulesFile) => { - const filePath = path.join(rulesRoot, rulesFile) + const filePath = rulesFile const collectionRules = readJsonContent(filePath) as RulesConfig acc[collectionRules.collection] = collectionRules diff --git a/packages/flowerbase/src/features/triggers/index.ts b/packages/flowerbase/src/features/triggers/index.ts index 8ee4b44..9844b70 100644 --- a/packages/flowerbase/src/features/triggers/index.ts +++ b/packages/flowerbase/src/features/triggers/index.ts @@ -1,3 +1,4 @@ +import { AUTH_CONFIG, DB_NAME } from '../../constants' import { services } from '../../services' import { Function, Functions } from '../functions/interface' import { ActivateTriggersParams } from './dtos' @@ -17,7 +18,49 @@ export const activateTriggers = async ({ }: ActivateTriggersParams) => { console.log('START ACTIVATION TRIGGERS') try { - for await (const trigger of triggersList) { + const triggersToActivate = [...triggersList] + if (AUTH_CONFIG.on_user_creation_function_name) { + const alreadyDeclared = triggersToActivate.some( + (trigger) => + trigger.content.type === 'AUTHENTICATION' && + trigger.content.event_processors?.FUNCTION?.config?.function_name === + AUTH_CONFIG.on_user_creation_function_name + ) + if (!alreadyDeclared) { + triggersToActivate.push({ + fileName: '__auto_on_user_creation_trigger__.json', + content: { + name: 'onUserCreation', + type: 'AUTHENTICATION', + disabled: false, + config: { + isAutoTrigger: true, + collection: AUTH_CONFIG.authCollection ?? 
'auth_users', + database: DB_NAME, + full_document: true, + full_document_before_change: false, + match: {}, + operation_types: ['insert', 'update', 'replace'], + project: {}, + service_name: 'mongodb-atlas', + skip_catchup_events: false, + tolerate_resume_errors: false, + unordered: false, + schedule: '' + }, + event_processors: { + FUNCTION: { + config: { + function_name: AUTH_CONFIG.on_user_creation_function_name + } + } + } + } + }) + } + } + + for await (const trigger of triggersToActivate) { const { content } = trigger const { type, config, event_processors } = content diff --git a/packages/flowerbase/src/features/triggers/interface.ts b/packages/flowerbase/src/features/triggers/interface.ts index eacfd1c..8b66b47 100644 --- a/packages/flowerbase/src/features/triggers/interface.ts +++ b/packages/flowerbase/src/features/triggers/interface.ts @@ -21,6 +21,7 @@ type Config = { database: string full_document: boolean full_document_before_change: boolean + isAutoTrigger?: boolean match: Record operation_types: string[] project: Record diff --git a/packages/flowerbase/src/features/triggers/utils.ts b/packages/flowerbase/src/features/triggers/utils.ts index 3dcc211..27b1cb7 100644 --- a/packages/flowerbase/src/features/triggers/utils.ts +++ b/packages/flowerbase/src/features/triggers/utils.ts @@ -7,6 +7,41 @@ import { readJsonContent } from '../../utils' import { GenerateContext } from '../../utils/context' import { HandlerParams, Trigger, Triggers } from './interface' +const registerOnClose = ( + app: HandlerParams['app'], + handler: () => Promise | void, + label: string +) => { + if (app.server) { + app.server.once('close', () => { + Promise.resolve(handler()).catch((error) => { + console.error(`${label} close error`, error) + }) + }) + return + } + + try { + app.addHook('onClose', async () => { + try { + await handler() + } catch (error) { + console.error(`${label} close error`, error) + } + }) + } catch (error) { + console.error(`${label} hook registration 
error`, error) + } +} + +const shouldIgnoreStreamError = (error: unknown) => { + const err = error as { name?: string; message?: string } + if (err?.name === 'MongoClientClosedError') return true + if (err?.message?.includes('client was closed')) return true + if (err?.message?.includes('Client is closed')) return true + return false +} + /** * Loads trigger files from the specified directory and returns them as an array of objects. * Each object contains the file name and the parsed JSON content. @@ -54,7 +89,7 @@ const handleCronTrigger = async ({ services, app }: HandlerParams) => { - cron.schedule(config.schedule, async () => { + const task = cron.schedule(config.schedule, async () => { await GenerateContext({ args: [], app, @@ -65,6 +100,7 @@ const handleCronTrigger = async ({ services }) }) + registerOnClose(app, () => task.stop(), 'Scheduled trigger') } const handleAuthenticationTrigger = async ({ @@ -74,52 +110,115 @@ const handleAuthenticationTrigger = async ({ services, app }: HandlerParams) => { - const { database } = config + const { database, isAutoTrigger } = config + const authCollection = AUTH_CONFIG.authCollection ?? 
'auth_users' + const collection = app.mongo.client.db(database || DB_NAME).collection(authCollection) const pipeline = [ { $match: { - operationType: { $in: ['insert'] } + operationType: { $in: ['insert', 'update', 'replace'] } } } ] - const changeStream = app.mongo.client - .db(database || DB_NAME) - .collection(AUTH_CONFIG.authCollection) - .watch(pipeline, { - fullDocument: 'whenAvailable' - }) + const changeStream = collection.watch(pipeline, { + fullDocument: 'whenAvailable' + }) + changeStream.on('error', (error) => { + if (shouldIgnoreStreamError(error)) return + console.error('Authentication trigger change stream error', error) + }) changeStream.on('change', async function (change) { - const document = change['fullDocument' as keyof typeof change] as Record< - string, - string - > //TODO -> define user type - - if (document) { - delete document.password - - const currentUser = { ...document } - delete currentUser.password - await GenerateContext({ - args: [{ - user: { - ...currentUser, - id: currentUser._id.toString(), - data: { - _id: currentUser._id.toString(), - email: currentUser.email - } - } - }], - app, - rules: StateManager.select("rules"), - user: {}, // TODO from currentUser ?? 
- currentFunction: triggerHandler, - functionsList, - services, - runAsSystem: true - }) + const operationType = change['operationType' as keyof typeof change] as string | undefined + const documentKey = change['documentKey' as keyof typeof change] as + | { _id?: unknown } + | undefined + const fullDocument = change['fullDocument' as keyof typeof change] as + | Record + | null + if (!documentKey?._id) { + return + } + + const updateDescription = change[ + 'updateDescription' as keyof typeof change + ] as { updatedFields?: Record } | undefined + const updatedStatus = updateDescription?.updatedFields?.status + let confirmedCandidate = false + let confirmedDocument = + fullDocument as Record | null + + if (operationType === 'update') { + if (updatedStatus === 'confirmed') { + confirmedCandidate = true + } else if (updatedStatus === undefined) { + const fetched = await collection.findOne({ + _id: documentKey._id + }) as Record | null + confirmedDocument = fetched ?? confirmedDocument + confirmedCandidate = (confirmedDocument as { status?: string } | null)?.status === 'confirmed' + } + } else { + confirmedCandidate = (confirmedDocument as { status?: string } | null)?.status === 'confirmed' } + + if (!confirmedCandidate) { + return + } + + const updateResult = await collection.findOneAndUpdate( + { + _id: documentKey._id, + status: 'confirmed', + on_user_creation_triggered_at: { $exists: false } + }, + { + $set: { + on_user_creation_triggered_at: new Date() + } + }, + { + returnDocument: 'after' + } + ) + + const document = + (updateResult?.value as Record | null) ?? 
confirmedDocument + if (!document) { + return + } + + delete (document as { password?: unknown }).password + + const currentUser = { ...document } + delete (currentUser as { password?: unknown }).password + + const userData = { + ...currentUser, + id: (currentUser as { _id: { toString: () => string } })._id.toString(), + data: { + _id: (currentUser as { _id: { toString: () => string } })._id.toString(), + email: (currentUser as { email?: string }).email + } + } + // TODO change va ripulito + await GenerateContext({ + args: isAutoTrigger ? [userData] : [{ user: userData /*, ...change */ }], + app, + rules: StateManager.select("rules"), + user: {}, // TODO from currentUser ?? + currentFunction: triggerHandler, + functionsList, + services, + runAsSystem: true + }) }) + registerOnClose( + app, + async () => { + await changeStream.close() + }, + 'Authentication trigger' + ) } /** @@ -175,6 +274,10 @@ const handleDataBaseTrigger = async ({ ? 'whenAvailable' : undefined }) + changeStream.on('error', (error) => { + if (shouldIgnoreStreamError(error)) return + console.error('Database trigger change stream error', error) + }) changeStream.on('change', async function ({ clusterTime, ...change }) { await GenerateContext({ args: [change], @@ -186,7 +289,13 @@ const handleDataBaseTrigger = async ({ services }) }) - // TODO -> gestire close dello stream + registerOnClose( + app, + async () => { + await changeStream.close() + }, + 'Database trigger' + ) } export const TRIGGER_HANDLERS = { diff --git a/packages/flowerbase/src/index.ts b/packages/flowerbase/src/index.ts index d9b9dc9..9593f06 100644 --- a/packages/flowerbase/src/index.ts +++ b/packages/flowerbase/src/index.ts @@ -29,6 +29,7 @@ export type InitializeConfig = { port?: number host?: string corsConfig?: CorsConfig + basePath?: string } /** @@ -45,26 +46,37 @@ export async function initialize({ jwtSecret = DEFAULT_CONFIG.JWT_SECRET, port = DEFAULT_CONFIG.PORT, mongodbUrl = DEFAULT_CONFIG.MONGODB_URL, - corsConfig = 
DEFAULT_CONFIG.CORS_OPTIONS + corsConfig = DEFAULT_CONFIG.CORS_OPTIONS, + basePath }: InitializeConfig) { + if (!jwtSecret || jwtSecret.trim().length === 0) { + throw new Error('JWT secret missing: set JWT_SECRET or pass jwtSecret to initialize()') + } + + const resolvedBasePath = basePath ?? require.main?.path ?? process.cwd() const fastify = Fastify({ logger: !!DEFAULT_CONFIG.ENABLE_LOGGER }) - const basePath = require.main?.path - console.log("BASE PATH", basePath) + const isTest = process.env.NODE_ENV === 'test' || process.env.JEST_WORKER_ID !== undefined + const logInfo = (...args: unknown[]) => { + if (!isTest) { + console.log(...args) + } + } - console.log("CURRENT PORT", port) - console.log("CURRENT HOST", host) + logInfo("BASE PATH", resolvedBasePath) + logInfo("CURRENT PORT", port) + logInfo("CURRENT HOST", host) - const functionsList = await loadFunctions(basePath) - console.log("Functions LOADED") - const triggersList = await loadTriggers(basePath) - console.log("Triggers LOADED") - const endpointsList = await loadEndpoints(basePath) - console.log("Endpoints LOADED") - const rulesList = await loadRules(basePath) - console.log("Rules LOADED") + const functionsList = await loadFunctions(resolvedBasePath) + logInfo("Functions LOADED") + const triggersList = await loadTriggers(resolvedBasePath) + logInfo("Triggers LOADED") + const endpointsList = await loadEndpoints(resolvedBasePath) + logInfo("Endpoints LOADED") + const rulesList = await loadRules(resolvedBasePath) + logInfo("Rules LOADED") const stateConfig = { functions: functionsList, @@ -88,7 +100,33 @@ export async function initialize({ deepLinking: false }, uiHooks: { - onRequest: function (request, reply, next) { next() }, + onRequest: function (request, reply, next) { + const swaggerUser = DEFAULT_CONFIG.SWAGGER_UI_USER + const swaggerPassword = DEFAULT_CONFIG.SWAGGER_UI_PASSWORD + if (!swaggerUser && !swaggerPassword) { + next() + return + } + const authHeader = request.headers.authorization + if 
(!authHeader || !authHeader.startsWith('Basic ')) { + reply + .code(401) + .header('WWW-Authenticate', 'Basic realm="Swagger UI"') + .send({ message: 'Unauthorized' }) + return + } + const encoded = authHeader.slice('Basic '.length) + const decoded = Buffer.from(encoded, 'base64').toString('utf8') + const [user, pass] = decoded.split(':') + if (user !== swaggerUser || pass !== swaggerPassword) { + reply + .code(401) + .header('WWW-Authenticate', 'Basic realm="Swagger UI"') + .send({ message: 'Unauthorized' }) + return + } + next() + }, preHandler: function (request, reply, next) { next() } }, staticCSP: true, @@ -105,15 +143,15 @@ export async function initialize({ corsConfig }) - console.log('Plugins registration COMPLETED') + logInfo('Plugins registration COMPLETED') await exposeRoutes(fastify) - console.log('APP Routes registration COMPLETED') + logInfo('APP Routes registration COMPLETED') await registerFunctions({ app: fastify, functionsList, rulesList }) - console.log('Functions registration COMPLETED') + logInfo('Functions registration COMPLETED') await generateEndpoints({ app: fastify, functionsList, endpointsList, rulesList }) - console.log('HTTP Endpoints registration COMPLETED') + logInfo('HTTP Endpoints registration COMPLETED') fastify.ready(() => { - console.log("FASTIFY IS READY") + logInfo("FASTIFY IS READY") if (triggersList?.length > 0) activateTriggers({ fastify, triggersList, functionsList }) }) await fastify.listen({ port, host }) diff --git a/packages/flowerbase/src/services/mongodb-atlas/__tests__/findOneAndUpdate.test.ts b/packages/flowerbase/src/services/mongodb-atlas/__tests__/findOneAndUpdate.test.ts new file mode 100644 index 0000000..d825e1a --- /dev/null +++ b/packages/flowerbase/src/services/mongodb-atlas/__tests__/findOneAndUpdate.test.ts @@ -0,0 +1,95 @@ +import { Document, ObjectId } from 'mongodb' +import MongoDbAtlas from '..' 
+import { Role, Rules } from '../../../features/rules/interface' + +const createAppWithCollection = (collection: Record) => ({ + mongo: { + client: { + db: jest.fn().mockReturnValue({ + collection: jest.fn().mockReturnValue(collection) + }) + } + } +}) + +const createRules = (roleOverrides: Partial = {}): Rules => ({ + todos: { + database: 'db', + collection: 'todos', + filters: [], + roles: [ + { + name: 'owner', + apply_when: {}, + insert: true, + delete: true, + search: true, + read: true, + write: true, + ...roleOverrides + } + ] + } +}) + +describe('mongodb-atlas findOneAndUpdate', () => { + it('applies write/read validation and returns the updated document', async () => { + const id = new ObjectId() + const existingDoc = { _id: id, title: 'Old', userId: 'user-1' } + const updatedDoc = { _id: id, title: 'New', userId: 'user-1' } + const findOne = jest.fn().mockResolvedValue(existingDoc) + const aggregate = jest.fn().mockReturnValue({ + toArray: jest.fn().mockResolvedValue([updatedDoc]) + }) + const findOneAndUpdate = jest.fn().mockResolvedValue(updatedDoc) + const collection = { + collectionName: 'todos', + findOne, + aggregate, + findOneAndUpdate + } + + const app = createAppWithCollection(collection) + const operators = MongoDbAtlas(app as any, { + rules: createRules(), + user: { id: 'user-1' } + }) + .db('db') + .collection('todos') + + const result = await operators.findOneAndUpdate({ _id: id }, { $set: { title: 'New' } }) + + expect(findOne).toHaveBeenCalled() + expect(aggregate).toHaveBeenCalled() + expect(findOneAndUpdate).toHaveBeenCalledWith( + { $and: [{ _id: id }] }, + { $set: { title: 'New' } } + ) + expect(result).toEqual(updatedDoc) + }) + + it('rejects updates when write permission is denied', async () => { + const id = new ObjectId() + const existingDoc = { _id: id, title: 'Old', userId: 'user-1' } + const findOne = jest.fn().mockResolvedValue(existingDoc) + const findOneAndUpdate = jest.fn() + const collection = { + collectionName: 'todos', + 
findOne, + findOneAndUpdate + } + + const app = createAppWithCollection(collection) + const operators = MongoDbAtlas(app as any, { + rules: createRules({ write: false }), + user: { id: 'user-1' } + }) + .db('db') + .collection('todos') + + await expect( + operators.findOneAndUpdate({ _id: id }, { title: 'Denied' } as Document) + ).rejects.toThrow('Update not permitted') + expect(findOneAndUpdate).not.toHaveBeenCalled() + }) +}) diff --git a/packages/flowerbase/src/services/mongodb-atlas/__tests__/utils.test.ts b/packages/flowerbase/src/services/mongodb-atlas/__tests__/utils.test.ts new file mode 100644 index 0000000..6248404 --- /dev/null +++ b/packages/flowerbase/src/services/mongodb-atlas/__tests__/utils.test.ts @@ -0,0 +1,141 @@ +import { ensureClientPipelineStages, getHiddenFieldsFromRulesConfig, prependUnsetStage, applyAccessControlToPipeline } from '../utils' +import { Role } from '../../../utils/roles/interface' + +describe('MongoDB Atlas aggregate helpers', () => { + describe('ensureClientPipelineStages', () => { + it('allows safe stages', () => { + expect(() => + ensureClientPipelineStages([{ $match: { active: true } }]) + ).not.toThrow() + }) + + it('throws when unsupported stage is used', () => { + expect(() => + ensureClientPipelineStages([{ $replaceRoot: { newRoot: '$$ROOT' } }]) + ).toThrow('Stage $replaceRoot is not allowed in client aggregate pipelines') + }) + + it('recurses into nested lookups and facets without throwing', () => { + const pipeline = [ + { + $lookup: { + from: 'other', + localField: 'ref', + foreignField: '_id', + as: 'joined', + pipeline: [ + { + $facet: { + safe: [{ $match: { foo: 'bar' } }] + } + } + ] + } + } + ] + + expect(() => ensureClientPipelineStages(pipeline)).not.toThrow() + }) + }) + + describe('getHiddenFieldsFromRulesConfig', () => { + it('returns fields marked as unreadable', () => { + const roles: Role[] = [ + { + name: 'demo', + apply_when: {}, + insert: true, + delete: true, + search: true, + read: true, + write: 
true, + fields: { + secret: { read: false, write: false }, + visible: { read: true, write: false } + }, + additional_fields: { + hiddenExtra: { read: false, write: false } + } + } + ] + + const hidden = getHiddenFieldsFromRulesConfig({ + roles + }) + + expect(hidden).toEqual(expect.arrayContaining(['secret', 'hiddenExtra'])) + expect(hidden).not.toContain('visible') + }) + }) + + describe('prependUnsetStage', () => { + it('inserts an $unset stage when hidden fields are present', () => { + const pipeline = [{ $match: { active: true } }] + const result = prependUnsetStage(pipeline, ['password', 'secret']) + + expect(result[0]).toEqual({ $unset: ['password', 'secret'] }) + expect(result[1]).toEqual(pipeline[0]) + }) + + it('returns original pipeline if no hidden fields exist', () => { + const pipeline = [{ $match: { active: true } }] + expect(prependUnsetStage(pipeline, [])).toEqual(pipeline) + }) + }) + + describe('applyAccessControlToPipeline', () => { + it('prepends hidden-field $unset inside lookup pipelines for client requests', () => { + const rules = { + main: { + filters: [], + roles: [] + }, + other: { + filters: [], + roles: [ + { + name: 'lookup-role', + apply_when: {}, + insert: true, + delete: true, + search: true, + read: true, + write: true, + fields: { + secretField: { read: false, write: false } + }, + additional_fields: { + secretAux: { read: false, write: false } + } + } + ] + } + } + + const pipeline = [ + { + $lookup: { + from: 'other', + localField: 'ref', + foreignField: '_id', + as: 'joined', + pipeline: [{ $match: { active: true } }] + } + } + ] + + const sanitized = applyAccessControlToPipeline( + pipeline, + rules, + {}, + 'main', + { isClientPipeline: true } + ) + + const lookupPipeline = sanitized[0].$lookup.pipeline + expect(lookupPipeline?.[0]).toEqual({ + $unset: ['secretField', 'secretAux'] + }) + }) + }) +}) diff --git a/packages/flowerbase/src/services/mongodb-atlas/index.ts b/packages/flowerbase/src/services/mongodb-atlas/index.ts 
index 947d00a..3b63f61 100644 --- a/packages/flowerbase/src/services/mongodb-atlas/index.ts +++ b/packages/flowerbase/src/services/mongodb-atlas/index.ts @@ -1,23 +1,62 @@ -import { EventEmitterAsyncResourceOptions } from 'events' import isEqual from 'lodash/isEqual' -import { Collection, Document, EventsDescription, FindCursor, WithId } from 'mongodb' +import { + Collection, + Document, + EventsDescription, + Filter as MongoFilter, + FindOneAndUpdateOptions, + UpdateFilter, + WithId +} from 'mongodb' import { checkValidation } from '../../utils/roles/machines' import { getWinningRole } from '../../utils/roles/machines/utils' import { CRUD_OPERATIONS, GetOperatorsFunction, MongodbAtlasFunction } from './model' import { applyAccessControlToPipeline, checkDenyOperation, + ensureClientPipelineStages, getFormattedProjection, getFormattedQuery, + getHiddenFieldsFromRulesConfig, normalizeQuery } from './utils' +import { Rules } from '../../features/rules/interface' //TODO aggiungere no-sql inject security +const debugRules = process.env.DEBUG_RULES === 'true' +const debugServices = process.env.DEBUG_SERVICES === 'true' + +const logDebug = (message: string, payload?: unknown) => { + if (!debugRules) return + const formatted = payload && typeof payload === 'object' ? JSON.stringify(payload) : payload + console.log(`[rules-debug] ${message}`, formatted ?? '') +} + +const getUserId = (user?: unknown) => { + if (!user || typeof user !== 'object') return undefined + return (user as { id?: string }).id +} + +const logService = (message: string, payload?: unknown) => { + if (!debugServices) return + console.log('[service-debug]', message, payload ?? '') +} + const getOperators: GetOperatorsFunction = ( collection, - { rules = {}, collName, user, run_as_system } -) => ({ - /** + { rules, collName, user, run_as_system } +) => { + const normalizedRules: Rules = rules ?? ({} as Rules) + const collectionRules = normalizedRules[collName] + const filters = collectionRules?.filters ?? 
[] + const roles = collectionRules?.roles ?? [] + const fallbackAccess = (doc: Document | null | undefined = undefined) => ({ + status: false, + document: doc + }) + + return { + /** * Finds a single document in a MongoDB collection with optional role-based filtering and validation. * * @param {Filter} query - The MongoDB query used to match the document. @@ -34,16 +73,38 @@ const getOperators: GetOperatorsFunction = ( */ findOne: async (query) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.READ) - const { filters, roles } = rules[collName] || {} - + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.READ) // Apply access control filters to the query const formattedQuery = getFormattedQuery(filters, query, user) - - const result = await collection.findOne({ $and: formattedQuery }) + logDebug('update formattedQuery', { + collection: collName, + query, + formattedQuery + }) + logDebug('find formattedQuery', { + collection: collName, + query, + formattedQuery, + rolesLength: roles.length + }) + + logService('findOne query', { collName, formattedQuery }) + const safeQuery = normalizeQuery(formattedQuery) + logService('findOne normalizedQuery', { collName, safeQuery }) + const result = await collection.findOne({ $and: safeQuery }) + logDebug('findOne result', { + collection: collName, + result + }) + logService('findOne result', { collName, result }) const winningRole = getWinningRole(result, user, roles) + logDebug('findOne winningRole', { + collection: collName, + winningRoleName: winningRole?.name ?? null, + userId: getUserId(user) + }) const { status, document } = winningRole ? await checkValidation( winningRole, @@ -55,7 +116,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: true, document: result } + : fallbackAccess(result) // Return validated document or empty object if not permitted return Promise.resolve(status ? 
document : {}) @@ -82,9 +143,7 @@ const getOperators: GetOperatorsFunction = ( */ deleteOne: async (query = {}) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.DELETE) - const { filters, roles } = rules[collName] || {} - + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.DELETE) // Apply access control filters const formattedQuery = getFormattedQuery(filters, query, user) @@ -92,6 +151,11 @@ const getOperators: GetOperatorsFunction = ( const result = await collection.findOne({ $and: formattedQuery }) const winningRole = getWinningRole(result, user, roles) + logDebug('delete winningRole', { + collection: collName, + userId: getUserId(user), + winningRoleName: winningRole?.name ?? null + }) const { status } = winningRole ? await checkValidation( winningRole, @@ -103,7 +167,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: true } + : fallbackAccess(result) if (!status) { throw new Error('Delete not permitted') @@ -134,10 +198,8 @@ const getOperators: GetOperatorsFunction = ( * This ensures that only users with the correct permissions can insert data into the collection. 
*/ insertOne: async (data, options) => { - const { roles } = rules[collName] || {} - if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.CREATE) + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.CREATE) const winningRole = getWinningRole(data, user, roles) const { status, document } = winningRole @@ -151,12 +213,19 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: true, document: data } + : fallbackAccess(data) if (!status || !isEqual(data, document)) { throw new Error('Insert not permitted') } - return collection.insertOne(data, options) + logService('insertOne payload', { collName, data }) + const insertResult = await collection.insertOne(data, options) + logService('insertOne result', { + collName, + insertedId: insertResult.insertedId.toString(), + document: data + }) + return insertResult } // System mode: insert without validation return collection.insertOne(data, options) @@ -185,8 +254,7 @@ const getOperators: GetOperatorsFunction = ( updateOne: async (query, data, options) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.UPDATE) - const { filters, roles } = rules[collName] || {} + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.UPDATE) // Apply access control filters // Normalize _id @@ -210,10 +278,9 @@ const getOperators: GetOperatorsFunction = ( // const docToCheck = hasOperators // ? Object.values(data).reduce((acc, operation) => ({ ...acc, ...operation }), {}) // : data - const [matchQuery] = formattedQuery; // TODO da chiedere/capire perchè è solo uno. 
tutti gli altri { $match: { $and: formattedQuery } } const pipeline = [ { - $match: matchQuery + $match: { $and: safeQuery } }, { $limit: 1 @@ -235,17 +302,107 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: true, document: docToCheck } + : fallbackAccess(docToCheck) // Ensure no unauthorized changes are made const areDocumentsEqual = isEqual(document, docToCheck) if (!status || !areDocumentsEqual) { throw new Error('Update not permitted') } - return collection.updateOne({ $and: formattedQuery }, data, options) + return collection.updateOne({ $and: safeQuery }, data, options) } return collection.updateOne(query, data, options) }, + /** + * Finds and updates a single document with role-based validation and access control. + * + * @param {Filter} query - The MongoDB query used to match the document to update. + * @param {UpdateFilter | Partial} data - The update operations or replacement document. + * @param {FindOneAndUpdateOptions} [options] - Optional settings for the findOneAndUpdate operation. + * @returns {Promise>} The result of the findOneAndUpdate operation. + * + * @throws {Error} If the user is not authorized to update the document. + */ + findOneAndUpdate: async ( + query: MongoFilter, + data: UpdateFilter | Document[], + options?: FindOneAndUpdateOptions + ) => { + if (!run_as_system) { + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.UPDATE) + const formattedQuery = getFormattedQuery(filters, query, user) + const safeQuery = Array.isArray(formattedQuery) + ? 
normalizeQuery(formattedQuery) + : formattedQuery + + const result = await collection.findOne({ $and: safeQuery }) + + if (!result) { + throw new Error('Update not permitted') + } + + const winningRole = getWinningRole(result, user, roles) + const hasOperators = Object.keys(data).some((key) => key.startsWith('$')) + const pipeline = [ + { + $match: { $and: safeQuery } + }, + { + $limit: 1 + }, + ...Object.entries(data).map(([key, value]) => ({ [key]: value })) + ] + const [docToCheck] = hasOperators + ? await collection.aggregate(pipeline).toArray() + : ([data] as [Document]) + + const { status, document } = winningRole + ? await checkValidation( + winningRole, + { + type: 'write', + roles, + cursor: docToCheck, + expansions: {} + }, + user + ) + : fallbackAccess(docToCheck) + + const areDocumentsEqual = isEqual(document, docToCheck) + if (!status || !areDocumentsEqual) { + throw new Error('Update not permitted') + } + + const updateResult = options + ? await collection.findOneAndUpdate({ $and: safeQuery }, data, options) + : await collection.findOneAndUpdate({ $and: safeQuery }, data) + if (!updateResult) { + return updateResult + } + + const readRole = getWinningRole(updateResult, user, roles) + const readResult = readRole + ? await checkValidation( + readRole, + { + type: 'read', + roles, + cursor: updateResult, + expansions: {} + }, + user + ) + : fallbackAccess(updateResult) + + const sanitizedDoc = readResult.status ? (readResult.document ?? updateResult) : {} + return sanitizedDoc + } + + return options + ? collection.findOneAndUpdate(query, data, options) + : collection.findOneAndUpdate(query, data) + }, /** * Finds documents in a MongoDB collection with optional role-based access control and post-query validation. 
* @@ -265,32 +422,32 @@ const getOperators: GetOperatorsFunction = ( */ find: (query) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.READ) - const { filters, roles } = rules[collName] || {} - + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.READ) // Pre-query filtering based on access control rules const formattedQuery = getFormattedQuery(filters, query, user) const currentQuery = formattedQuery.length ? { $and: formattedQuery } : {} // aggiunto filter per evitare questo errore: $and argument's entries must be objects - const originalCursor = collection.find(currentQuery) - // Clone the cursor to override `toArray` with post-query validation - const client = originalCursor[ - 'client' as keyof typeof originalCursor - ] as EventEmitterAsyncResourceOptions - const newCursor = new FindCursor(client) + const cursor = collection.find(currentQuery) + const originalToArray = cursor.toArray.bind(cursor) /** * Overridden `toArray` method that validates each document for read access. * * @returns {Promise} An array of documents the user is authorized to read. */ - newCursor.toArray = async () => { - const response = await originalCursor.toArray() + cursor.toArray = async () => { + const response = await originalToArray() const filteredResponse = await Promise.all( response.map(async (currentDoc) => { const winningRole = getWinningRole(currentDoc, user, roles) + logDebug('find winningRole', { + collection: collName, + userId: getUserId(user), + winningRoleName: winningRole?.name ?? null, + rolesLength: roles.length + }) const { status, document } = winningRole ? await checkValidation( winningRole, @@ -302,16 +459,16 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: !roles.length, document: currentDoc } + : fallbackAccess(currentDoc) return status ? 
document : undefined }) ) - return filteredResponse.filter(Boolean) + return filteredResponse.filter(Boolean) as WithId[] } - return newCursor + return cursor } // System mode: return original unfiltered cursor return collection.find(query) @@ -337,9 +494,7 @@ const getOperators: GetOperatorsFunction = ( */ watch: (pipeline = [], options) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.READ) - const { filters, roles } = rules[collName] || {} - + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.READ) // Apply access filters to initial change stream pipeline const formattedQuery = getFormattedQuery(filters, {}, user) @@ -377,7 +532,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: true, document: fullDocument } + : fallbackAccess(fullDocument) const { status: updatedFieldsStatus, document: updatedFields } = winningRole ? await checkValidation( @@ -390,7 +545,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: true, document: updateDescription?.updatedFields } + : fallbackAccess(updateDescription?.updatedFields) return { status, document, updatedFieldsStatus, updatedFields } } @@ -424,53 +579,49 @@ const getOperators: GetOperatorsFunction = ( return collection.watch(pipeline, options) }, //TODO -> add filter & rules in aggregate - aggregate: async (pipeline = [], options, isClient) => { - if (isClient) { - throw new Error("Aggregate operator from cliente is not implemented! Move it to a function") - } + aggregate: (pipeline = [], options, isClient) => { if (run_as_system || !isClient) { return collection.aggregate(pipeline, options) } - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.READ) - const { filters = [], roles = [] } = rules[collection.collectionName] || {} + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.READ) + + const rulesConfig = collectionRules ?? 
{ filters, roles } + + ensureClientPipelineStages(pipeline) + const formattedQuery = getFormattedQuery(filters, {}, user) + logDebug('aggregate formattedQuery', { + collection: collName, + formattedQuery, + pipeline + }) const projection = getFormattedProjection(filters) + const hiddenFields = getHiddenFieldsFromRulesConfig(rulesConfig) + + const sanitizedPipeline = applyAccessControlToPipeline( + pipeline, + normalizedRules, + user, + collName, + { isClientPipeline: true } + ) + logDebug('aggregate sanitizedPipeline', { + collection: collName, + sanitizedPipeline + }) const guardedPipeline = [ + ...(hiddenFields.length ? [{ $unset: hiddenFields }] : []), ...(formattedQuery.length ? [{ $match: { $and: formattedQuery } }] : []), ...(projection ? [{ $project: projection }] : []), - ...applyAccessControlToPipeline(pipeline, rules, user) + ...sanitizedPipeline ] - // const pipelineCollections = getCollectionsFromPipeline(pipeline) - - // console.log(pipelineCollections) - - // pipelineCollections.every((collection) => checkDenyOperation(rules, collection, CRUD_OPERATIONS.READ)) - const originalCursor = collection.aggregate(guardedPipeline, options) const newCursor = Object.create(originalCursor) - newCursor.toArray = async () => { - const results = await originalCursor.toArray() - - const filtered = await Promise.all( - results.map(async (doc) => { - const role = getWinningRole(doc, user, roles) - const { status, document } = role - ? await checkValidation( - role, - { type: 'read', roles, cursor: doc, expansions: {} }, - user - ) - : { status: !roles?.length, document: doc } - return status ? 
document : undefined - }) - ) - - return filtered.filter(Boolean) - } + newCursor.toArray = async () => originalCursor.toArray() return newCursor }, @@ -493,8 +644,7 @@ const getOperators: GetOperatorsFunction = ( */ insertMany: async (documents, options) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.CREATE) - const { roles } = rules[collName] || {} + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.CREATE) // Validate each document against user's roles const filteredItems = await Promise.all( documents.map(async (currentDoc) => { @@ -511,7 +661,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: !roles.length, document: currentDoc } + : fallbackAccess(currentDoc) return status ? document : undefined }) @@ -530,8 +680,7 @@ const getOperators: GetOperatorsFunction = ( }, updateMany: async (query, data, options) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.UPDATE) - const { filters, roles } = rules[collName] || {} + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.UPDATE) // Apply access control filters const formattedQuery = getFormattedQuery(filters, query, user) @@ -576,7 +725,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: !roles.length, document: currentDoc } + : fallbackAccess(currentDoc) return status ? 
document : undefined }) @@ -611,9 +760,7 @@ const getOperators: GetOperatorsFunction = ( */ deleteMany: async (query = {}) => { if (!run_as_system) { - checkDenyOperation(rules, collection.collectionName, CRUD_OPERATIONS.DELETE) - const { filters, roles } = rules[collName] || {} - + checkDenyOperation(normalizedRules, collection.collectionName, CRUD_OPERATIONS.DELETE) // Apply access control filters const formattedQuery = getFormattedQuery(filters, query, user) @@ -636,7 +783,7 @@ const getOperators: GetOperatorsFunction = ( }, user ) - : { status: !roles.length, document: currentDoc } + : fallbackAccess(currentDoc) return status ? document : undefined }) @@ -662,7 +809,8 @@ const getOperators: GetOperatorsFunction = ( // If running as system, bypass access control and delete directly return collection.deleteMany(query) } -}) + } +} const MongodbAtlas: MongodbAtlasFunction = ( app, @@ -671,9 +819,12 @@ const MongodbAtlas: MongodbAtlasFunction = ( db: (dbName: string) => { return { collection: (collName: string) => { - const collection: Collection = app.mongo.client - .db(dbName) - .collection(collName) + const mongoClient = app.mongo.client as unknown as { + db: (database: string) => { + collection: (name: string) => Collection + } + } + const collection: Collection = mongoClient.db(dbName).collection(collName) return getOperators(collection, { rules, collName, diff --git a/packages/flowerbase/src/services/mongodb-atlas/model.ts b/packages/flowerbase/src/services/mongodb-atlas/model.ts index 52cd7f9..b2021f7 100644 --- a/packages/flowerbase/src/services/mongodb-atlas/model.ts +++ b/packages/flowerbase/src/services/mongodb-atlas/model.ts @@ -1,5 +1,13 @@ import { FastifyInstance } from 'fastify' -import { Collection, Document, FindCursor, WithId } from 'mongodb' +import { + Collection, + Document, + Filter as MongoFilter, + FindCursor, + FindOneAndUpdateOptions, + UpdateFilter, + WithId +} from 'mongodb' import { User } from '../../auth/dtos' import { Filter, Rules 
} from '../../features/rules/interface' import { Role } from '../../utils/roles/interface' @@ -50,11 +58,16 @@ export type GetOperatorsFunction = ( updateOne: ( ...params: Parameters> ) => ReturnType> + findOneAndUpdate: ( + filter: MongoFilter, + update: UpdateFilter | Document[], + options?: FindOneAndUpdateOptions + ) => Promise find: (...params: Parameters>) => FindCursor watch: (...params: Parameters>) => ReturnType> aggregate: ( ...params: [...Parameters>, isClient: boolean] - ) => Promise>> + ) => ReturnType> insertMany: ( ...params: Parameters> ) => ReturnType> @@ -73,4 +86,4 @@ export enum CRUD_OPERATIONS { UPDATE = "UPDATE", DELETE = "DELETE" -} \ No newline at end of file +} diff --git a/packages/flowerbase/src/services/mongodb-atlas/utils.ts b/packages/flowerbase/src/services/mongodb-atlas/utils.ts index c7a7d4e..2be208d 100644 --- a/packages/flowerbase/src/services/mongodb-atlas/utils.ts +++ b/packages/flowerbase/src/services/mongodb-atlas/utils.ts @@ -37,7 +37,7 @@ export const getValidRule = ({ // checkRule valuta se i campi del record soddisfano quella condizione. // Quindi le regole vengono effettivamente rispettate. const valid = rulesMatcherUtils.checkRule( - conditions, + conditions as Parameters[0], { ...(record ?? 
{}), '%%user': user, @@ -57,10 +57,16 @@ export const getFormattedQuery = ( ) => { const preFilter = getValidRule({ filters, user }) const isValidPreFilter = !!preFilter?.length - return [ - isValidPreFilter && expandQuery(preFilter[0].query, { '%%user': user }), - query - ].filter(Boolean).filter(r => Object.keys(r).length > 0) + const formatted: FilterMongoDB[] = [] + if (isValidPreFilter) { + formatted.push( + expandQuery(preFilter[0].query, { '%%user': user }) as FilterMongoDB + ) + } + if (query && Object.keys(query).length > 0) { + formatted.push(query as FilterMongoDB) + } + return formatted } export const getFormattedProjection = ( @@ -90,61 +96,111 @@ export const applyAccessControlToPipeline = ( roles?: Role[] } >, - user: User + user: User, + collectionName: string, + options?: { + isClientPipeline?: boolean + } ): AggregationPipeline => { + const { isClientPipeline = false } = options || {} + const hiddenFieldsForCollection = isClientPipeline + ? getHiddenFieldsFromRulesConfig(rules[collectionName]) + : [] + return pipeline.map((stage) => { const [stageName] = Object.keys(stage) const value = stage[stageName as keyof typeof stage] - // CASE LOOKUP if (stageName === STAGES_TO_SEARCH.LOOKUP) { const lookUpStage = value as LookupStage const currentCollection = lookUpStage.from + checkDenyOperation(rules as Rules, currentCollection, CRUD_OPERATIONS.READ) const lookupRules = rules[currentCollection] || {} const formattedQuery = getFormattedQuery(lookupRules.filters, {}, user) const projection = getFormattedProjection(lookupRules.filters) + const nestedPipeline = applyAccessControlToPipeline( + lookUpStage.pipeline || [], + rules, + user, + currentCollection, + { isClientPipeline } + ) + + const lookupPipeline = [ + ...(formattedQuery.length ? [{ $match: { $and: formattedQuery } }] : []), + ...(projection ? [{ $project: projection }] : []), + ...nestedPipeline + ] + + const pipelineWithHiddenFields = isClientPipeline + ? 
prependUnsetStage(lookupPipeline, getHiddenFieldsFromRulesConfig(lookupRules)) + : lookupPipeline + return { $lookup: { ...lookUpStage, - pipeline: [ - ...(formattedQuery.length ? [{ $match: { $and: formattedQuery } }] : []), - ...(projection ? [{ $project: projection }] : []), - ...applyAccessControlToPipeline(lookUpStage.pipeline || [], rules, user) - ] + pipeline: pipelineWithHiddenFields } } } - // CASE LOOKUP if (stageName === STAGES_TO_SEARCH.UNION_WITH) { const unionWithStage = value as UnionWithStage const isSimpleStage = typeof unionWithStage === 'string' const currentCollection = isSimpleStage ? unionWithStage : unionWithStage.coll + checkDenyOperation(rules as Rules, currentCollection, CRUD_OPERATIONS.READ) const unionRules = rules[currentCollection] || {} const formattedQuery = getFormattedQuery(unionRules.filters, {}, user) const projection = getFormattedProjection(unionRules.filters) - const nestedPipeline = isSimpleStage ? [] : unionWithStage.pipeline || [] + if (isSimpleStage) { + return stage + } + + const nestedPipeline = unionWithStage.pipeline || [] + + const sanitizedNestedPipeline = applyAccessControlToPipeline( + nestedPipeline, + rules, + user, + currentCollection, + { isClientPipeline } + ) + + const unionPipeline = [ + ...(formattedQuery.length ? [{ $match: { $and: formattedQuery } }] : []), + ...(projection ? [{ $project: projection }] : []), + ...sanitizedNestedPipeline + ] + + const pipelineWithHiddenFields = isClientPipeline + ? prependUnsetStage(unionPipeline, getHiddenFieldsFromRulesConfig(unionRules)) + : unionPipeline return { $unionWith: { - coll: currentCollection, - pipeline: [ - ...(formattedQuery.length ? [{ $match: { $and: formattedQuery } }] : []), - ...(projection ? 
[{ $project: projection }] : []), - ...applyAccessControlToPipeline(nestedPipeline, rules, user) - ] + ...unionWithStage, + pipeline: pipelineWithHiddenFields } } } - // CASE FACET if (stageName === STAGES_TO_SEARCH.FACET) { const modifiedFacets = Object.fromEntries( (Object.entries(value) as [string, AggregationPipelineStage[]][]).map( ([facetKey, facetPipeline]) => { - return [facetKey, applyAccessControlToPipeline(facetPipeline, rules, user)] + const sanitizedFacetPipeline = applyAccessControlToPipeline( + facetPipeline, + rules, + user, + collectionName, + { isClientPipeline } + ) + const facetPipelineWithHiddenFields = isClientPipeline + ? prependUnsetStage(sanitizedFacetPipeline, hiddenFieldsForCollection) + : sanitizedFacetPipeline + return [facetKey, facetPipelineWithHiddenFields] } ) ) @@ -210,3 +266,83 @@ export const getCollectionsFromPipeline = (pipeline: Document[]) => { return acc }, []) } + +const CLIENT_STAGE_BLACKLIST = new Set([ + '$replaceRoot', + '$merge', + '$out', + '$function', + '$where', + '$accumulator', + '$graphLookup' +]) + +export function ensureClientPipelineStages(pipeline: AggregationPipeline) { + pipeline.forEach((stage) => { + const [stageName] = Object.keys(stage) + if (!stageName) return + + if (CLIENT_STAGE_BLACKLIST.has(stageName)) { + throw new Error(`Stage ${stageName} is not allowed in client aggregate pipelines`) + } + + const value = stage[stageName as keyof typeof stage] + + if (stageName === STAGES_TO_SEARCH.LOOKUP) { + ensureClientPipelineStages((value as LookupStage).pipeline || []) + return + } + + if (stageName === STAGES_TO_SEARCH.UNION_WITH) { + if (typeof value === 'string') { + throw new Error('$unionWith must provide a pipeline when called from the client') + } + const unionStage = value as { pipeline?: AggregationPipeline } + ensureClientPipelineStages(unionStage.pipeline || []) + return + } + + if (stageName === STAGES_TO_SEARCH.FACET) { + Object.values(value as Record).forEach((facetPipeline) => + 
ensureClientPipelineStages(facetPipeline) + ) + } + }) +} + +export function getHiddenFieldsFromRulesConfig(rulesConfig?: { roles?: Role[] }) { + if (!rulesConfig) { + return [] + } + return collectHiddenFieldsFromRoles(rulesConfig.roles) +} + +function collectHiddenFieldsFromRoles(roles: Role[] = []) { + const hiddenFields = new Set() + + const collectFromFields = ( + fields?: Role['fields'] | Role['additional_fields'] + ) => { + if (!fields) return + Object.entries(fields).forEach(([fieldName, permissions]) => { + const canRead = Boolean(permissions?.read || permissions?.write) + if (!canRead) { + hiddenFields.add(fieldName) + } + }) + } + + roles.forEach((role) => { + collectFromFields(role.fields) + collectFromFields(role.additional_fields) + }) + + return Array.from(hiddenFields) +} + +export function prependUnsetStage(pipeline: AggregationPipeline, hiddenFields: string[]) { + if (!hiddenFields.length) { + return pipeline + } + return [{ $unset: hiddenFields }, ...pipeline] +} diff --git a/packages/flowerbase/src/shared/handleUserRegistration.ts b/packages/flowerbase/src/shared/handleUserRegistration.ts index e8fa4ab..288a199 100644 --- a/packages/flowerbase/src/shared/handleUserRegistration.ts +++ b/packages/flowerbase/src/shared/handleUserRegistration.ts @@ -1,6 +1,7 @@ -import { FastifyMongoObject } from "@fastify/mongodb/types" import { AUTH_CONFIG, DB_NAME } from "../constants" -import { hashPassword } from "../utils/crypto" +import { StateManager } from "../state" +import { GenerateContext } from "../utils/context" +import { generateToken, hashPassword } from "../utils/crypto" import { HandleUserRegistration } from "./models/handleUserRegistration.model" /** @@ -18,7 +19,11 @@ const handleUserRegistration: HandleUserRegistration = (app, opt) => async ({ em } const { authCollection } = AUTH_CONFIG - const mongo: FastifyMongoObject = app?.mongo + const localUserpassConfig = AUTH_CONFIG.localUserpassConfig + const autoConfirm = 
localUserpassConfig?.autoConfirm === true + const runConfirmationFunction = localUserpassConfig?.runConfirmationFunction === true + const confirmationFunctionName = localUserpassConfig?.confirmationFunctionName + const mongo = app?.mongo const db = mongo.client.db(DB_NAME) const hashedPassword = await hashPassword(password) @@ -30,7 +35,7 @@ const handleUserRegistration: HandleUserRegistration = (app, opt) => async ({ em const result = await db?.collection(authCollection!).insertOne({ email, password: hashedPassword, - status: skipUserCheck ? 'confirmed' : 'pending', + status: skipUserCheck || autoConfirm ? 'confirmed' : 'pending', createdAt: new Date(), custom_data: { // TODO: aggiungere dati personalizzati alla registrazione @@ -59,8 +64,83 @@ const handleUserRegistration: HandleUserRegistration = (app, opt) => async ({ em } ) + if (!result?.insertedId || skipUserCheck || autoConfirm) { + return result + } + + if (!runConfirmationFunction) { + throw new Error('Missing confirmation function') + } + + if (!confirmationFunctionName) { + throw new Error('Missing confirmation function name') + } + + const functionsList = StateManager.select('functions') + const services = StateManager.select('services') + const confirmationFunction = functionsList[confirmationFunctionName] + if (!confirmationFunction) { + throw new Error(`Confirmation function not found: ${confirmationFunctionName}`) + } + + const token = generateToken() + const tokenId = generateToken() + await db?.collection(authCollection!).updateOne( + { _id: result.insertedId }, + { + $set: { + confirmationToken: token, + confirmationTokenId: tokenId + } + } + ) + + type ConfirmationResult = { status?: 'success' | 'pending' | 'fail' } + let confirmationStatus: ConfirmationResult['status'] = 'fail' + try { + const response = await GenerateContext({ + args: [{ + token, + tokenId, + username: email + }], + app, + rules: {}, + user: {}, + currentFunction: confirmationFunction, + functionsList, + services, + 
runAsSystem: true + }) as ConfirmationResult + confirmationStatus = response?.status ?? 'fail' + } catch { + confirmationStatus = 'fail' + } + + if (confirmationStatus === 'success') { + await db?.collection(authCollection!).updateOne( + { _id: result.insertedId }, + { + $set: { status: 'confirmed' }, + $unset: { confirmationToken: '', confirmationTokenId: '' } + } + ) + return result + } + + if (confirmationStatus === 'pending') { + return result + } + + await db?.collection(authCollection!).updateOne( + { _id: result.insertedId }, + { + $set: { status: 'failed' }, + $unset: { confirmationToken: '', confirmationTokenId: '' } + } + ) return result } -export default handleUserRegistration \ No newline at end of file +export default handleUserRegistration diff --git a/packages/flowerbase/src/shared/models/handleUserRegistration.model.ts b/packages/flowerbase/src/shared/models/handleUserRegistration.model.ts index 8110147..d01e8f1 100644 --- a/packages/flowerbase/src/shared/models/handleUserRegistration.model.ts +++ b/packages/flowerbase/src/shared/models/handleUserRegistration.model.ts @@ -1,5 +1,4 @@ import { FastifyInstance } from "fastify/types/instance" -import { InsertOneResult } from "mongodb/mongodb" import { User } from "../../auth/dtos" import { Rules } from "../../features/rules/interface" @@ -16,12 +15,18 @@ export type Options = { run_as_system?: boolean } +type RegistrationResult = { + insertedId?: { + toString: () => string + } +} + export type HandleUserRegistration = ( app: FastifyInstance, opt: Options -) => (params: RegistrationParams) => Promise> +) => (params: RegistrationParams) => Promise export enum PROVIDER { LOCAL_USERPASS = "local-userpass", CUSTOM_FUNCTION = "custom-function" -} \ No newline at end of file +} diff --git a/packages/flowerbase/src/types/fastify-raw-body.d.ts b/packages/flowerbase/src/types/fastify-raw-body.d.ts new file mode 100644 index 0000000..64c62cf --- /dev/null +++ b/packages/flowerbase/src/types/fastify-raw-body.d.ts 
@@ -0,0 +1,22 @@ +import 'fastify' +import type { FastifyJWT } from '@fastify/jwt' +import { Db, MongoClient } from 'mongodb' + +declare module 'fastify' { + interface FastifyRequest { + rawBody?: string + user?: FastifyJWT['user'] + } + + interface FastifyContextConfig { + rawBody?: boolean + } + + interface FastifyInstance { + mongo?: { + client: MongoClient + db?: Db + ObjectId: typeof import('mongodb').ObjectId + } + } +} diff --git a/packages/flowerbase/src/utils/__tests__/STEP_B_STATES.test.ts b/packages/flowerbase/src/utils/__tests__/STEP_B_STATES.test.ts index a08e160..e24cdd2 100644 --- a/packages/flowerbase/src/utils/__tests__/STEP_B_STATES.test.ts +++ b/packages/flowerbase/src/utils/__tests__/STEP_B_STATES.test.ts @@ -11,7 +11,7 @@ const { evaluateDocumentsFiltersWrite } = STEP_B_STATES -jest.mock('../roles/machines/B/validators', () => ({ +jest.mock('../roles/machines/read/B/validators', () => ({ evaluateDocumentFiltersReadFn: jest.fn(), evaluateDocumentFiltersWriteFn: jest.fn() })) diff --git a/packages/flowerbase/src/utils/__tests__/STEP_C_STATES.test.ts b/packages/flowerbase/src/utils/__tests__/STEP_C_STATES.test.ts index 8c4a951..5a79dc5 100644 --- a/packages/flowerbase/src/utils/__tests__/STEP_C_STATES.test.ts +++ b/packages/flowerbase/src/utils/__tests__/STEP_C_STATES.test.ts @@ -12,7 +12,7 @@ const endValidation = jest.fn() const goToNextValidationStage = jest.fn() const next = jest.fn() -jest.mock('../roles/machines/C/validators', () => ({ +jest.mock('../roles/machines/read/C/validators', () => ({ evaluateTopLevelReadFn: jest.fn(), checkFieldsPropertyExists: jest.fn(), evaluateTopLevelWriteFn: jest.fn() diff --git a/packages/flowerbase/src/utils/__tests__/STEP_D_STATES.test.ts b/packages/flowerbase/src/utils/__tests__/STEP_D_STATES.test.ts index 06c8a85..8ccb1f2 100644 --- a/packages/flowerbase/src/utils/__tests__/STEP_D_STATES.test.ts +++ b/packages/flowerbase/src/utils/__tests__/STEP_D_STATES.test.ts @@ -76,7 +76,7 @@ describe('STEP_D_STATES', 
() => { }) expect(next).toHaveBeenCalledWith('evaluateRead') }) - it('checkIsValidFieldName should end a failed validation, with an empty document', async () => { + it('checkIsValidFieldName should end a successful validation, with a document', async () => { const mockedLogInfo = jest .spyOn(Utils, 'logMachineInfo') .mockImplementation(() => 'Mocked Value') @@ -95,7 +95,7 @@ describe('STEP_D_STATES', () => { next, initialStep: null }) - expect(endValidation).toHaveBeenCalledWith({ success: false, document: {} }) + expect(endValidation).toHaveBeenCalledWith({ success: true, document: { name: 'test' } }) expect(mockedLogInfo).toHaveBeenCalledWith({ enabled: mockContext.enableLog, machine: 'D', diff --git a/packages/flowerbase/src/utils/__tests__/checkIsValidFieldNameFn.test.ts b/packages/flowerbase/src/utils/__tests__/checkIsValidFieldNameFn.test.ts index 75ee629..0b2fe33 100644 --- a/packages/flowerbase/src/utils/__tests__/checkIsValidFieldNameFn.test.ts +++ b/packages/flowerbase/src/utils/__tests__/checkIsValidFieldNameFn.test.ts @@ -32,7 +32,12 @@ describe('checkIsValidFieldNameFn', () => { } const result = checkIsValidFieldNameFn(context as MachineContext) - expect(result).toEqual({ name: 'Alice', email: 'alice@example.com', _id: mockId }) + expect(result).toEqual({ + _id: mockId, + name: 'Alice', + email: 'alice@example.com', + age: 25 + }) }) it("should exclude _id if role doesn't allows it", () => { const mockedRole = { @@ -127,7 +132,7 @@ describe('checkIsValidFieldNameFn', () => { const result = checkIsValidFieldNameFn(context as MachineContext) - expect(result).toEqual({}) + expect(result).toEqual({ email: 'charlie@example.com' }) }) it('should handle additional_fields correctly for read permission', () => { @@ -147,7 +152,7 @@ describe('checkIsValidFieldNameFn', () => { } const result = checkIsValidFieldNameFn(context as MachineContext) - expect(result).toEqual({ _id: mockId, phone: '123456789' }) + expect(result).toEqual({ _id: mockId, phone: '123456789', 
address: 'Unknown' }) }) it('should handle additional_fields correctly for write permission', () => { const mockedRole = { @@ -186,6 +191,6 @@ describe('checkIsValidFieldNameFn', () => { } const result = checkIsValidFieldNameFn(context as MachineContext) - expect(result).toEqual({ _id: mockId }) + expect(result).toEqual({ _id: mockId, phone: '123456789', address: 'Unknown' }) }) }) diff --git a/packages/flowerbase/src/utils/__tests__/registerPlugins.test.ts b/packages/flowerbase/src/utils/__tests__/registerPlugins.test.ts index 14cccd3..b1bd2a1 100644 --- a/packages/flowerbase/src/utils/__tests__/registerPlugins.test.ts +++ b/packages/flowerbase/src/utils/__tests__/registerPlugins.test.ts @@ -2,6 +2,8 @@ import cors from '@fastify/cors' import fastifyMongodb from '@fastify/mongodb' import { authController } from '../../auth/controller' import jwtAuthPlugin from '../../auth/plugins/jwt' +import fastifyRawBody from 'fastify-raw-body' +import { customFunctionController } from '../../auth/providers/custom-function/controller' import { localUserPassController } from '../../auth/providers/local-userpass/controller' import { Functions } from '../../features/functions/interface' import { registerPlugins } from '../initializer/registerPlugins' @@ -34,7 +36,7 @@ describe('registerPlugins', () => { }) // Check Plugins Registration - expect(registerMock).toHaveBeenCalledTimes(5) + expect(registerMock).toHaveBeenCalledTimes(7) expect(registerMock).toHaveBeenCalledWith(cors, { origin: '*', methods: ['POST', 'GET'] @@ -50,10 +52,22 @@ describe('registerPlugins', () => { expect(registerMock).toHaveBeenCalledWith(localUserPassController, { prefix: `${MOCKED_API_VERSION}/app/:appId/auth/providers/local-userpass` }) + expect(registerMock).toHaveBeenCalledWith(fastifyRawBody, { + field: 'rawBody', + global: false, + encoding: 'utf8', + runFirst: true, + routes: [], + jsonContentTypes: [] + }) + expect(registerMock).toHaveBeenCalledWith(customFunctionController, { + prefix: 
`${MOCKED_API_VERSION}/app/:appId/auth/providers/custom-function` + }) }) it('should handle errors in the catch block', async () => { const errorLog = jest.spyOn(console, 'error').mockImplementation(() => {}) + const logSpy = jest.spyOn(console, 'log').mockImplementation(() => {}) await registerPlugins({ register: errorMock, @@ -67,5 +81,6 @@ describe('registerPlugins', () => { 'Plugin registration failed' ) errorLog.mockRestore() + logSpy.mockRestore() }) }) diff --git a/packages/flowerbase/src/utils/context/index.ts b/packages/flowerbase/src/utils/context/index.ts index c221b49..278701a 100644 --- a/packages/flowerbase/src/utils/context/index.ts +++ b/packages/flowerbase/src/utils/context/index.ts @@ -1,10 +1,116 @@ import { createRequire } from 'node:module' +import path from 'node:path' +import { pathToFileURL } from 'node:url' import vm from 'vm' import { EJSON } from 'bson' import { StateManager } from '../../state' import { generateContextData } from './helpers' import { GenerateContextParams } from './interface' +const dynamicImport = new Function('specifier', 'return import(specifier)') as ( + specifier: string +) => Promise> + +const transformImportsToRequire = (code: string): string => { + let importIndex = 0 + const lines = code.split(/\r?\n/) + + return lines + .map((line) => { + const trimmed = line.trim() + + if (/^import\s+type\s+/.test(trimmed)) { + return '' + } + + const sideEffectMatch = trimmed.match(/^import\s+['"]([^'"]+)['"]\s*;?$/) + if (sideEffectMatch) { + return `require('${sideEffectMatch[1]}')` + } + + const match = trimmed.match(/^import\s+(.+?)\s+from\s+['"]([^'"]+)['"]\s*;?$/) + if (!match) return line + + const [, importClause, source] = match + const clause = importClause.trim() + + if (clause.startsWith('{') && clause.endsWith('}')) { + const named = clause.slice(1, -1).trim() + return `const { ${named} } = require('${source}')` + } + + const namespaceMatch = clause.match(/^\*\s+as\s+(\w+)$/) + if (namespaceMatch) { + return 
`const ${namespaceMatch[1]} = require('${source}')` + } + + if (clause.includes(',')) { + const [defaultPart, restRaw] = clause.split(',', 2) + const defaultName = defaultPart.trim() + const rest = restRaw.trim() + const tmpName = `__fb_import_${importIndex++}` + const linesOut = [`const ${tmpName} = require('${source}')`] + + if (defaultName) { + linesOut.push(`const ${defaultName} = ${tmpName}`) + } + + if (rest.startsWith('{') && rest.endsWith('}')) { + const named = rest.slice(1, -1).trim() + linesOut.push(`const { ${named} } = ${tmpName}`) + } else { + const nsMatch = rest.match(/^\*\s+as\s+(\w+)$/) + if (nsMatch) { + linesOut.push(`const ${nsMatch[1]} = ${tmpName}`) + } + } + + return linesOut.join('\n') + } + + return `const ${clause} = require('${source}')` + }) + .join('\n') +} + +const wrapEsmModule = (code: string): string => { + const prelude = [ + 'const __fb_module = { exports: {} };', + 'let exports = __fb_module.exports;', + 'let module = __fb_module;', + 'const __fb_require = globalThis.__fb_require;', + 'const require = __fb_require;', + 'const __filename = globalThis.__fb_filename;', + 'const __dirname = globalThis.__fb_dirname;' + ].join('\n') + + const trailer = [ + 'globalThis.__fb_module = __fb_module;', + 'globalThis.__fb_exports = exports;' + ].join('\n') + + return `${prelude}\n${code}\n${trailer}` +} + +const resolveImportTarget = (specifier: string, customRequire: NodeRequire): string => { + try { + const resolved = customRequire.resolve(specifier) + if (resolved.startsWith('node:')) return resolved + if (path.isAbsolute(resolved)) { + return pathToFileURL(resolved).href + } + return resolved + } catch { + return specifier + } +} + +const shouldFallbackFromVmModules = (error: unknown): boolean => { + if (!error || typeof error !== 'object') return false + const code = (error as { code?: string }).code + return code === 'ERR_VM_MODULES_DISABLED' || code === 'ERR_VM_MODULES_NOT_SUPPORTED' +} + /** * > Used to generate the current context * 
@testable @@ -28,7 +134,7 @@ export async function GenerateContext({ deserializeArgs = true, enqueue, request -}: GenerateContextParams) { +}: GenerateContextParams): Promise { if (!currentFunction) return const functionsQueue = StateManager.select("functionsQueue") @@ -48,28 +154,151 @@ export async function GenerateContext({ request }) - try { - const entryFile = require.main?.filename ?? process.cwd(); - const customRequire = createRequire(entryFile); - - vm.runInContext(functionToRun.code, vm.createContext({ - ...contextData, require: customRequire, - exports, - module, - __filename: __filename, - __dirname: __dirname - })); + type ExportedFunction = (...args: unknown[]) => unknown + type SandboxModule = { exports: unknown } + type SandboxContext = vm.Context & { + exports?: unknown + module?: SandboxModule + __fb_module?: SandboxModule + __fb_exports?: unknown + __fb_require?: NodeRequire + __fb_filename?: string + __fb_dirname?: string + } + + const isExportedFunction = (value: unknown): value is ExportedFunction => + typeof value === 'function' + + const getDefaultExport = (value: unknown): ExportedFunction | undefined => { + if (!value || typeof value !== 'object') return undefined + if (!('default' in value)) return undefined + const maybeDefault = (value as { default?: unknown }).default + return isExportedFunction(maybeDefault) ? maybeDefault : undefined } - catch (e) { - console.log(e) + + const resolveExport = (ctx: SandboxContext): ExportedFunction | undefined => { + const moduleExports = ctx.module?.exports ?? ctx.__fb_module?.exports + if (isExportedFunction(moduleExports)) return moduleExports + const contextExports = ctx.exports ?? ctx.__fb_exports + if (isExportedFunction(contextExports)) return contextExports + return getDefaultExport(moduleExports) ?? getDefaultExport(contextExports) + } + + const sandboxModule: SandboxModule = { exports: {} } + + try { + const entryFile = require.main?.filename ?? 
process.cwd() + const customRequire = createRequire(entryFile) + + const vmContext: SandboxContext = vm.createContext({ + ...contextData, + require: customRequire, + exports: sandboxModule.exports, + module: sandboxModule, + __filename, + __dirname, + __fb_require: customRequire, + __fb_filename: __filename, + __fb_dirname: __dirname + }) as SandboxContext + + const vmModules = vm as typeof vm & { + SourceTextModule?: typeof vm.SourceTextModule + SyntheticModule?: typeof vm.SyntheticModule + } + const hasStaticImport = /\bimport\s+/.test(functionToRun.code) + let usedVmModules = false + + if (hasStaticImport && vmModules.SourceTextModule && vmModules.SyntheticModule) { + try { + const moduleCache = new Map() + + const loadModule = async (specifier: string): Promise => { + const importTarget = resolveImportTarget(specifier, customRequire) + const cached = moduleCache.get(importTarget) + if (cached) return cached + + const namespace = await dynamicImport(importTarget) + const exportNames = Object.keys(namespace) + if ('default' in namespace && !exportNames.includes('default')) { + exportNames.push('default') + } + + const syntheticModule = new vmModules.SyntheticModule( + exportNames, + function () { + for (const name of exportNames) { + this.setExport(name, namespace[name]) + } + }, + { context: vmContext, identifier: importTarget } + ) + + moduleCache.set(importTarget, syntheticModule) + return syntheticModule + } + + const importModuleDynamically = (async ( + specifier: string + ): Promise => { + const module = await loadModule(specifier) + if (module.status === 'unlinked') { + await module.link(loadModule) + } + if (module.status === 'linked') { + await module.evaluate() + } + return module + }) as unknown as vm.ScriptOptions['importModuleDynamically'] + + const sourceModule = new vmModules.SourceTextModule( + wrapEsmModule(functionToRun.code), + { + context: vmContext, + identifier: entryFile, + initializeImportMeta: (meta) => { + meta.url = 
pathToFileURL(entryFile).href + }, + importModuleDynamically + } + ) + + await sourceModule.link(loadModule) + await sourceModule.evaluate() + usedVmModules = true + } catch (error) { + if (!shouldFallbackFromVmModules(error)) { + throw error + } + } + } + + if (!usedVmModules) { + const codeToRun = functionToRun.code.includes('import ') + ? transformImportsToRequire(functionToRun.code) + : functionToRun.code + vm.runInContext(codeToRun, vmContext) + } + + sandboxModule.exports = resolveExport(vmContext) ?? sandboxModule.exports + } catch (error) { + console.error(error) + throw error } if (deserializeArgs) { - return await module.exports(...EJSON.deserialize(args)) + return await (sandboxModule.exports as ExportedFunction)( + ...EJSON.deserialize(args) + ) } - return await module.exports(...args) + return await (sandboxModule.exports as ExportedFunction)(...args) + } + try { + const res = await functionsQueue.add(run, enqueue) + return res + } catch (error) { + console.error(error) + throw error } - const res = await functionsQueue.add(run, enqueue) - return res } diff --git a/packages/flowerbase/src/utils/context/interface.ts b/packages/flowerbase/src/utils/context/interface.ts index ba30711..1d08d3c 100644 --- a/packages/flowerbase/src/utils/context/interface.ts +++ b/packages/flowerbase/src/utils/context/interface.ts @@ -20,5 +20,5 @@ export interface GenerateContextParams { type ContextRequest = Pick export interface GenerateContextDataParams extends Omit { - GenerateContext: (params: GenerateContextParams) => Promise + GenerateContext: (params: GenerateContextParams) => Promise } diff --git a/packages/flowerbase/src/utils/crypto/index.ts b/packages/flowerbase/src/utils/crypto/index.ts index 90a1e7d..34ab5df 100644 --- a/packages/flowerbase/src/utils/crypto/index.ts +++ b/packages/flowerbase/src/utils/crypto/index.ts @@ -36,6 +36,10 @@ export const comparePassword = async (plaintext: string, storedPassword: string) * > Generate a random token * @param length 
-> the token length */ -export const generateToken = (length = 32) => { +export const generateToken = (length = 64) => { return crypto.randomBytes(length).toString('hex') } + +export const hashToken = (token: string) => { + return crypto.createHash('sha256').update(token).digest('hex') +} diff --git a/packages/flowerbase/src/utils/initializer/exposeRoutes.ts b/packages/flowerbase/src/utils/initializer/exposeRoutes.ts index 9e90036..9a6af71 100644 --- a/packages/flowerbase/src/utils/initializer/exposeRoutes.ts +++ b/packages/flowerbase/src/utils/initializer/exposeRoutes.ts @@ -13,12 +13,21 @@ import { hashPassword } from '../crypto' */ export const exposeRoutes = async (fastify: FastifyInstance) => { try { - fastify.get(`${API_VERSION}/app/:appId/location`, async (req) => ({ - deployment_model: 'LOCAL', - location: 'IE', - hostname: `${DEFAULT_CONFIG.HTTPS_SCHEMA}://${req.headers.host}`, - ws_hostname: `${DEFAULT_CONFIG.HTTPS_SCHEMA === 'https' ? 'wss' : 'ws'}://${req.headers.host}` - })) + fastify.get(`${API_VERSION}/app/:appId/location`, async (req) => { + const schema = DEFAULT_CONFIG?.HTTPS_SCHEMA ?? 'http' + const headerHost = req.headers.host ?? 'localhost:3000' + const hostname = headerHost.split(':')[0] + const port = DEFAULT_CONFIG?.PORT ?? 
3000 + const host = `${hostname}:${port}` + const wsSchema = 'wss' + + return { + deployment_model: 'LOCAL', + location: 'IE', + hostname: `${schema}://${host}`, + ws_hostname: `${wsSchema}://${host}` + } + }) fastify.get('/health', async () => ({ status: 'ok', @@ -77,5 +86,3 @@ export const exposeRoutes = async (fastify: FastifyInstance) => { console.error('Error while exposing routes', (e as Error).message) } } - - diff --git a/packages/flowerbase/src/utils/initializer/registerPlugins.ts b/packages/flowerbase/src/utils/initializer/registerPlugins.ts index f7b71f6..c50871e 100644 --- a/packages/flowerbase/src/utils/initializer/registerPlugins.ts +++ b/packages/flowerbase/src/utils/initializer/registerPlugins.ts @@ -56,6 +56,7 @@ export const registerPlugins = async ({ } catch (e) { console.log('Registration FAILED --->', pluginName) console.log('Error --->', e) + throw e } }) } catch (e) { @@ -76,11 +77,16 @@ const getRegisterConfig = async ({ }: Pick): Promise< RegisterConfig[] > => { + const corsOptions = corsConfig ?? 
{ + origin: '*', + methods: ['POST', 'GET'] + } + return [ { pluginName: 'cors', plugin: cors, - options: corsConfig + options: corsOptions }, { pluginName: 'fastifyMongodb', diff --git a/packages/flowerbase/src/utils/roles/helpers.ts b/packages/flowerbase/src/utils/roles/helpers.ts index 5050be4..0f7b247 100644 --- a/packages/flowerbase/src/utils/roles/helpers.ts +++ b/packages/flowerbase/src/utils/roles/helpers.ts @@ -22,7 +22,7 @@ export const evaluateExpression = async ( '%%true': true } const conditions = expandQuery(expression, value) - const complexCondition = Object.entries>(conditions).find(([key]) => + const complexCondition = Object.entries(conditions as Record).find(([key]) => functionsConditions.includes(key) ) return complexCondition @@ -34,15 +34,32 @@ const evaluateComplexExpression = async ( condition: [string, Record], params: MachineContext['params'], user: MachineContext['user'] -) => { +): Promise => { const [key, config] = condition - const { name } = config['%function'] + const functionConfig = config['%function'] + const { name, arguments: fnArguments } = functionConfig const functionsList = StateManager.select('functions') const app = StateManager.select('app') const currentFunction = functionsList[name] + + const expansionContext = { + ...params.expansions, + ...params.cursor, + '%%root': params.cursor, + '%%user': user, + '%%true': true, + '%%false': false + } + + const expandedArguments = + fnArguments && fnArguments.length + ? ((expandQuery({ args: fnArguments }, expansionContext) as { args: unknown[] }) + .args ?? []) + : [params.cursor] + const response = await GenerateContext({ - args: [params.cursor], + args: expandedArguments, app, rules: StateManager.select("rules"), user, @@ -50,5 +67,6 @@ const evaluateComplexExpression = async ( functionsList, services }) - return key === '%%true' ? response : !response + const isTruthy = Boolean(response) + return key === '%%true' ? 
isTruthy : !isTruthy } diff --git a/packages/flowerbase/src/utils/roles/machines/commonValidators.ts b/packages/flowerbase/src/utils/roles/machines/commonValidators.ts index d7c5cb0..cedd9d2 100644 --- a/packages/flowerbase/src/utils/roles/machines/commonValidators.ts +++ b/packages/flowerbase/src/utils/roles/machines/commonValidators.ts @@ -3,7 +3,7 @@ import { evaluateExpression } from '../helpers' import { DocumentFiltersPermissions } from '../interface' import { MachineContext } from './interface' -const readOnlyPermissions = ['read'] +const readOnlyPermissions = ['read', 'search'] const readWritePermissions = ['write', 'delete', 'insert', ...readOnlyPermissions] export const evaluateDocumentFiltersFn = async ( @@ -23,11 +23,16 @@ export const evaluateTopLevelPermissionsFn = async ( { params, role, user }: MachineContext, currentType: MachineContext['params']['type'] ) => { - return role[currentType] - ? await evaluateExpression(params, role[currentType], user) - : undefined + const permission = role?.[currentType] + if (typeof permission === 'undefined') { + return undefined + } + + return await evaluateExpression(params, permission, user) } export const checkFieldsPropertyExists = ({ role }: MachineContext) => { - return !!Object.keys(role.fields ?? {}).length + const hasFields = !!Object.keys(role?.fields ?? {}).length + const hasAdditional = !!Object.keys(role?.additional_fields ?? 
{}).length + return hasFields || hasAdditional } diff --git a/packages/flowerbase/src/utils/roles/machines/read/B/validators.ts b/packages/flowerbase/src/utils/roles/machines/read/B/validators.ts new file mode 100644 index 0000000..3df5522 --- /dev/null +++ b/packages/flowerbase/src/utils/roles/machines/read/B/validators.ts @@ -0,0 +1,8 @@ +import { MachineContext } from '../../interface' +import { evaluateDocumentFiltersFn } from '../../commonValidators' + +export const evaluateDocumentFiltersReadFn = (context: MachineContext) => + evaluateDocumentFiltersFn(context, 'read') + +export const evaluateDocumentFiltersWriteFn = (context: MachineContext) => + evaluateDocumentFiltersFn(context, 'write') diff --git a/packages/flowerbase/src/utils/roles/machines/read/C/index.ts b/packages/flowerbase/src/utils/roles/machines/read/C/index.ts index 2309d18..12e1a68 100644 --- a/packages/flowerbase/src/utils/roles/machines/read/C/index.ts +++ b/packages/flowerbase/src/utils/roles/machines/read/C/index.ts @@ -1,7 +1,8 @@ import { checkFieldsPropertyExists, - evaluateTopLevelPermissionsFn -} from '../../commonValidators' + evaluateTopLevelReadFn, + evaluateTopLevelWriteFn +} from './validators' import { States } from '../../interface' import { logMachineInfo } from '../../utils' @@ -13,10 +14,13 @@ export const STEP_C_STATES: States = { step: 1, stepName: 'evaluateTopLevelRead' }) - const check = await evaluateTopLevelPermissionsFn(context, 'read') - return check - ? endValidation({ success: true }) - : next('evaluateTopLevelWrite', { check }) + const check = await evaluateTopLevelReadFn(context) + if (check) { + return checkFieldsPropertyExists(context) + ? 
next('checkFieldsProperty') + : endValidation({ success: true }) + } + return next('evaluateTopLevelWrite', { check }) }, evaluateTopLevelWrite: async ({ context, next, endValidation }) => { logMachineInfo({ @@ -25,7 +29,7 @@ export const STEP_C_STATES: States = { step: 2, stepName: 'evaluateTopLevelWrite' }) - const check = await evaluateTopLevelPermissionsFn(context, 'write') + const check = await evaluateTopLevelWriteFn(context) if (check) return endValidation({ success: true }) return context?.prevParams?.check === false ? endValidation({ success: false }) diff --git a/packages/flowerbase/src/utils/roles/machines/read/C/validators.ts b/packages/flowerbase/src/utils/roles/machines/read/C/validators.ts new file mode 100644 index 0000000..2eecc50 --- /dev/null +++ b/packages/flowerbase/src/utils/roles/machines/read/C/validators.ts @@ -0,0 +1,21 @@ +import { MachineContext } from '../../interface' +import { + checkFieldsPropertyExists, + evaluateTopLevelPermissionsFn +} from '../../commonValidators' + +export const evaluateTopLevelReadFn = async (context: MachineContext) => { + if (context.params.type !== 'read') { + return false + } + return evaluateTopLevelPermissionsFn(context, 'read') +} + +export const evaluateTopLevelWriteFn = async (context: MachineContext) => { + if (!['read', 'write'].includes(context.params.type)) { + return undefined + } + return evaluateTopLevelPermissionsFn(context, 'write') +} + +export { checkFieldsPropertyExists } from '../../commonValidators' diff --git a/packages/flowerbase/src/utils/roles/machines/read/D/index.ts b/packages/flowerbase/src/utils/roles/machines/read/D/index.ts index 59322dd..cb99ff9 100644 --- a/packages/flowerbase/src/utils/roles/machines/read/D/index.ts +++ b/packages/flowerbase/src/utils/roles/machines/read/D/index.ts @@ -1,7 +1,25 @@ -import { States } from '../../interface' +import { Document } from 'mongodb' +import { MachineContext, States } from '../../interface' import { logMachineInfo } from '../../utils' 
import { checkAdditionalFieldsFn, checkIsValidFieldNameFn } from './validators' +const runCheckIsValidFieldName = async ({ + context, + endValidation +}: { + context: MachineContext + endValidation: ({ success, document }: { success: boolean; document?: Document }) => void +}) => { + logMachineInfo({ + enabled: context.enableLog, + machine: 'D', + step: 2, + stepName: 'checkIsValidFieldName' + }) + const document = checkIsValidFieldNameFn(context) + return endValidation({ success: !!Object.keys(document).length, document }) +} + export const STEP_D_STATES: States = { checkAdditionalFields: async ({ context, next, endValidation }) => { logMachineInfo({ @@ -11,16 +29,8 @@ export const STEP_D_STATES: States = { stepName: 'checkAdditionalFields' }) const check = checkAdditionalFieldsFn(context) - return check ? next('checkIsValidFieldName') : endValidation({ success: false }) + return check ? next('evaluateRead') : endValidation({ success: false }) }, - checkIsValidFieldName: async ({ context, endValidation }) => { - logMachineInfo({ - enabled: context.enableLog, - machine: 'D', - step: 2, - stepName: 'checkIsValidFieldName' - }) - const document = checkIsValidFieldNameFn(context) - return endValidation({ success: !!Object.keys(document).length, document }) - } + evaluateRead: runCheckIsValidFieldName, + checkIsValidFieldName: runCheckIsValidFieldName } diff --git a/packages/flowerbase/src/utils/rules.ts b/packages/flowerbase/src/utils/rules.ts index 5a8f07b..15db894 100644 --- a/packages/flowerbase/src/utils/rules.ts +++ b/packages/flowerbase/src/utils/rules.ts @@ -1,31 +1,40 @@ import get from 'lodash/get' -const removeExtraColons = (val: unknown) => { - return val?.toString().replace(/:+/g, ":") +const resolvePlaceholder = (value: string, objs: Record) => { + if (!value.startsWith('%%')) return value + + const path = value.slice(2) + const [rootKey, ...rest] = path.split('.') + const rootToken = `%%${rootKey}` + const rootValue = objs[rootToken] + + if 
(!rest.length) { + return rootValue === undefined ? value : rootValue + } + + const resolved = get(rootValue as object, rest.join('.')) + return resolved === undefined ? value : resolved } -// Funzione che espande dinamicamente i placeholder con supporto per percorsi annidati +const expandValue = (input: unknown, objs: Record): unknown => { + if (Array.isArray(input)) { + return input.map((item) => expandValue(item, objs)) + } + if (input && typeof input === 'object') { + return Object.fromEntries( + Object.entries(input).map(([key, val]) => [key, expandValue(val, objs)]) + ) + } + if (typeof input === 'string') { + return resolvePlaceholder(input, objs) + } + return input +} + +// Espande dinamicamente i placeholder con supporto per array e percorsi annidati export function expandQuery( template: Record, objs: Record ) { - let expandedQuery = JSON.stringify(template) // Converti l'oggetto in una stringa per sostituire i placeholder - const regex = /:\s*"%%([a-zA-Z0-9_.]+)"/g - Object.keys(objs).forEach(() => { - // Espandi tutti i placeholder %%values. - - const callback = (match: string, path: string) => { - const value = get(objs, `%%${path}`) // Recupera il valore annidato da values - const finalValue = typeof value === 'string' ? `"${value}"` : value && JSON.stringify(value) - // TODO tolto i primi : creava questo tipo di oggetto {"userId"::"%%user.id"} - const val = `:${value !== undefined ? 
finalValue : match}`; // Sostituisci se esiste, altrimenti lascia il placeholder - return removeExtraColons(val) - } - - expandedQuery = expandedQuery.replace( - regex, - callback as Parameters[1] - ) - }) - return JSON.parse(expandedQuery) // Converti la stringa JSON di nuovo in un oggetto + return expandValue(template, objs) as Record } diff --git a/packages/flowerbase/tsconfig.spec.json b/packages/flowerbase/tsconfig.spec.json new file mode 100644 index 0000000..6d4ced3 --- /dev/null +++ b/packages/flowerbase/tsconfig.spec.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "types": ["node", "jest"] + }, + "include": ["src/**/*.test.ts", "src/**/*.spec.ts"] +} diff --git a/tests/e2e/app/auth/custom_user_data.json b/tests/e2e/app/auth/custom_user_data.json new file mode 100644 index 0000000..51ccbe7 --- /dev/null +++ b/tests/e2e/app/auth/custom_user_data.json @@ -0,0 +1,8 @@ +{ + "enabled": true, + "mongo_service_name": "mongodb-atlas", + "database_name": "flowerbase-e2e", + "collection_name": "users", + "user_id_field": "id", + "on_user_creation_function_name": "onCreateUser" +} diff --git a/tests/e2e/app/auth/providers.json b/tests/e2e/app/auth/providers.json new file mode 100644 index 0000000..d9c7f58 --- /dev/null +++ b/tests/e2e/app/auth/providers.json @@ -0,0 +1,25 @@ +{ + "api-key": { + "name": "api-key", + "type": "api-key", + "disabled": true + }, + "local-userpass": { + "name": "local-userpass", + "type": "local-userpass", + "disabled": false, + "config": { + "autoConfirm": true, + "resetFunctionName": "resetPasswordHandler", + "resetPasswordSubject": "reset", + "resetPasswordUrl": "http://localhost/reset", + "runConfirmationFunction": false, + "runResetFunction": true, + "mailConfig": { + "from": "no-reply@example.com", + "subject": "Password Reset", + "mailToken": "ENV.MAIL_TOKEN" + } + } + } +} diff --git a/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/activities/rules.json 
b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/activities/rules.json new file mode 100644 index 0000000..0104c89 --- /dev/null +++ b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/activities/rules.json @@ -0,0 +1,50 @@ +{ + "database": "flowerbase-e2e", + "collection": "activities", + "filters": [ + { + "name": "workspace visibility", + "apply_when": { + "%%true": true + }, + "query": { + "$or": [ + { + "ownerId": "%%user.id" + }, + { + "visibility.type": { + "$ne": "onlyme" + } + } + ], + "workspace": { + "$in": "%%user.custom_data.workspaces" + } + } + } + ], + "roles": [ + { + "name": "workspace members", + "apply_when": { + "workspace": { + "$in": "%%user.custom_data.workspaces" + } + }, + "insert": true, + "delete": true, + "search": true, + "read": true, + "write": { + "%%true": { + "%function": { + "arguments": ["%%root"], + "name": "isAuthorizedUser" + } + } + }, + "additional_fields": {} + } + ] +} diff --git a/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/activityLogs/rules.json b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/activityLogs/rules.json new file mode 100644 index 0000000..49ca1f8 --- /dev/null +++ b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/activityLogs/rules.json @@ -0,0 +1,44 @@ +{ + "database": "flowerbase-e2e", + "collection": "activityLogs", + "filters": [ + { + "name": "adminAccess", + "apply_when": { + "%%user.custom_data.role": "admin" + }, + "query": {} + }, + { + "name": "activeOnly", + "apply_when": {}, + "query": { + "status": "active" + } + } + ], + "roles": [ + { + "name": "activeLogRole", + "apply_when": { + "status": "active" + }, + "insert": false, + "delete": false, + "search": true, + "read": true, + "write": false + }, + { + "name": "adminAuditRole", + "apply_when": { + "%%user.custom_data.role": "admin" + }, + "insert": true, + "delete": true, + "search": true, + "read": true, + "write": true + } + ] +} diff --git 
a/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/counters/rules.json b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/counters/rules.json new file mode 100644 index 0000000..b604aba --- /dev/null +++ b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/counters/rules.json @@ -0,0 +1,78 @@ +{ + "database": "flowerbase-e2e", + "collection": "counters", + "filters": [ + { + "name": "adminAccess", + "apply_when": { + "%%user.custom_data.role": "admin" + }, + "query": {} + }, + { + "name": "visibility matrix", + "apply_when": { + "%%true": true + }, + "query": { + "$or": [ + { + "ownerId": "%%user.id" + }, + { + "visibility.type": "all" + }, + { + "visibility.users": { + "$in": [ + "%%user.id" + ] + } + }, + { + "$and": [ + { + "workspace": { + "$in": "%%user.custom_data.adminIn" + } + }, + { + "visibility.type": { + "$ne": "onlyme" + } + } + ] + } + ], + "workspace": { + "$in": "%%user.custom_data.workspaces" + } + } + } + ], + "roles": [ + { + "name": "workspace counters", + "apply_when": { + "workspace": { + "$in": "%%user.custom_data.workspaces" + } + }, + "insert": true, + "delete": true, + "search": true, + "read": true, + "write": { + "%%true": { + "%function": { + "arguments": [ + "%%root" + ], + "name": "isAuthorizedAdmin" + } + } + }, + "additional_fields": {} + } + ] +} diff --git a/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/projects/rules.json b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/projects/rules.json new file mode 100644 index 0000000..817de15 --- /dev/null +++ b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/projects/rules.json @@ -0,0 +1,76 @@ +{ + "database": "flowerbase-e2e", + "collection": "projects", + "filters": [ + { + "name": "adminAccess", + "apply_when": { + "%%user.custom_data.role": "admin" + }, + "query": {} + }, + { + "name": "ownerAccess", + "apply_when": { + "ownerId": "%%user.id" + }, + "query": { + "ownerId": "%%user.id" + }, + "projection": { + "name": 1, + "ownerId": 1, + 
"summary": 1 + } + } + ], + "roles": [ + { + "name": "projectOwner", + "apply_when": { + "ownerId": "%%user.id" + }, + "insert": true, + "delete": false, + "search": true, + "read": true, + "write": { + "%%true": { + "%function": { + "arguments": ["%%root"], + "name": "isAuthorizedUser" + } + } + }, + "fields": { + "secretNotes": { + "read": false, + "write": false + } + }, + "additional_fields": { + "internalCode": { + "read": false, + "write": false + } + } + }, + { + "name": "projectAdmin", + "apply_when": { + "%%user.custom_data.role": "admin" + }, + "insert": true, + "delete": true, + "search": true, + "read": true, + "write": true, + "fields": { + "secretNotes": { + "read": true, + "write": true + } + } + } + ] +} diff --git a/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/todos/rules.json b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/todos/rules.json new file mode 100644 index 0000000..4895019 --- /dev/null +++ b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/todos/rules.json @@ -0,0 +1,26 @@ +{ + "database": "flowerbase-e2e", + "collection": "todos", + "filters": [ + { + "name": "onlyOwnTodos", + "apply_when": {}, + "query": { + "userId": "%%user.id" + } + } + ], + "roles": [ + { + "name": "todoOwner", + "apply_when": { + "userId": "%%user.id" + }, + "insert": true, + "delete": true, + "search": true, + "read": true, + "write": true + } + ] +} diff --git a/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/users/rules.json b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/users/rules.json new file mode 100644 index 0000000..fd2455c --- /dev/null +++ b/tests/e2e/app/data_sources/mongodb-atlas/flowerbase-e2e/users/rules.json @@ -0,0 +1,77 @@ +{ + "database": "flowerbase-e2e", + "collection": "users", + "filters": [ + { + "name": "workspace or owner", + "query": { + "$or": [ + { + "workspaces": { + "$in": "%%user.custom_data.workspaces" + } + }, + { + "userId": "%%user.id" + } + ] + }, + "apply_when": {} + } + ], + 
"roles": [ + { + "name": "workspace members", + "apply_when": { + "%%true": true + }, + "insert": false, + "delete": false, + "search": true, + "read": true, + "fields": { + "avatar": { + "write": { + "%%true": { + "%function": { + "arguments": ["%%root"], + "name": "isAuthorizedMyUser" + } + } + } + }, + "name": { + "write": { + "%%true": { + "%function": { + "arguments": ["%%root"], + "name": "isAuthorizedMyUser" + } + } + } + }, + "tags": { + "write": { + "%%true": { + "%function": { + "arguments": ["%%root"], + "name": "isAuthorizedMyUser" + } + } + } + }, + "updatedAt": { + "write": { + "%%true": { + "%function": { + "arguments": ["%%root"], + "name": "isAuthorizedMyUser" + } + } + } + } + }, + "additional_fields": {} + } + ] +} diff --git a/tests/e2e/app/functions/api_checkWorkspace.js b/tests/e2e/app/functions/api_checkWorkspace.js new file mode 100644 index 0000000..ac4b164 --- /dev/null +++ b/tests/e2e/app/functions/api_checkWorkspace.js @@ -0,0 +1,9 @@ +module.exports = (request, response) => { + const workspace = request?.query?.workspace ?? 
'default' + response.setStatusCode(202) + response.setBody({ + success: true, + workspace, + source: 'api_checkWorkspace' + }) +} diff --git a/tests/e2e/app/functions/config.json b/tests/e2e/app/functions/config.json new file mode 100644 index 0000000..e7c93df --- /dev/null +++ b/tests/e2e/app/functions/config.json @@ -0,0 +1,57 @@ +[ + { + "name": "isAuthorizedUser", + "private": false, + "run_as_system": false + }, + { + "name": "isAuthorizedMyUser", + "private": false, + "run_as_system": false + }, + { + "name": "isAuthorizedAdmin", + "private": false, + "run_as_system": false + }, + { + "name": "privateEcho", + "private": true, + "run_as_system": false + }, + { + "name": "logTriggerEvent", + "private": false, + "run_as_system": true + }, + { + "name": "publicListAuthUsers", + "private": false, + "run_as_system": false + }, + { + "name": "systemListUsers", + "private": false, + "run_as_system": true + }, + { + "name": "api_checkWorkspace", + "private": false, + "run_as_system": false + }, + { + "name": "resetPasswordHandler", + "private": false, + "run_as_system": true + }, + { + "name": "onCreateUser", + "private": true, + "run_as_system": true + }, + { + "name": "confirmUser", + "private": true, + "run_as_system": true + } +] diff --git a/tests/e2e/app/functions/confirmUser.js b/tests/e2e/app/functions/confirmUser.js new file mode 100644 index 0000000..a4d9c96 --- /dev/null +++ b/tests/e2e/app/functions/confirmUser.js @@ -0,0 +1,19 @@ +module.exports = async function ({ tokenId, username }) { + const mongoService = context.services.get('mongodb-atlas') + const collection = mongoService.db('flowerbase-e2e').collection('triggerEvents') + await collection.insertOne({ + documentId: username, + type: 'user_confirmation', + email: username, + tokenId, + createdAt: new Date().toISOString() + }) + + if (typeof username === 'string' && username.includes('pending')) { + return { status: 'pending' } + } + if (typeof username === 'string' && username.includes('fail')) { + 
return { status: 'fail' } + } + return { status: 'success' } +} diff --git a/tests/e2e/app/functions/isAuthorizedAdmin.js b/tests/e2e/app/functions/isAuthorizedAdmin.js new file mode 100644 index 0000000..e43caaa --- /dev/null +++ b/tests/e2e/app/functions/isAuthorizedAdmin.js @@ -0,0 +1,12 @@ +module.exports = async function (doc) { + if (!doc || !context?.user) { + return false + } + + if (context.user.role === 'admin') { + return true + } + + const adminWorkspaces = context.user.custom_data?.adminIn ?? [] + return adminWorkspaces.includes(doc?.workspace) +} diff --git a/tests/e2e/app/functions/isAuthorizedMyUser.js b/tests/e2e/app/functions/isAuthorizedMyUser.js new file mode 100644 index 0000000..dd02d12 --- /dev/null +++ b/tests/e2e/app/functions/isAuthorizedMyUser.js @@ -0,0 +1,7 @@ +module.exports = async function (doc) { + if (!doc || !context?.user?.id) { + return false + } + + return doc.userId === context.user.id +} diff --git a/tests/e2e/app/functions/isAuthorizedUser.js b/tests/e2e/app/functions/isAuthorizedUser.js new file mode 100644 index 0000000..3fa3bd8 --- /dev/null +++ b/tests/e2e/app/functions/isAuthorizedUser.js @@ -0,0 +1,8 @@ +const isAuthorizedUser = (doc) => { + const user = context?.user + if (!doc || !user) return false + if (user.role === 'admin') return true + return doc.ownerId === user.id +} + +module.exports = isAuthorizedUser diff --git a/tests/e2e/app/functions/logTriggerEvent.js b/tests/e2e/app/functions/logTriggerEvent.js new file mode 100644 index 0000000..7508363 --- /dev/null +++ b/tests/e2e/app/functions/logTriggerEvent.js @@ -0,0 +1,15 @@ +const logTriggerEvent = async (changeEvent) => { + const mongoService = context.services.get('mongodb-atlas') + const collection = mongoService.db('flowerbase-e2e').collection('triggerEvents') + const documentId = changeEvent?.documentKey?._id?.toString() + await collection.insertOne({ + timestamp: new Date().toISOString(), + operationType: changeEvent?.operationType, + collection: 
changeEvent?.ns?.coll, + documentId, + payload: changeEvent?.fullDocument + }) + return { recorded: true, documentId } +} + +module.exports = logTriggerEvent diff --git a/tests/e2e/app/functions/onCreateUser.js b/tests/e2e/app/functions/onCreateUser.js new file mode 100644 index 0000000..e129caf --- /dev/null +++ b/tests/e2e/app/functions/onCreateUser.js @@ -0,0 +1,13 @@ +module.exports = async function (payload) { + const user = payload?.user ?? payload + const mongoService = context.services.get('mongodb-atlas') + const collection = mongoService.db('flowerbase-e2e').collection('triggerEvents') + const documentId = user?.id?.toString() ?? 'unknown' + await collection.insertOne({ + documentId, + type: 'on_user_creation', + email: user?.email ?? user?.data?.email ?? null, + createdAt: new Date().toISOString() + }) + return { recorded: true, documentId } +} diff --git a/tests/e2e/app/functions/privateEcho.js b/tests/e2e/app/functions/privateEcho.js new file mode 100644 index 0000000..a052897 --- /dev/null +++ b/tests/e2e/app/functions/privateEcho.js @@ -0,0 +1,3 @@ +module.exports = async function () { + return { ok: true, name: 'privateEcho' } +} diff --git a/tests/e2e/app/functions/publicListAuthUsers.js b/tests/e2e/app/functions/publicListAuthUsers.js new file mode 100644 index 0000000..939efbd --- /dev/null +++ b/tests/e2e/app/functions/publicListAuthUsers.js @@ -0,0 +1,6 @@ +module.exports = async function () { + const mongoService = context.services.get('mongodb-atlas') + const collection = mongoService.db('flowerbase-e2e').collection('auth_users') + const users = await collection.find({}).toArray() + return { users } +} diff --git a/tests/e2e/app/functions/resetPasswordHandler.js b/tests/e2e/app/functions/resetPasswordHandler.js new file mode 100644 index 0000000..3d0de18 --- /dev/null +++ b/tests/e2e/app/functions/resetPasswordHandler.js @@ -0,0 +1,6 @@ +module.exports = async function ({ token, tokenId, email }) { + if (!token || !tokenId || !email) { + 
throw new Error('Missing reset params') + } + return { ok: true } +} diff --git a/tests/e2e/app/functions/systemListUsers.js b/tests/e2e/app/functions/systemListUsers.js new file mode 100644 index 0000000..d91ce7d --- /dev/null +++ b/tests/e2e/app/functions/systemListUsers.js @@ -0,0 +1,12 @@ +module.exports = async function () { + const mongoService = context.services.get('mongodb-atlas') + const usersCollection = mongoService.db('flowerbase-e2e').collection('users') + const users = await usersCollection + .find({}) + .project({ email: 1, userId: 1, _id: 0 }) + .toArray() + return { + count: users.length, + users + } +} diff --git a/tests/e2e/app/http_endpoints/config.json b/tests/e2e/app/http_endpoints/config.json new file mode 100644 index 0000000..b145890 --- /dev/null +++ b/tests/e2e/app/http_endpoints/config.json @@ -0,0 +1,12 @@ +[ + { + "http_method": "GET", + "route": "/api/checkWorkspace", + "function_name": "api_checkWorkspace", + "validation_method": "NO_VALIDATION", + "respond_result": true, + "fetch_custom_user_data": false, + "create_user_on_auth": false, + "disabled": false + } +] diff --git a/tests/e2e/app/triggers/activityLogsTrigger.json b/tests/e2e/app/triggers/activityLogsTrigger.json new file mode 100644 index 0000000..4da34a5 --- /dev/null +++ b/tests/e2e/app/triggers/activityLogsTrigger.json @@ -0,0 +1,18 @@ +{ + "name": "activityLogsTrigger", + "type": "DATABASE", + "disabled": false, + "config": { + "database": "flowerbase-e2e", + "collection": "activityLogs", + "operation_types": ["insert"], + "match": {} + }, + "event_processors": { + "FUNCTION": { + "config": { + "function_name": "logTriggerEvent" + } + } + } +} diff --git a/tests/e2e/mongodb-atlas.rules.e2e.test.ts b/tests/e2e/mongodb-atlas.rules.e2e.test.ts new file mode 100644 index 0000000..76828f6 --- /dev/null +++ b/tests/e2e/mongodb-atlas.rules.e2e.test.ts @@ -0,0 +1,1908 @@ +import path from 'node:path' +import { EJSON } from 'bson' +import { FastifyInstance } from 'fastify' 
+import { DeleteResult, Document, MongoClient, ObjectId } from 'mongodb' +import { initialize } from '../../packages/flowerbase/src' +import type { User } from '../../packages/flowerbase/src/auth/dtos' +import { API_VERSION, AUTH_CONFIG, DEFAULT_CONFIG } from '../../packages/flowerbase/src/constants' +import { StateManager } from '../../packages/flowerbase/src/state' +import { hashPassword, hashToken } from '../../packages/flowerbase/src/utils/crypto' + +jest.setTimeout(120000) + +const APP_ROOT = path.join(__dirname, 'app') +const DB_NAME = 'flowerbase-e2e' +const TODO_COLLECTION = 'todos' +const USER_COLLECTION = 'users' +const ACTIVITIES_COLLECTION = 'activities' +const COUNTERS_COLLECTION = 'counters' +const AUTH_USERS_COLLECTION = 'auth_users' +const RESET_PASSWORD_COLLECTION = 'reset_password_requests' +const MANAGE_REPLICA_SET = process.env.MANAGE_REPLICA_SET === 'true' +const REPLICA_SET_NAME = process.env.REPLICA_SET_NAME ?? 'rs0' +const REPLICA_SET_HOST = process.env.REPLICA_SET_HOST ?? 'mongo:27017' +const DEFAULT_DB_URL = 'mongodb://localhost:27017' +const resolveMongoUrl = () => { + const value = process.env.DB_CONNECTION_STRING?.trim() + return value && value.length > 0 ? 
value : DEFAULT_DB_URL +} + +type TestUser = User & { + id: string + role?: string + email: string + custom_data?: { + workspaces: string[] + adminIn?: string[] + } +} + + +const todoIds = { + ownerFirst: new ObjectId('000000000000000000000001'), + ownerSecond: new ObjectId('000000000000000000000002'), + otherUser: new ObjectId('000000000000000000000003') +} + +const userIds = { + owner: new ObjectId('000000000000000000000010'), + guest: new ObjectId('000000000000000000000011') +} + +const projectIds = { + ownerProject: new ObjectId('000000000000000000000020'), + guestProject: new ObjectId('000000000000000000000021') +} + +const logIds = { + activeOwner: new ObjectId('000000000000000000000030'), + inactiveOwner: new ObjectId('000000000000000000000031'), + activeGuest: new ObjectId('000000000000000000000032') +} + +const activityIds = { + ownerPrivate: new ObjectId('000000000000000000000101'), + ownerPublic: new ObjectId('000000000000000000000102'), + guestPublic: new ObjectId('000000000000000000000103') +} + +const counterIds = { + ownerOnly: new ObjectId('000000000000000000000201'), + workspaceAll: new ObjectId('000000000000000000000202'), + visibilityUsers: new ObjectId('000000000000000000000203'), + adminOnly: new ObjectId('000000000000000000000204') +} +const authUserIds = { + owner: new ObjectId('000000000000000000000090'), + guest: new ObjectId('000000000000000000000091'), + admin: new ObjectId('000000000000000000000092') +} +const ownerUser: TestUser = { + id: authUserIds.owner.toString(), + email: 'owner@example.com', + role: 'owner', + custom_data: { + role: 'owner', + workspaces: ['workspace-1'], + adminIn: ['workspace-1'] + } +} as TestUser +const guestUser: TestUser = { + id: authUserIds.guest.toString(), + email: 'guest@example.com', + role: 'guest', + custom_data: { + role: 'guest', + workspaces: ['workspace-2'], + adminIn: [] + } +} as TestUser +const adminUser: TestUser = { + id: authUserIds.admin.toString(), + email: 'admin@example.com', + role: 
'admin', + custom_data: { + role: 'admin', + workspaces: ['workspace-1', 'workspace-2'], + adminIn: ['workspace-1', 'workspace-2'] + } +} as TestUser +const TRIGGER_EVENTS_COLLECTION = 'triggerEvents' +const PROJECT_ID = 'flowerbase-e2e' +const FUNCTION_CALL_URL = `${API_VERSION}/app/${PROJECT_ID}/functions/call` +const AUTH_BASE_URL = `${API_VERSION}/app/${PROJECT_ID}/auth/providers/local-userpass` +const TOKEN_MAP: Record = {} + +const serializeValue = (value: unknown) => { + if (value === undefined) return undefined + const serialized = EJSON.stringify(value) + try { + return JSON.parse(serialized) + } catch { + return serialized + } +} + +const getTokenFor = (user: TestUser | null) => { + if (!user) return undefined + return TOKEN_MAP[user.id] +} + +const callServiceOperation = async ({ + collection, + method, + user, + query, + update, + document, + pipeline +}: { + collection: string + method: + | 'find' + | 'findOne' + | 'findOneAndUpdate' + | 'deleteOne' + | 'deleteMany' + | 'insertOne' + | 'updateOne' + | 'aggregate' + user: TestUser | null + query?: Document + update?: Document + document?: Document + pipeline?: Document[] +}) => { + const fastify = appInstance + if (!fastify) { + throw new Error('App instance not initialized') + } + + const payload = { + name: method, + arguments: [ + { + database: DB_NAME, + collection, + query: serializeValue(query), + update: serializeValue(update), + document: serializeValue(document), + pipeline: pipeline?.map((stage) => serializeValue(stage)) + } + ], + service: 'mongodb-atlas' + } + + const headers: Record = {} + const token = getTokenFor(user) + if (!token && user) { + throw new Error(`Missing token for ${user.id}`) + } + if (token) { + headers.authorization = `Bearer ${token}` + } + + const response = await fastify.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + headers, + payload + }) + + if (response.statusCode >= 400) { + const body = response.json() + const message = body && typeof body === 'object' 
&& 'message' in body ? (body as { message?: string }).message : undefined + throw new Error(message ?? response.payload ?? 'failed to execute service operation') + } + + return response.json() +} + +const createCollectionProxy = (collection: string, user: TestUser | null) => ({ + find: (query: Document = {}) => ({ + toArray: async () => callServiceOperation({ collection, method: 'find', user, query }) + }), + aggregate: (pipeline: Document[] = []) => ({ + toArray: async () => callServiceOperation({ collection, method: 'aggregate', user, pipeline }) + }), + findOne: (query: Document = {}) => callServiceOperation({ collection, method: 'findOne', user, query }), + insertOne: (document: Document) => callServiceOperation({ collection, method: 'insertOne', user, document }), + updateOne: (query: Document, update: Document) => + callServiceOperation({ collection, method: 'updateOne', user, query, update }), + findOneAndUpdate: (query: Document, update: Document) => + callServiceOperation({ collection, method: 'findOneAndUpdate', user, query, update }), + deleteOne: (query: Document) => callServiceOperation({ collection, method: 'deleteOne', user, query }), + deleteMany: (query: Document = {}) => + callServiceOperation({ collection, method: 'deleteMany', user, query }), +}) + +const getTodosCollection = (user: TestUser | null) => createCollectionProxy(TODO_COLLECTION, user) +const getUsersCollection = (user: TestUser | null) => createCollectionProxy(USER_COLLECTION, user) +const getAuthUsersCollection = (user: TestUser | null) => createCollectionProxy(AUTH_USERS_COLLECTION, user) +const getProjectsCollection = (user: TestUser | null) => createCollectionProxy('projects', user) +const getActivityLogsCollection = (user: TestUser | null) => createCollectionProxy('activityLogs', user) +const getActivitiesCollection = (user: TestUser | null) => createCollectionProxy(ACTIVITIES_COLLECTION, user) +const getCountersCollection = (user: TestUser | null) => 
createCollectionProxy(COUNTERS_COLLECTION, user) + +const registerAccessToken = (user: TestUser, authId: ObjectId) => { + if (!appInstance) { + throw new Error('App instance not initialized') + } + + const customData = user.custom_data ?? {} + const userData = { + _id: authId, + id: authId.toString(), + email: user.email, + role: user.role, + custom_data: customData, + ...customData + } + + const token = appInstance.createAccessToken({ + _id: authId, + email: user.email, + user_data: userData + } as any) + + TOKEN_MAP[user.id] = token +} + +type TodoDoc = Document & { userId: string } +type ProjectDoc = Document & { + ownerId: string + summary: string + secretNotes?: string + internalCode?: string +} +type ActivityLogDoc = Document & { + status: string + ownerId: string +} +type UserDoc = Document & { + userId: string + workspaces: string[] + avatar: string + name: string + tags: string[] + updatedAt: Date +} +type ActivityDoc = Document & { + ownerId: string + workspace: string + visibility: { + type: string + users?: string[] + } + title: string +} +type CounterDoc = Document & { + ownerId: string + workspace: string + visibility: { + type: string + users?: string[] + } + value: number +} + +let client: MongoClient +let appInstance: FastifyInstance | undefined +let originalMainPath: string | undefined + +const resetCollections = async () => { + const db = client.db(DB_NAME) + await Promise.all([ + db.collection(TODO_COLLECTION).deleteMany({}), + db.collection(USER_COLLECTION).deleteMany({}), + db.collection('projects').deleteMany({}), + db.collection('activityLogs').deleteMany({}), + db.collection(ACTIVITIES_COLLECTION).deleteMany({}), + db.collection(COUNTERS_COLLECTION).deleteMany({}), + db.collection(AUTH_USERS_COLLECTION).deleteMany({}), + db.collection(AUTH_CONFIG.refreshTokensCollection).deleteMany({}), + db.collection(RESET_PASSWORD_COLLECTION).deleteMany({}), + db.collection(TRIGGER_EVENTS_COLLECTION).deleteMany({}) + ]) + + await 
db.collection(TODO_COLLECTION).insertMany([ + { _id: todoIds.ownerFirst, title: 'Owner task 1', userId: ownerUser.id, sensitive: 'redacted' }, + { _id: todoIds.ownerSecond, title: 'Owner task 2', userId: ownerUser.id, sensitive: 'redacted' }, + { _id: todoIds.otherUser, title: 'Other user task', userId: guestUser.id, sensitive: 'redacted' } + ]) + + await db.collection(USER_COLLECTION).insertMany([ + { + _id: userIds.owner, + userId: ownerUser.id, + id: authUserIds.owner.toString(), + email: 'owner@example.com', + password: 'top-secret', + workspaces: ['workspace-1'], + avatar: 'owner.png', + name: 'Owner name', + tags: ['owner'], + updatedAt: new Date() + }, + { + _id: userIds.guest, + userId: guestUser.id, + id: authUserIds.guest.toString(), + email: 'guest@example.com', + password: 'safe-secret', + workspaces: ['workspace-2'], + avatar: 'guest.png', + name: 'Guest name', + tags: ['guest'], + updatedAt: new Date() + } + ]) + + await db.collection('projects').insertMany([ + { + _id: projectIds.ownerProject, + ownerId: ownerUser.id, + name: 'Owner project', + summary: 'Owner summary', + secretNotes: 'top secret', + internalCode: 'XYZ123' + }, + { + _id: projectIds.guestProject, + ownerId: guestUser.id, + name: 'Guest project', + summary: 'Guest summary', + secretNotes: 'guest secret', + internalCode: 'ABC987' + } + ]) + + await db.collection('activityLogs').insertMany([ + { + _id: logIds.activeOwner, + message: 'Owner active log', + status: 'active', + ownerId: ownerUser.id + }, + { + _id: logIds.inactiveOwner, + message: 'Owner inactive log', + status: 'inactive', + ownerId: ownerUser.id + }, + { + _id: logIds.activeGuest, + message: 'Guest active log', + status: 'active', + ownerId: guestUser.id + } + ]) + + await db.collection(ACTIVITIES_COLLECTION).insertMany([ + { + _id: activityIds.ownerPrivate, + title: 'Private owner activity', + ownerId: ownerUser.id, + workspace: 'workspace-1', + visibility: { + type: 'onlyme' + } + }, + { + _id: activityIds.ownerPublic, 
+ title: 'Shared activity', + ownerId: 'user-three', + workspace: 'workspace-1', + visibility: { + type: 'team' + } + }, + { + _id: activityIds.guestPublic, + title: 'Guest workspace activity', + ownerId: guestUser.id, + workspace: 'workspace-2', + visibility: { + type: 'group' + } + } + ]) + + await db.collection(COUNTERS_COLLECTION).insertMany([ + { + _id: counterIds.ownerOnly, + ownerId: ownerUser.id, + workspace: 'workspace-1', + value: 100, + visibility: { + type: 'onlyme' + } + }, + { + _id: counterIds.workspaceAll, + ownerId: 'user-three', + workspace: 'workspace-1', + value: 200, + visibility: { + type: 'all' + } + }, + { + _id: counterIds.visibilityUsers, + ownerId: 'user-four', + workspace: 'workspace-2', + value: 300, + visibility: { + type: 'private', + users: [guestUser.id] + } + }, + { + _id: counterIds.adminOnly, + ownerId: 'user-five', + workspace: 'workspace-1', + value: 400, + visibility: { + type: 'private' + } + } + ]) + + const [ownerPassword, guestPassword, adminPassword] = await Promise.all([ + hashPassword('top-secret'), + hashPassword('safe-secret'), + hashPassword('admin-secret') + ]) + + await db.collection(AUTH_USERS_COLLECTION).insertMany([ + { + _id: authUserIds.owner, + email: 'auth-owner@example.com', + password: ownerPassword, + status: 'confirmed', + createdAt: new Date(), + userId: ownerUser.id + }, + { + _id: authUserIds.guest, + email: 'auth-guest@example.com', + password: guestPassword, + status: 'confirmed', + createdAt: new Date(), + userId: guestUser.id + }, + { + _id: authUserIds.admin, + email: 'auth-admin@example.com', + password: adminPassword, + status: 'confirmed', + createdAt: new Date(), + userId: adminUser.id + } + ]) +} + +const dropReplicaSetHint = (mongoUrl: string) => { + try { + const url = new URL(mongoUrl) + url.searchParams.delete('replicaSet') + const normalized = url.toString() + return normalized.endsWith('?') ? 
normalized.slice(0, -1) : normalized
  } catch {
    return mongoUrl.split('?')[0]
  }
}

/**
 * Polls the trigger-events collection until a record for `documentId` shows up.
 * Trigger side effects are asynchronous (change-stream handlers persist their
 * log entry out of band), so tests must wait rather than assert immediately.
 *
 * @param documentId - stringified `_id` of the document that fired the trigger
 * @param attempts - polls before giving up (default keeps the original 10)
 * @param delayMs - pause between polls in ms (default keeps the original 250)
 * @returns the recorded trigger event, or `null` when it never appeared
 */
const waitForTriggerEvent = async (documentId: string, attempts = 10, delayMs = 250) => {
  const collection = client.db(DB_NAME).collection(TRIGGER_EVENTS_COLLECTION)
  for (let attempt = 0; attempt < attempts; attempt++) {
    const record = await collection.findOne({ documentId })
    if (record) {
      return record
    }
    await new Promise((resolve) => setTimeout(resolve, delayMs))
  }
  return null
}

/**
 * Recognizes the family of errors MongoDB raises when `replSetGetStatus` runs
 * before `replSetInitiate`. Different server versions word the condition
 * differently, so both known message fragments and the NotYetInitialized
 * error code (94) / codeName are matched.
 */
const isReplicaSetNotInitializedError = (error: unknown) => {
  if (!(error instanceof Error)) {
    return false
  }
  const message = error.message.toLowerCase()
  return (
    message.includes('not yet initialized') ||
    message.includes('no replset config has been received') ||
    message.includes('no host described in new configuration') ||
    message.includes('not yet a member of a replset') ||
    message.includes('replset not yet initialized') ||
    ('code' in error && (error as { code?: number }).code === 94) ||
    ('codeName' in error && (error as { codeName?: string }).codeName === 'NotYetInitialized')
  )
}

/**
 * Waits for the replica set to elect a PRIMARY, initiating a single-node
 * replica set once if the server reports it was never configured. Polls once
 * per second for up to 30 attempts.
 *
 * The last error observed while polling is preserved and appended to the
 * timeout message, so a misconfigured cluster (auth failure, wrong host, ...)
 * is diagnosable instead of surfacing as a bare timeout.
 *
 * @throws when no PRIMARY appears within the retry budget
 */
const ensureReplicaSet = async (client: MongoClient) => {
  const adminDb = client.db('admin')
  let initiated = false
  let lastError: unknown
  for (let attempt = 0; attempt < 30; attempt++) {
    try {
      const status = await adminDb.command({ replSetGetStatus: 1 })
      if (status.members?.some((member: { stateStr: string }) => member.stateStr === 'PRIMARY')) {
        return
      }
    } catch (error) {
      lastError = error
      if (!initiated && isReplicaSetNotInitializedError(error)) {
        // First "not yet initialized" error: configure a one-member replica set.
        await adminDb.command({
          replSetInitiate: {
            _id: REPLICA_SET_NAME,
            members: [{ _id: 0, host: REPLICA_SET_HOST }]
          }
        })
        initiated = true
      }
    }

    await new Promise((resolve) => setTimeout(resolve, 1000))
  }

  // Surface the most recent failure so the timeout is actionable.
  const detail = lastError instanceof Error ? `: ${lastError.message}` : ''
  throw new Error(`Replica set did not reach PRIMARY in time${detail}`)
}

describe('MongoDB Atlas rule enforcement (e2e)', () => {
  beforeAll(async () => {
    const mongoUrl = resolveMongoUrl()
    if (MANAGE_REPLICA_SET) {
      const maintenanceClient = new
MongoClient(dropReplicaSetHint(mongoUrl), { + serverSelectionTimeoutMS: 60000, + directConnection: true + }) + try { + await maintenanceClient.connect() + await ensureReplicaSet(maintenanceClient) + } finally { + await maintenanceClient.close() + } + } + + client = new MongoClient(mongoUrl, { serverSelectionTimeoutMS: 60000 }) + await client.connect() + originalMainPath = require.main?.path + if (require.main) { + require.main.path = APP_ROOT + } + + await initialize({ + projectId: 'flowerbase-e2e', + mongodbUrl: mongoUrl, + jwtSecret: 'e2e-secret', + port: 0, + host: '127.0.0.1', + basePath: APP_ROOT + }) + + appInstance = StateManager.select('app') + registerAccessToken(ownerUser, authUserIds.owner) + registerAccessToken(guestUser, authUserIds.guest) + registerAccessToken(adminUser, authUserIds.admin) + await new Promise((resolve) => setTimeout(resolve, 300)) + }) + + beforeEach(async () => { + await resetCollections() + }) + + it('requires authentication to access MongoDB services', async () => { + await expect(getTodosCollection(null).find({}).toArray()).rejects.toThrow() + }) + + afterAll(async () => { + await appInstance?.close() + await client.close() + if (require.main) { + require.main.path = originalMainPath + } + }) + + it('exports only the requesting user todos when reading', async () => { + const todos = (await getTodosCollection(ownerUser).find({}).toArray()) as TodoDoc[] + expect(todos).toHaveLength(2) + expect(todos.every((todo) => todo.userId === ownerUser.id)).toBe(true) + }) + + it('denies inserting a todo for another user', async () => { + await expect( + getTodosCollection(ownerUser).insertOne({ + title: 'Not allowed', + userId: guestUser.id + }) + ).rejects.toThrow('Insert not permitted') + }) + + it('allows owners to insert their own todos', async () => { + const insertResult = await getTodosCollection(ownerUser).insertOne({ + title: 'New owner task', + userId: ownerUser.id + }) + expect(insertResult.insertedId).toBeDefined() + const inserted 
= (await getTodosCollection(ownerUser).findOne({ + _id: insertResult.insertedId + })) as TodoDoc | null + expect(inserted).toBeDefined() + expect(inserted?.userId).toBe(ownerUser.id) + }) + + it('applies filters to aggregations as well', async () => { + const pipeline: Document[] = [ + { + $group: { + _id: '$userId', + count: { $sum: 1 } + } + } + ] + + const summary = (await getTodosCollection(ownerUser).aggregate(pipeline).toArray()) as Array<{ + _id: string + count: number + }> + + expect(summary).toHaveLength(1) + expect(summary[0]).toEqual({ _id: ownerUser.id, count: 2 }) + }) + + it('blocks pipelines with disallowed stages in aggregates', async () => { + const pipeline: Document[] = [ + { + $out: 'forbidden' + } + ] + + await expect( + getTodosCollection(ownerUser).aggregate(pipeline).toArray() + ).rejects.toThrow('Stage $out is not allowed in client aggregate pipelines') + }) + + it('requires a pipeline for unionWith in client aggregates', async () => { + const pipeline: Document[] = [ + { + $unionWith: 'projects' + } + ] + + await expect( + getTodosCollection(ownerUser).aggregate(pipeline).toArray() + ).rejects.toThrow('$unionWith must provide a pipeline when called from the client') + }) + + it('applies filters in activityLogs aggregations for non-admin users', async () => { + const pipeline: Document[] = [ + { + $group: { + _id: '$status', + count: { $sum: 1 } + } + } + ] + + const summary = (await getActivityLogsCollection(ownerUser) + .aggregate(pipeline) + .toArray()) as Array<{ _id: string; count: number }> + + expect(summary).toHaveLength(1) + expect(summary[0]._id).toBe('active') + }) + + it('allows admins to aggregate all activityLogs', async () => { + const pipeline: Document[] = [ + { + $group: { + _id: '$status', + count: { $sum: 1 } + } + } + ] + + const summary = (await getActivityLogsCollection(adminUser) + .aggregate(pipeline) + .toArray()) as Array<{ _id: string; count: number }> + + const statuses = summary.map((item) => item._id).sort() + 
expect(statuses).toEqual(['active', 'inactive']) + }) + + it('prevents deleting todos that do not belong to the user', async () => { + await expect( + getTodosCollection(ownerUser).deleteOne({ _id: todoIds.otherUser }) + ).rejects.toThrow('Delete not permitted') + }) + + it('allows deleting owned todos', async () => { + const deleteResult = (await getTodosCollection(ownerUser).deleteOne({ + _id: todoIds.ownerFirst + })) as DeleteResult + expect(deleteResult.deletedCount).toBe(1) + }) + + it('allows users to delete only their own todos with deleteMany', async () => { + const deleteResult = (await getTodosCollection(ownerUser).deleteMany({})) as DeleteResult + expect(deleteResult.deletedCount).toBe(2) + + const remainingOwner = (await getTodosCollection(ownerUser).find({}).toArray()) as TodoDoc[] + expect(remainingOwner).toHaveLength(0) + + const remainingGuest = (await getTodosCollection(guestUser).find({}).toArray()) as TodoDoc[] + expect(remainingGuest).toHaveLength(1) + }) + + it('does not delete others\' documents with deleteMany', async () => { + const deleteResult = (await getTodosCollection(ownerUser).deleteMany({ + userId: guestUser.id + })) as DeleteResult + expect(deleteResult.deletedCount).toBe(0) + + const remainingOwner = (await getTodosCollection(ownerUser).find({}).toArray()) as TodoDoc[] + expect(remainingOwner).toHaveLength(2) + }) + + it('allows guests to delete their own todo with deleteOne', async () => { + const deleteResult = (await getTodosCollection(guestUser).deleteOne({ + _id: todoIds.otherUser + })) as DeleteResult + expect(deleteResult.deletedCount).toBe(1) + }) + + it('allows owners to update their own todos with findOneAndUpdate', async () => { + const updatedTitle = 'Owner task updated' + await getTodosCollection(ownerUser).findOneAndUpdate( + { _id: todoIds.ownerFirst }, + { $set: { title: updatedTitle } } + ) + + const updated = (await getTodosCollection(ownerUser).findOne({ + _id: todoIds.ownerFirst + })) as TodoDoc | null + 
expect(updated?.title).toBe(updatedTitle) + }) + + it('prevents guests from updating others todos with findOneAndUpdate', async () => { + await expect( + getTodosCollection(guestUser).findOneAndUpdate( + { _id: todoIds.ownerFirst }, + { $set: { title: 'Should fail' } } + ) + ).rejects.toThrow('Update not permitted') + }) + + it('limits profiles to shared workspaces', async () => { + const ownerUsers = (await getUsersCollection(ownerUser).find({}).toArray()) as UserDoc[] + expect(ownerUsers).toHaveLength(1) + expect(ownerUsers[0].workspaces).toContain('workspace-1') + expect(ownerUsers[0].userId).toBe(ownerUser.id) + + const guestUsers = (await getUsersCollection(guestUser).find({}).toArray()) as UserDoc[] + expect(guestUsers).toHaveLength(1) + expect(guestUsers[0].workspaces).toContain('workspace-2') + expect(guestUsers[0].userId).toBe(guestUser.id) + + const adminUsers = (await getUsersCollection(adminUser).find({}).toArray()) as UserDoc[] + expect(adminUsers).toHaveLength(2) + }) + + it('allows profile updates only for the owner', async () => { + const updatedName = 'Owner updated' + const updateResult = await getUsersCollection(ownerUser).updateOne( + { _id: userIds.owner }, + { $set: { name: updatedName } } + ) + expect(updateResult.matchedCount).toBe(1) + + const ownerRecord = (await getUsersCollection(ownerUser).findOne({ + _id: userIds.owner + })) as UserDoc | null + expect(ownerRecord?.name).toBe(updatedName) + + await expect( + getUsersCollection(guestUser).updateOne({ _id: userIds.owner }, { $set: { name: 'Hijack' } }) + ).rejects.toThrow('Update not permitted') + }) + + it('blocks access to auth_users collection without rules', async () => { + await expect(getAuthUsersCollection(ownerUser).find({}).toArray()).rejects.toThrow( + 'READ FORBIDDEN!' 
+ ) + }) + + it('blocks inserts into auth_users without rules', async () => { + await expect( + getAuthUsersCollection(ownerUser).insertOne({ + userId: ownerUser.id, + email: 'blocked@example.com', + password: 'xxx' + }) + ).rejects.toThrow('CREATE FORBIDDEN!') + }) + + it('limits projects to the owner and hides forbidden fields', async () => { + const projects = (await getProjectsCollection(ownerUser).find({}).toArray()) as ProjectDoc[] + expect(projects).toHaveLength(1) + expect(projects[0].ownerId).toBe(ownerUser.id) + expect(projects[0]).not.toHaveProperty('secretNotes') + expect(projects[0]).not.toHaveProperty('internalCode') + expect(projects[0]).toHaveProperty('summary') + }) + + it('allows owners to update their project summary via function rules', async () => { + const updateResult = await getProjectsCollection(ownerUser).updateOne( + { _id: projectIds.ownerProject }, + { $set: { summary: 'Updated summary' } } + ) + expect(updateResult.matchedCount).toBe(1) + const updated = (await getProjectsCollection(ownerUser).findOne({ + _id: projectIds.ownerProject + })) as ProjectDoc | null + expect(updated?.summary).toBe('Updated summary') + }) + + it('prevents guests from updating projects they do not own', async () => { + await expect( + getProjectsCollection(guestUser).updateOne( + { _id: projectIds.ownerProject }, + { $set: { summary: 'Should be blocked' } } + ) + ).rejects.toThrow('Update not permitted') + }) + + it('lets admins read all projects and see privileged fields', async () => { + const projects = (await getProjectsCollection(adminUser).find({}).toArray()) as ProjectDoc[] + expect(projects.length).toBeGreaterThanOrEqual(2) + const ownerProject = projects.find((project) => project.ownerId === ownerUser.id) + expect(ownerProject).toBeDefined() + expect(ownerProject).toHaveProperty('secretNotes', 'top secret') + }) + + it('returns only active activity logs for non-admin roles', async () => { + const logs = (await 
getActivityLogsCollection(ownerUser).find({}).toArray()) as ActivityLogDoc[] + expect(logs.every((log) => log.status === 'active')).toBe(true) + expect(logs).toHaveLength(2) + }) + + it('allows admins to read all logs and insert new entries', async () => { + const logs = (await getActivityLogsCollection(adminUser).find({}).toArray()) as ActivityLogDoc[] + expect(logs.some((log) => log.status === 'inactive')).toBe(true) + + const insertResult = await getActivityLogsCollection(adminUser).insertOne({ + message: 'Admin log', + status: 'inactive', + ownerId: adminUser.id + }) + expect(insertResult.insertedId).toBeDefined() + }) + + it('prevents non-admin users from inserting activity logs', async () => { + await expect( + getActivityLogsCollection(ownerUser).insertOne({ + message: 'Blocked log', + status: 'inactive', + ownerId: ownerUser.id + }) + ).rejects.toThrow('Insert not permitted') + }) + + it('respects workspace/visibility filters for activities', async () => { + const ownerActivities = (await getActivitiesCollection(ownerUser).find({}).toArray()) as ActivityDoc[] + expect(ownerActivities).toHaveLength(2) + expect(ownerActivities.every((activity) => activity.workspace === 'workspace-1')).toBe(true) + + const guestActivities = (await getActivitiesCollection(guestUser).find({}).toArray()) as ActivityDoc[] + expect(guestActivities).toHaveLength(1) + expect(guestActivities[0].workspace).toBe('workspace-2') + }) + + it('restricts activity writes to owner or admin', async () => { + const newTitle = 'Updated private activity' + const updateResult = await getActivitiesCollection(ownerUser).updateOne( + { _id: activityIds.ownerPrivate }, + { $set: { title: newTitle } } + ) + expect(updateResult.matchedCount).toBe(1) + + const updatedActivity = (await getActivitiesCollection(ownerUser).findOne({ + _id: activityIds.ownerPrivate + })) as ActivityDoc | null + expect(updatedActivity?.title).toBe(newTitle) + + await expect( + getActivitiesCollection(ownerUser).updateOne( + { 
_id: activityIds.ownerPublic }, + { $set: { title: 'Blocked change' } } + ) + ).rejects.toThrow('Update not permitted') + + const adminChange = await getActivitiesCollection(adminUser).updateOne( + { _id: activityIds.ownerPublic }, + { $set: { title: 'Admin changed' } } + ) + expect(adminChange.matchedCount).toBe(1) + + const adminActivity = (await getActivitiesCollection(adminUser).findOne({ + _id: activityIds.ownerPublic + })) as ActivityDoc | null + expect(adminActivity?.title).toBe('Admin changed') + }) + + it('applies complex visibility filters on counters', async () => { + const ownerCounters = (await getCountersCollection(ownerUser).find({}).toArray()) as CounterDoc[] + expect(ownerCounters).toHaveLength(3) + expect(ownerCounters.every((counter) => counter.workspace === 'workspace-1')).toBe(true) + + const guestCounters = (await getCountersCollection(guestUser).find({}).toArray()) as CounterDoc[] + expect(guestCounters).toHaveLength(1) + expect(guestCounters[0].visibility.users).toContain(guestUser.id) + + const adminCounters = (await getCountersCollection(adminUser).find({}).toArray()) as CounterDoc[] + expect(adminCounters).toHaveLength(4) + }) + + it('requires admin privileges to modify protected counters', async () => { + const ownerUpdate = await getCountersCollection(ownerUser).updateOne( + { _id: counterIds.adminOnly }, + { $set: { value: 450 } } + ) + expect(ownerUpdate.matchedCount).toBe(1) + + const ownerCounter = (await getCountersCollection(ownerUser).findOne({ + _id: counterIds.adminOnly + })) as CounterDoc | null + expect(ownerCounter?.value).toBe(450) + + await expect( + getCountersCollection(guestUser).updateOne({ _id: counterIds.adminOnly }, { $set: { value: 10 } }) + ).rejects.toThrow('Update not permitted') + + const adminUpdate = await getCountersCollection(adminUser).updateOne( + { _id: counterIds.adminOnly }, + { $set: { value: 500 } } + ) + expect(adminUpdate.matchedCount).toBe(1) + + const adminCounter = (await 
getCountersCollection(adminUser).findOne({ + _id: counterIds.adminOnly + })) as CounterDoc | null + expect(adminCounter?.value).toBe(500) + }) + + it('triggers activityLogs stream and saves the log', async () => { + const newActivityId = new ObjectId() + await getActivityLogsCollection(adminUser).insertOne({ + _id: newActivityId, + title: 'Trigger test activity', + ownerId: adminUser.id, + workspace: 'workspace-1', + visibility: { + type: 'team' + } + }) + + const recorded = await waitForTriggerEvent(newActivityId.toString()) + expect(recorded).not.toBeNull() + expect(recorded?.operationType).toBe('insert') + expect(recorded?.documentId).toBe(newActivityId.toString()) + }) + + it('executes logTriggerEvent function directly', async () => { + const changeEventId = new ObjectId() + const token = getTokenFor(adminUser) + expect(token).toBeDefined() + + const changeEvent: Document = { + operationType: 'insert', + ns: { + coll: 'activityLogs', + db: DB_NAME + }, + documentKey: { + _id: changeEventId + }, + fullDocument: { + _id: changeEventId, + ownerId: adminUser.id, + workspace: 'workspace-1' + } + } + + const response = await appInstance!.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + headers: { + authorization: `Bearer ${token}` + }, + payload: { + name: 'logTriggerEvent', + arguments: [changeEvent] + } + }) + + expect(response.statusCode).toBe(200) + expect(response.json()).toEqual({ + recorded: true, + documentId: changeEventId.toString() + }) + + const logged = await client.db(DB_NAME).collection(TRIGGER_EVENTS_COLLECTION).findOne({ + documentId: changeEventId.toString() + }) + + expect(logged).toMatchObject({ + operationType: 'insert', + collection: 'activityLogs', + documentId: changeEventId.toString() + }) + }) + + it('blocks private function when invoked via API', async () => { + const token = getTokenFor(ownerUser) + expect(token).toBeDefined() + + const response = await appInstance!.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + headers: { + 
authorization: `Bearer ${token}` + }, + payload: { + name: 'privateEcho', + arguments: [] + } + }) + + expect(response.statusCode).toBe(500) + const body = response.json() as { message?: string } + expect(body.message).toBe('Function "privateEcho" is private') + }) + + it('allows run_as_system function to read all users', async () => { + const token = getTokenFor(adminUser) + expect(token).toBeDefined() + + const response = await appInstance!.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + headers: { + authorization: `Bearer ${token}` + }, + payload: { + name: 'systemListUsers', + arguments: [] + } + }) + + expect(response.statusCode).toBe(200) + const body = response.json() as { count: number; users: Array<{ email: string }> } + expect(body.count).toBe(2) + expect(body.users).toHaveLength(2) + expect(body.users.map((user) => user.email).sort()).toEqual([ + 'guest@example.com', + 'owner@example.com' + ]) + }) + + it('blocks run_as_system=false function from accessing auth_users', async () => { + const token = getTokenFor(ownerUser) + expect(token).toBeDefined() + + const response = await appInstance!.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + headers: { + authorization: `Bearer ${token}` + }, + payload: { + name: 'publicListAuthUsers', + arguments: [] + } + }) + + expect(response.statusCode).toBe(500) + const body = response.json() as { error?: string; error_code?: string } + expect(body.error_code).toBe('FunctionExecutionError') + const parsedError = body.error ? 
JSON.parse(body.error) as { message?: string } : {} + expect(parsedError.message).toBe('READ FORBIDDEN!') + }) + + it('exposes the new API endpoint through the dedicated function', async () => { + const response = await appInstance!.inject({ + method: 'GET', + url: `/app/${PROJECT_ID}/endpoint/api/checkWorkspace?workspace=workspace-1` + }) + expect(response.statusCode).toBe(202) + expect(response.json()).toEqual({ + success: true, + workspace: 'workspace-1', + source: 'api_checkWorkspace' + }) + }) + + it('allows registration and login via local-userpass', async () => { + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email: 'new-user@example.com', + password: 'new-user-pass' + } + }) + expect(registration.statusCode).toBe(201) + const registrationBody = registration.json() as { userId?: string } + expect(registrationBody.userId).toBeDefined() + + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: 'auth-owner@example.com', + password: 'top-secret' + } + }) + expect(login.statusCode).toBe(200) + const loginBody = login.json() as { + access_token?: string + refresh_token?: string + user_id?: string + } + expect(loginBody.access_token).toBeDefined() + expect(loginBody.refresh_token).toBeDefined() + expect(loginBody.user_id).toBe(authUserIds.owner.toString()) + }) + + it('runs confirmation function when autoConfirm is false', async () => { + const originalConfig = AUTH_CONFIG.localUserpassConfig + AUTH_CONFIG.localUserpassConfig = { + ...originalConfig, + autoConfirm: false, + runConfirmationFunction: true, + confirmationFunctionName: 'confirmUser' + } + + try { + const email = 'confirm-success@example.com' + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email, + password: 'auto-pass' + } + }) + expect(registration.statusCode).toBe(201) + + const 
confirmationEvent = await waitForTriggerEvent(email) + expect(confirmationEvent).toBeDefined() + expect(confirmationEvent?.type).toBe('user_confirmation') + expect(confirmationEvent?.email).toBe(email) + + const authUser = await client + .db(DB_NAME) + .collection(AUTH_USERS_COLLECTION) + .findOne({ email }) + expect(authUser?.status).toBe('confirmed') + } finally { + AUTH_CONFIG.localUserpassConfig = originalConfig + } + }) + + it('keeps users pending when confirmation function returns pending', async () => { + const originalConfig = AUTH_CONFIG.localUserpassConfig + AUTH_CONFIG.localUserpassConfig = { + ...originalConfig, + autoConfirm: false, + runConfirmationFunction: true, + confirmationFunctionName: 'confirmUser' + } + + try { + const email = 'pending-user@example.com' + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email, + password: 'auto-pass' + } + }) + expect(registration.statusCode).toBe(201) + + const authUser = await client + .db(DB_NAME) + .collection(AUTH_USERS_COLLECTION) + .findOne({ email }) + expect(authUser?.status).toBe('pending') + + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: email, + password: 'auto-pass' + } + }) + expect(login.statusCode).toBe(500) + const loginBody = login.json() as { message?: string } + expect(loginBody.message).toBe('User not confirmed') + } finally { + AUTH_CONFIG.localUserpassConfig = originalConfig + } + }) + + it('confirms users via token and tokenId from the client', async () => { + const originalConfig = AUTH_CONFIG.localUserpassConfig + AUTH_CONFIG.localUserpassConfig = { + ...originalConfig, + autoConfirm: false, + runConfirmationFunction: true, + confirmationFunctionName: 'confirmUser' + } + + try { + const email = 'pending-confirm@example.com' + const password = 'auto-pass' + const registration = await appInstance!.inject({ + method: 'POST', + url: 
`${AUTH_BASE_URL}/register`, + payload: { + email, + password + } + }) + expect(registration.statusCode).toBe(201) + + const authUser = await client + .db(DB_NAME) + .collection(AUTH_USERS_COLLECTION) + .findOne({ email }) as { confirmationToken?: string; confirmationTokenId?: string } | null + expect(authUser?.confirmationToken).toBeDefined() + expect(authUser?.confirmationTokenId).toBeDefined() + + const confirm = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/confirm`, + payload: { + token: authUser!.confirmationToken, + tokenId: authUser!.confirmationTokenId + } + }) + expect(confirm.statusCode).toBe(200) + + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: email, + password + } + }) + expect(login.statusCode).toBe(200) + } finally { + AUTH_CONFIG.localUserpassConfig = originalConfig + } + }) + + it('auto-confirms users on registration when autoConfirm is enabled', async () => { + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email: 'autoconfirm-user@example.com', + password: 'auto-pass' + } + }) + expect(registration.statusCode).toBe(201) + const registrationBody = registration.json() as { userId?: string } + expect(registrationBody.userId).toBeDefined() + + const authUser = await client + .db(DB_NAME) + .collection(AUTH_USERS_COLLECTION) + .findOne({ _id: new ObjectId(registrationBody.userId) }) + expect(authUser?.status).toBe('confirmed') + }) + + it('fires on_user_creation_function_name on auto-confirmed registrations', async () => { + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email: 'autoconfirm-trigger@example.com', + password: 'auto-pass' + } + }) + expect(registration.statusCode).toBe(201) + const registrationBody = registration.json() as { userId?: string } + expect(registrationBody.userId).toBeDefined() + + const 
creationEvent = await waitForTriggerEvent(registrationBody.userId!) + expect(creationEvent).toBeDefined() + expect(creationEvent?.type).toBe('on_user_creation') + expect(creationEvent?.email).toBe('autoconfirm-trigger@example.com') + }) + + it('calls on_user_creation_function_name when auth user becomes confirmed', async () => { + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email: 'trigger-user@example.com', + password: 'trigger-pass' + } + }) + expect(registration.statusCode).toBe(201) + + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: 'trigger-user@example.com', + password: 'trigger-pass' + } + }) + expect(login.statusCode).toBe(200) + + const loginBody = login.json() as { user_id?: string } + expect(loginBody.user_id).toBeDefined() + + const creationEvent = await waitForTriggerEvent(loginBody.user_id!) + expect(creationEvent).toBeDefined() + expect(creationEvent?.type).toBe('on_user_creation') + expect(creationEvent?.email).toBe('trigger-user@example.com') + }) + + it('rejects registration when the email is already used', async () => { + const payload = { + email: 'duplicate@example.com', + password: 'dup-pass' + } + + const first = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload + }) + expect(first.statusCode).toBe(201) + + const second = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload + }) + expect(second.statusCode).toBe(500) + const body = second.json() as { message?: string } + expect(body.message).toBe('This email address is already used') + }) + + it('revokes refresh tokens on logout', async () => { + const ip = '203.0.113.50' + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + remoteAddress: ip, + payload: { + username: 'auth-owner@example.com', + password: 'top-secret' + } + }) + 
expect(login.statusCode).toBe(200) + const loginBody = login.json() as { refresh_token?: string } + expect(loginBody.refresh_token).toBeDefined() + + const refreshToken = loginBody.refresh_token! + const session = await appInstance!.inject({ + method: 'POST', + url: `${API_VERSION}/auth/session`, + remoteAddress: ip, + headers: { + authorization: `Bearer ${refreshToken}` + } + }) + expect(session.statusCode).toBe(201) + + const logout = await appInstance!.inject({ + method: 'DELETE', + url: `${API_VERSION}/auth/session`, + remoteAddress: ip, + headers: { + authorization: `Bearer ${refreshToken}` + } + }) + expect(logout.statusCode).toBe(200) + + const sessionAfterLogout = await appInstance!.inject({ + method: 'POST', + url: `${API_VERSION}/auth/session`, + remoteAddress: ip, + headers: { + authorization: `Bearer ${refreshToken}` + } + }) + expect(sessionAfterLogout.statusCode).toBe(500) + }) + + it('rejects access tokens issued before logout for protected functions', async () => { + const ip = '203.0.113.55' + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + remoteAddress: ip, + payload: { + username: 'auth-owner@example.com', + password: 'top-secret' + } + }) + expect(login.statusCode).toBe(200) + const loginBody = login.json() as { + access_token?: string + refresh_token?: string + } + expect(loginBody.access_token).toBeDefined() + expect(loginBody.refresh_token).toBeDefined() + + const decodedAccessToken = JSON.parse( + Buffer.from(loginBody.access_token!.split('.')[1], 'base64').toString('utf8') + ) + const accessIssuedAt = Number(decodedAccessToken.iat) + expect(Number.isFinite(accessIssuedAt)).toBe(true) + + const functionPayload = { + name: 'find', + arguments: [ + { + database: DB_NAME, + collection: TODO_COLLECTION, + query: {} + } + ], + service: 'mongodb-atlas' + } + + const callBeforeLogout = await appInstance!.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + remoteAddress: ip, + headers: { + 
authorization: `Bearer ${loginBody.access_token}` + }, + payload: functionPayload + }) + expect(callBeforeLogout.statusCode).toBe(200) + + const logout = await appInstance!.inject({ + method: 'DELETE', + url: `${API_VERSION}/auth/session`, + remoteAddress: ip, + headers: { + authorization: `Bearer ${loginBody.refresh_token}` + } + }) + expect(logout.statusCode).toBe(200) + + const authUserAfterLogout = await client + .db(DB_NAME) + .collection(AUTH_CONFIG.authCollection) + .findOne({ _id: authUserIds.owner }) + expect(authUserAfterLogout?.lastLogoutAt).toBeDefined() + const lastLogoutTime = new Date(authUserAfterLogout!.lastLogoutAt).getTime() + expect(lastLogoutTime).toBeGreaterThanOrEqual(accessIssuedAt * 1000) + + const callAfterLogout = await appInstance!.inject({ + method: 'POST', + url: FUNCTION_CALL_URL, + remoteAddress: ip, + headers: { + authorization: `Bearer ${loginBody.access_token}` + }, + payload: functionPayload + }) + expect(callAfterLogout.statusCode).toBe(401) + }) + + it('rejects expired refresh tokens', async () => { + const ip = '203.0.113.51' + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + remoteAddress: ip, + payload: { + username: 'auth-owner@example.com', + password: 'top-secret' + } + }) + expect(login.statusCode).toBe(200) + const loginBody = login.json() as { refresh_token?: string } + expect(loginBody.refresh_token).toBeDefined() + + const refreshToken = loginBody.refresh_token! 
+ const refreshTokenHash = hashToken(refreshToken) + await client + .db(DB_NAME) + .collection(AUTH_CONFIG.refreshTokensCollection) + .updateOne( + { tokenHash: refreshTokenHash }, + { $set: { expiresAt: new Date(Date.now() - 1000) } } + ) + + const sessionAfterExpiry = await appInstance!.inject({ + method: 'POST', + url: `${API_VERSION}/auth/session`, + remoteAddress: ip, + headers: { + authorization: `Bearer ${refreshToken}` + } + }) + expect(sessionAfterExpiry.statusCode).toBe(500) + }) + + it('rejects registration with invalid email or password', async () => { + const invalidEmail = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email: 'not-an-email', + password: 'valid-pass-1' + } + }) + expect(invalidEmail.statusCode).toBe(400) + + const invalidPassword = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email: 'valid-user@example.com', + password: 'short' + } + }) + expect(invalidPassword.statusCode).toBe(400) + }) + + it('rejects login with invalid email or password format', async () => { + const invalidEmail = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: 'not-an-email', + password: 'top-secret' + } + }) + expect(invalidEmail.statusCode).toBe(400) + + const invalidPassword = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: 'auth-owner@example.com', + password: 'short' + } + }) + expect(invalidPassword.statusCode).toBe(400) + }) + + it('rate limits login attempts by IP', async () => { + const limit = DEFAULT_CONFIG.AUTH_LOGIN_MAX_ATTEMPTS + const ip = '203.0.113.10' + for (let i = 0; i < limit; i += 1) { + const response = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + remoteAddress: ip, + payload: { + username: 'auth-owner@example.com', + password: 'wrong-password' + } + }) + expect(response.statusCode).toBe(500) + } 
+ + const limited = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + remoteAddress: ip, + payload: { + username: 'auth-owner@example.com', + password: 'wrong-password' + } + }) + expect(limited.statusCode).toBe(429) + }) + + it('rate limits reset requests by IP', async () => { + const limit = DEFAULT_CONFIG.AUTH_RESET_MAX_ATTEMPTS + const ip = '203.0.113.11' + for (let i = 0; i < limit; i += 1) { + const response = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset/send`, + remoteAddress: ip, + payload: { + email: 'auth-owner@example.com' + } + }) + expect(response.statusCode).toBe(202) + } + + const limited = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset/send`, + remoteAddress: ip, + payload: { + email: 'auth-owner@example.com' + } + }) + expect(limited.statusCode).toBe(429) + }) + + it('handles password reset via reset/send and confirm reset', async () => { + const requestedPassword = 'request-pass-1' + const newPassword = 'new-pass-1' + const resetCall = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset/send`, + payload: { + email: 'auth-owner@example.com' + } + }) + expect(resetCall.statusCode).toBe(202) + + const resetRequest = await client + .db(DB_NAME) + .collection(RESET_PASSWORD_COLLECTION) + .findOne({ email: 'auth-owner@example.com' }) + expect(resetRequest).toBeDefined() + + const confirmReset = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset`, + payload: { + password: newPassword, + token: resetRequest?.token, + tokenId: resetRequest?.tokenId + } + }) + expect(confirmReset.statusCode).toBe(200) + + const login = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: 'auth-owner@example.com', + password: newPassword + } + }) + expect(login.statusCode).toBe(200) + const loginBody = login.json() as { access_token?: string } + 
expect(loginBody.access_token).toBeDefined() + }) + + it('allows password changes and invalidates the old password', async () => { + const email = 'change-pass@example.com' + const oldPassword = 'old-pass-1' + const newPassword = 'new-pass-2' + + const registration = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/register`, + payload: { + email, + password: oldPassword + } + }) + expect(registration.statusCode).toBe(201) + + const loginOld = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: email, + password: oldPassword + } + }) + expect(loginOld.statusCode).toBe(200) + + const requestedPassword = 'request-pass-2' + const resetCall = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset/call`, + payload: { + email, + password: requestedPassword, + arguments: [] + } + }) + expect(resetCall.statusCode).toBe(202) + + const resetRequest = await client + .db(DB_NAME) + .collection(RESET_PASSWORD_COLLECTION) + .findOne({ email }) + expect(resetRequest).toBeDefined() + + const confirmReset = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset`, + payload: { + password: newPassword, + token: resetRequest?.token, + tokenId: resetRequest?.tokenId + } + }) + expect(confirmReset.statusCode).toBe(200) + + const loginOldAgain = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: email, + password: oldPassword + } + }) + expect(loginOldAgain.statusCode).toBe(500) + + const loginNew = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: email, + password: newPassword + } + }) + expect(loginNew.statusCode).toBe(200) + }) + + it('rejects login with invalid credentials', async () => { + const response = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/login`, + payload: { + username: 'auth-owner@example.com', + password: 
'wrong-password' + } + }) + + expect(response.statusCode).toBe(500) + }) + + it('blocks password reset requests for unregistered emails', async () => { + const response = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset/send`, + payload: { + email: 'missing-user@example.com' + } + }) + + expect(response.statusCode).toBe(202) + }) + + it('blocks reset confirmation without a valid token', async () => { + const response = await appInstance!.inject({ + method: 'POST', + url: `${AUTH_BASE_URL}/reset`, + payload: { + password: 'any-password', + token: 'invalid', + tokenId: 'invalid' + } + }) + + expect(response.statusCode).toBe(500) + const body = response.json() as { message?: string } + expect(body.message).toBe('Invalid token or tokenId provided') + }) + + // CUSTOM TESTS + it('tries to read from auth_users', async () => { + const res = getAuthUsersCollection(ownerUser).find({}).toArray() + await expect(res).rejects.toThrow('READ FORBIDDEN!') + }) + + it('tries to read from auth_users via a lookup', async () => { + const pipeline: Document[] = [ + { + $lookup: { + from: "auth_users", + localField: 'userId', + foreignField: 'userId', + as: 'users' + } + } + ] + + await expect( + getTodosCollection(ownerUser).aggregate(pipeline).toArray() + ).rejects.toThrow('READ FORBIDDEN!') + }) + + it('blocks unionWith to auth_users', async () => { + const pipeline: Document[] = [ + { + $unionWith: { + coll: 'auth_users', + pipeline: [ + { + $match: { + userId: ownerUser.id + } + } + ] + } + } + ] + + await expect( + getTodosCollection(ownerUser).aggregate(pipeline).toArray() + ).rejects.toThrow('READ FORBIDDEN!') + }) + + it('blocks facet lookup to auth_users', async () => { + const pipeline: Document[] = [ + { + $facet: { + data: [ + { + $lookup: { + from: 'auth_users', + let: { userId: '$userId' }, + pipeline: [ + { + $match: { + $expr: { $eq: ['$userId', '$$userId'] } + } + } + ], + as: 'users' + } + } + ] + } + } + ] + + await expect( + 
getTodosCollection(ownerUser).aggregate(pipeline).toArray()
+    ).rejects.toThrow('READ FORBIDDEN!')
+  })
+
+  it('filters sensitive fields in aggregate lookups', async () => {
+    const pipeline: Document[] = [
+      {
+        $match: {
+          userId: ownerUser.id
+        }
+      },
+      {
+        $lookup: {
+          from: 'projects',
+          let: { ownerId: '$userId' },
+          pipeline: [
+            {
+              $match: {
+                $expr: { $eq: ['$ownerId', '$$ownerId'] }
+              }
+            }
+          ],
+          as: 'projects'
+        }
+      }
+    ]
+
+    const res = (await getTodosCollection(ownerUser).aggregate(pipeline).toArray()) as Array<{
+      projects?: ProjectDoc[]
+    }>
+
+    const projects = res.flatMap((item) => item.projects ?? [])
+    expect(projects.length).toBeGreaterThan(0)
+    projects.forEach((project) => {
+      expect(project).toHaveProperty('summary')
+      expect(project).not.toHaveProperty('secretNotes')
+      expect(project).not.toHaveProperty('internalCode')
+    })
+  })
+
+
+})
diff --git a/tests/jest.e2e.config.ts b/tests/jest.e2e.config.ts
new file mode 100644
index 0000000..726f6ce
--- /dev/null
+++ b/tests/jest.e2e.config.ts
@@ -0,0 +1,30 @@
+import path from 'path'
+import { config as loadEnv } from 'dotenv'
+import type { Config } from '@jest/types'
+
+// Load environment variables from .env.e2e if it exists
+loadEnv({
+  path: path.resolve(__dirname, '../.env.e2e'),
+  override: false
+})
+
+if (!process.env.FLOWERBASE_APP_PATH) {
+  process.env.FLOWERBASE_APP_PATH = path.resolve(__dirname, 'e2e/app')
+}
+
+const config: Config.InitialOptions = {
+  rootDir: path.resolve(__dirname, '../'),
+  preset: 'ts-jest',
+  globals: {
+    'ts-jest': {
+      tsconfig: '<rootDir>/tests/tsconfig.json'
+    }
+  },
+  setupFiles: ['<rootDir>/tests/jest.setup.ts'],
+  testEnvironment: 'node',
+  testMatch: ['<rootDir>/tests/e2e/**/*.test.ts'],
+  moduleFileExtensions: ['ts', 'js', 'json'],
+  verbose: true
+}
+
+export default config
diff --git a/tests/jest.setup.ts b/tests/jest.setup.ts
new file mode 100644
index 0000000..aebe19a
--- /dev/null
+++ b/tests/jest.setup.ts
@@ -0,0 +1,42 @@
+import { Blob as NodeBlob } from 'buffer'
+import path from 'node:path'
+
+if (!process.env.FLOWERBASE_APP_PATH) {
+  process.env.FLOWERBASE_APP_PATH = path.resolve(__dirname, '../../tests/e2e/app')
+}
+
+const BaseBlob = typeof globalThis.Blob !== 'undefined' ? globalThis.Blob : NodeBlob
+
+type PolyfillFilePropertyBag = FilePropertyBag & {
+  name?: string
+}
+
+class FilePolyfill extends BaseBlob {
+  lastModified: number
+  name: string
+
+  constructor(bits?: Iterable<BlobPart>, options?: FilePropertyBag) {
+    super(bits, options as FilePropertyBag)
+    const fileOptions = options as PolyfillFilePropertyBag
+    this.name = fileOptions?.name ?? ''
+    this.lastModified = fileOptions?.lastModified ?? Date.now()
+  }
+}
+
+if (typeof globalThis.File === 'undefined') {
+  globalThis.File = FilePolyfill as unknown as typeof File
+}
+
+const createChannel = () => ({
+  publish: jest.fn(),
+  subscribe: jest.fn()
+})
+
+jest.mock('node:diagnostics_channel', () => ({
+  channel: jest.fn(createChannel),
+  tracingChannel: () => ({
+    asyncStart: createChannel(),
+    asyncEnd: createChannel(),
+    error: createChannel()
+  })
+}))
diff --git a/tests/tsconfig.json b/tests/tsconfig.json
new file mode 100644
index 0000000..d08a235
--- /dev/null
+++ b/tests/tsconfig.json
@@ -0,0 +1,9 @@
+{
+  "extends": "../tsconfig.base.json",
+  "compilerOptions": {
+    "esModuleInterop": true,
+    "allowSyntheticDefaultImports": true,
+    "outDir": "./dist"
+  },
+  "include": ["./e2e/**/*.ts"]
+}