diff --git a/Dockerfile.job-worker b/Dockerfile.job-worker index 82b4eb76..59ae79d5 100644 --- a/Dockerfile.job-worker +++ b/Dockerfile.job-worker @@ -6,14 +6,21 @@ COPY package*.json . COPY patches/ ./patches/ RUN npm ci COPY . . -RUN npm run build:job-worker +RUN npm run build:job-worker && mkdir -p /app/dist/fonts && cp src/fonts/*.ttf /app/dist/fonts/ FROM public.ecr.aws/lambda/nodejs:18 AS runner ENV NODE_ENV=production +ENV PDFKIT_DATA_DIR=/var/task/data COPY --from=build /app/dist/job-worker.js ${LAMBDA_TASK_ROOT} +COPY --from=build /app/node_modules/pdfkit/js/data /var/task/data +COPY --from=build /app/dist/fonts /var/task/fonts +COPY --from=build /app/src/fonts /var/task/fonts CMD ["job-worker.handler"] FROM public.ecr.aws/lambda/nodejs:18 AS dev +ENV PDFKIT_DATA_DIR=/var/task/data COPY --from=build /app/dist/job-worker.js ${LAMBDA_TASK_ROOT} +COPY --from=build /app/node_modules/pdfkit/js/data /var/task/data +COPY --from=build /app/dist/fonts /var/task/fonts +COPY --from=build /app/src/fonts /var/task/fonts CMD ["job-worker.handler"] - diff --git a/compose.local.yaml b/compose.local.yaml new file mode 100644 index 00000000..5dd2e914 --- /dev/null +++ b/compose.local.yaml @@ -0,0 +1,57 @@ +services: + server: + environment: + AWS_REGION: us-east-1 + AWS_ACCESS_KEY_ID: test + AWS_SECRET_ACCESS_KEY: test + AWS_ENDPOINT_URL_S3: http://localstack:4566 + AWS_S3_FORCE_PATH_STYLE: "true" + depends_on: + localstack: + condition: service_healthy + + job_worker: + environment: + AWS_REGION: us-east-1 + AWS_ACCESS_KEY_ID: test + AWS_SECRET_ACCESS_KEY: test + AWS_ENDPOINT_URL_S3: http://localstack:4566 + AWS_S3_FORCE_PATH_STYLE: "true" + EXPORT_PUBLIC_S3_ENDPOINT: http://localhost:4566 + depends_on: + localstack: + condition: service_healthy + + localstack: + image: localstack/localstack:latest + environment: + - SERVICES=s3 + - DEFAULT_REGION=us-east-1 + - LS_LOG=warn + ports: + - 4566:4566 + healthcheck: + test: ["CMD", "bash", "-c", "awslocal s3 ls >/dev/null 2>&1"] 
+ interval: 10s + timeout: 5s + retries: 5 + + localstack-init: + image: amazon/aws-cli:2.32.11 + depends_on: + localstack: + condition: service_healthy + environment: + AWS_REGION: us-east-1 + AWS_ACCESS_KEY_ID: test + AWS_SECRET_ACCESS_KEY: test + entrypoint: ["/bin/sh", "-c"] + command: + - > + until aws --endpoint-url http://localstack:4566 s3api list-buckets >/dev/null 2>&1; do + echo "Waiting for localstack S3..."; + sleep 2; + done; + aws --endpoint-url http://localstack:4566 s3 mb s3://gbt-exports-local >/dev/null 2>&1 || true; + echo "Localstack S3 bucket ready"; + restart: "no" diff --git a/db/migrations/25-12-06-add-export-request.sql b/db/migrations/25-12-06-add-export-request.sql new file mode 100644 index 00000000..dd9d129a --- /dev/null +++ b/db/migrations/25-12-06-add-export-request.sql @@ -0,0 +1,36 @@ +create type export_request_status as enum ('PENDING', 'IN_PROGRESS', 'COMPLETE', 'FAILED'); + +create table export_request ( + id uuid primary key default generate_ulid(), + language_id uuid not null references language(id) on delete cascade, + book_id integer references book(id), + chapters integer[], + layout text not null default 'standard', + status export_request_status not null default 'PENDING', + job_id uuid references job(id), + download_url text, + expires_at timestamptz, + requested_by uuid not null references users(id) on delete cascade, + requested_at timestamptz not null default now(), + completed_at timestamptz, + export_key text +); + +create index export_request_status_idx on export_request(status); +create index export_request_requested_by_idx on export_request(requested_by); +create index export_request_expires_at_idx on export_request(expires_at); + +create table if not exists export_request_book ( + request_id uuid not null references export_request(id) on delete cascade, + book_id integer not null references book(id), + chapters integer[] not null, + primary key (request_id, book_id) +); + +create index if not exists 
export_request_book_request_idx on export_request_book(request_id); + +insert into job_type (name) +values + ('export_interlinear_pdf'), + ('cleanup_exports') +on conflict (name) do nothing; diff --git a/db/migrations/25-12-14-snapshot-interlinear-pdf-job-type.sql b/db/migrations/25-12-14-snapshot-interlinear-pdf-job-type.sql new file mode 100644 index 00000000..853b45e0 --- /dev/null +++ b/db/migrations/25-12-14-snapshot-interlinear-pdf-job-type.sql @@ -0,0 +1,8 @@ +select setval( + pg_get_serial_sequence('job_type', 'id'), + (select coalesce(max(id), 0) from job_type) +); + +insert into job_type (name) +values ('create_snapshot_interlinear_pdf') +on conflict (name) do nothing; diff --git a/db/scripts/schema.sql b/db/scripts/schema.sql index d007bccd..a32f30e9 100644 --- a/db/scripts/schema.sql +++ b/db/scripts/schema.sql @@ -114,6 +114,17 @@ CREATE TYPE public.text_direction AS ENUM ( 'rtl' ); +-- +-- Name: export_request_status; Type: TYPE; Schema: public; Owner: - +-- + +CREATE TYPE public.export_request_status AS ENUM ( + 'PENDING', + 'IN_PROGRESS', + 'COMPLETE', + 'FAILED' +); + -- -- Name: user_status; Type: TYPE; Schema: public; Owner: - @@ -577,6 +588,37 @@ CREATE TABLE public.language_member_role ( role public.language_role NOT NULL ); +-- +-- Name: export_request; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.export_request ( + id uuid DEFAULT public.generate_ulid() NOT NULL, + language_id uuid NOT NULL, + book_id integer, + chapters integer[], + layout text DEFAULT 'standard'::text NOT NULL, + status public.export_request_status DEFAULT 'PENDING'::public.export_request_status NOT NULL, + job_id uuid, + download_url text, + expires_at timestamp with time zone, + requested_by uuid NOT NULL, + requested_at timestamp with time zone DEFAULT now() NOT NULL, + completed_at timestamp with time zone, + export_key text +); + + +-- +-- Name: export_request_book; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.export_request_book 
( + request_id uuid NOT NULL, + book_id integer NOT NULL, + chapters integer[] NOT NULL +); + -- -- Name: phrase; Type: TABLE; Schema: public; Owner: - @@ -1196,6 +1238,16 @@ ALTER TABLE ONLY public.language_import_job ALTER TABLE ONLY public.language_member_role ADD CONSTRAINT language_member_role_pkey PRIMARY KEY (language_id, user_id, role); +-- +-- Name: export_request export_request_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.export_request + ADD CONSTRAINT export_request_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY public.export_request_book + ADD CONSTRAINT export_request_book_pkey PRIMARY KEY (request_id, book_id); + -- -- Name: language language_pkey; Type: CONSTRAINT; Schema: public; Owner: - @@ -1456,6 +1508,18 @@ CREATE INDEX gloss_phrase_id_idx ON public.gloss USING btree (phrase_id); CREATE UNIQUE INDEX language_code_idx ON public.language USING btree (code); +-- +-- Name: export_request_status_idx; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX export_request_status_idx ON public.export_request USING btree (status); + +CREATE INDEX export_request_requested_by_idx ON public.export_request USING btree (requested_by); + +CREATE INDEX export_request_expires_at_idx ON public.export_request USING btree (expires_at); + +CREATE INDEX export_request_book_request_idx ON public.export_request_book USING btree (request_id); + -- -- Name: lemma_form_lemma_id_idx; Type: INDEX; Schema: public; Owner: - @@ -1662,6 +1726,43 @@ ALTER TABLE ONLY public.language_member_role ALTER TABLE ONLY public.language_member_role ADD CONSTRAINT language_member_role_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON UPDATE CASCADE ON DELETE RESTRICT; +-- +-- Name: export_request export_request_book_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.export_request + ADD CONSTRAINT export_request_book_id_fkey FOREIGN KEY (book_id) REFERENCES public.book(id); + + +-- +-- Name: export_request 
export_request_job_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.export_request + ADD CONSTRAINT export_request_job_id_fkey FOREIGN KEY (job_id) REFERENCES public.job(id); + + +-- +-- Name: export_request export_request_language_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.export_request + ADD CONSTRAINT export_request_language_id_fkey FOREIGN KEY (language_id) REFERENCES public.language(id) ON DELETE CASCADE; + + +-- +-- Name: export_request export_request_requested_by_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.export_request + ADD CONSTRAINT export_request_requested_by_fkey FOREIGN KEY (requested_by) REFERENCES public.users(id) ON DELETE CASCADE; + +ALTER TABLE ONLY public.export_request_book + ADD CONSTRAINT export_request_book_book_id_fkey FOREIGN KEY (book_id) REFERENCES public.book(id); + +ALTER TABLE ONLY public.export_request_book + ADD CONSTRAINT export_request_book_request_id_fkey FOREIGN KEY (request_id) REFERENCES public.export_request(id) ON DELETE CASCADE; + -- -- Name: language language_reference_language_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - @@ -1930,4 +2031,3 @@ ALTER TABLE ONLY public.word -- -- PostgreSQL database dump complete -- - diff --git a/package-lock.json b/package-lock.json index efcb2283..4a676430 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7,7 +7,10 @@ "name": "platform", "hasInstallScript": true, "dependencies": { + "@aws-sdk/client-s3": "3.662.0", "@aws-sdk/client-sqs": "^3.678.0", + "@aws-sdk/lib-storage": "3.658.1", + "@aws-sdk/s3-request-presigner": "3.662.0", "@google-cloud/translate": "8.5.0", "@gracious.tech/fetch-client": "0.8.9", "@next/env": "14.2.12", @@ -26,6 +29,7 @@ "@opentelemetry/semantic-conventions": "1.28.0", "date-fns": "4.1.0", "fathom-client": "3.7.2", + "fontkit": "^2.0.4", "fuzzysort": "3.0.2", "googleapis": "148.0.0", "install": "0.13.0", @@ -35,6 +39,8 @@ 
"nodemailer": "6.9.15", "oslo": "1.2.1", "patch-package": "8.0.0", + "pdf-lib": "^1.17.1", + "pdfkit": "^0.15.0", "pg": "8.12.0", "pg-copy-streams": "7.0.0", "pino": "9.6.0", @@ -44,8 +50,6 @@ "zod": "3.23.8" }, "devDependencies": { - "@aws-sdk/client-s3": "3.662.0", - "@aws-sdk/lib-storage": "3.658.1", "@faker-js/faker": "9.7.0", "@floating-ui/react-dom": "2.1.2", "@fortawesome/fontawesome-svg-core": "6.6.0", @@ -54,6 +58,7 @@ "@fortawesome/react-fontawesome": "0.2.2", "@headlessui/react": "1.7.19", "@headlessui/tailwindcss": "0.2.1", + "@testing-library/react": "^16.3.0", "@tiptap/react": "2.6.6", "@tiptap/starter-kit": "2.6.6", "@types/aws-lambda": "8.10.147", @@ -61,6 +66,7 @@ "@types/lodash": "4.17.7", "@types/node": "^20", "@types/nodemailer": "6.4.15", + "@types/pdfkit": "^0.13.8", "@types/pg": "8.11.6", "@types/pg-copy-streams": "1.2.5", "@types/pg-query-stream": "1.0.3", @@ -84,6 +90,7 @@ "postcss": "^8", "prettier": "3.4.2", "tailwindcss": "^3.4.1", + "ts-node": "^10.9.2", "typescript": "^5", "vite-tsconfig-paths": "5.1.4", "vitest": "3.0.4", @@ -281,7 +288,7 @@ "version": "3.662.0", "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.662.0.tgz", "integrity": "sha512-zZ38Bjrtjn/KlYSxfkh6rcHX77nOXVZThIOc/RMnsMAC5qS+LT0ruhbYXG2q1Q7BaQ2QWQMz/0n/czWHRbOkNA==", - "dev": true, + "dev": false, "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", @@ -399,6 +406,7 @@ "version": "3.662.0", "dev": true, "license": "Apache-2.0", + "peer": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", @@ -451,6 +459,7 @@ "version": "3.662.0", "dev": true, "license": "Apache-2.0", + "peer": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", @@ -896,6 +905,7 @@ "node_modules/@aws-sdk/client-sso-oidc": { "version": "3.678.0", "license": "Apache-2.0", + "peer": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", 
"@aws-crypto/sha256-js": "5.2.0", @@ -947,6 +957,7 @@ "node_modules/@aws-sdk/client-sts": { "version": "3.678.0", "license": "Apache-2.0", + "peer": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", @@ -1144,7 +1155,7 @@ "version": "3.658.1", "resolved": "https://registry.npmjs.org/@aws-sdk/lib-storage/-/lib-storage-3.658.1.tgz", "integrity": "sha512-qk/CoTpsBj9A9KRT24MrB92TcoKxteCGKLuszU0fAb5KMx3tgJ5kNTkc7Ag/B+sZ1EWO+psqWUJKm2co5u1pDQ==", - "dev": true, + "dev": false, "license": "Apache-2.0", "dependencies": { "@smithy/abort-controller": "^3.1.4", @@ -1331,7 +1342,6 @@ }, "node_modules/@aws-sdk/middleware-sdk-s3": { "version": "3.662.0", - "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/core": "3.662.0", @@ -1355,7 +1365,6 @@ }, "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/core": { "version": "3.662.0", - "dev": true, "license": "Apache-2.0", "dependencies": { "@smithy/core": "^2.4.7", @@ -1375,7 +1384,6 @@ }, "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/types": { "version": "3.662.0", - "dev": true, "license": "Apache-2.0", "dependencies": { "@smithy/types": "^3.5.0", @@ -1456,9 +1464,40 @@ "node": ">=16.0.0" } }, + "node_modules/@aws-sdk/s3-request-presigner": { + "version": "3.662.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.662.0.tgz", + "integrity": "sha512-O3FXO4LGNXzIXtrWPBu+ImQcF3DxRiP87cJObdNDso3p+UZQ5rlsUnYovnD8WazFfUbBcYy6IK1+yYJDyXXQvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/signature-v4-multi-region": "3.662.0", + "@aws-sdk/types": "3.662.0", + "@aws-sdk/util-format-url": "3.662.0", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.3.6", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/s3-request-presigner/node_modules/@aws-sdk/types": { + 
"version": "3.662.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.662.0.tgz", + "integrity": "sha512-Ff9/KRmIm8iEzodxzISLj4/pB/0hX2nVw1RFeOBC65OuM6nHrAdWHHog/CVx25hS5JPU0uE3h6NlWRaBJ7AV5w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@aws-sdk/signature-v4-multi-region": { "version": "3.662.0", - "dev": true, "license": "Apache-2.0", "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.662.0", @@ -1474,7 +1513,6 @@ }, "node_modules/@aws-sdk/signature-v4-multi-region/node_modules/@aws-sdk/types": { "version": "3.662.0", - "dev": true, "license": "Apache-2.0", "dependencies": { "@smithy/types": "^3.5.0", @@ -1514,7 +1552,6 @@ }, "node_modules/@aws-sdk/util-arn-parser": { "version": "3.568.0", - "dev": true, "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1536,6 +1573,34 @@ "node": ">=16.0.0" } }, + "node_modules/@aws-sdk/util-format-url": { + "version": "3.662.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-format-url/-/util-format-url-3.662.0.tgz", + "integrity": "sha512-McyEyXsZMzuk/nqrVEbjCSmsKykJ7UI4lTDMdaqFdL0l5K/6VWgbFc3xOZcxEGBIvNucHiusQhqJXYHCAG65Dg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.662.0", + "@smithy/querystring-builder": "^3.0.7", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-format-url/node_modules/@aws-sdk/types": { + "version": "3.662.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.662.0.tgz", + "integrity": "sha512-Ff9/KRmIm8iEzodxzISLj4/pB/0hX2nVw1RFeOBC65OuM6nHrAdWHHog/CVx25hS5JPU0uE3h6NlWRaBJ7AV5w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@aws-sdk/util-locate-window": { "version": "3.568.0", "license": "Apache-2.0", @@ 
-1590,6 +1655,65 @@ "node": ">=16.0.0" } }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", + "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", + "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, 
+ "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, "node_modules/@emnapi/core": { "version": "0.45.0", "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-0.45.0.tgz", @@ -2162,6 +2286,7 @@ "version": "6.6.0", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@fortawesome/fontawesome-common-types": "6.6.0" }, @@ -3111,6 +3236,7 @@ "node_modules/@opentelemetry/api": { "version": "1.9.0", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=8.0.0" } @@ -3672,6 +3798,36 @@ "@opentelemetry/api": "^1.1.0" } }, + "node_modules/@pdf-lib/standard-fonts": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@pdf-lib/standard-fonts/-/standard-fonts-1.0.0.tgz", + "integrity": "sha512-hU30BK9IUN/su0Mn9VdlVKsWBS6GyhVfqjwl1FjZN4TxP6cCw0jP2w7V3Hf5uX7M0AZJ16vey9yE0ny7Sa59ZA==", + "license": "MIT", + "dependencies": { + "pako": "^1.0.6" + } + }, + "node_modules/@pdf-lib/standard-fonts/node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "license": "(MIT AND Zlib)" + }, + "node_modules/@pdf-lib/upng": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@pdf-lib/upng/-/upng-1.0.1.tgz", + "integrity": "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ==", + "license": "MIT", + "dependencies": { + "pako": "^1.0.10" + } + }, + "node_modules/@pdf-lib/upng/node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "license": "(MIT AND Zlib)" + }, "node_modules/@pkgjs/parseargs": { "version": "0.11.0", "dev": true, @@ -4658,10 +4814,69 @@ "url": 
"https://github.com/sponsors/tannerlinsley" } }, + "node_modules/@testing-library/dom": { + "version": "10.4.1", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.1.tgz", + "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.3.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "picocolors": "1.1.1", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@testing-library/dom/node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/@testing-library/react": { + "version": "16.3.0", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", + "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@testing-library/dom": "^10.0.0", + "@types/react": "^18.0.0 || ^19.0.0", + "@types/react-dom": "^18.0.0 || ^19.0.0", + "react": "^18.0.0 || ^19.0.0", + "react-dom": "^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@tiptap/core": { "version": "2.6.6", "dev": true, "license": "MIT", + "peer": true, "funding": { "type": "github", "url": "https://github.com/sponsors/ueberdosis" @@ -4927,6 +5142,7 @@ "version": "2.6.6", "dev": true, 
"license": "MIT", + "peer": true, "dependencies": { "prosemirror-changeset": "^2.2.1", "prosemirror-collab": "^1.3.1", @@ -5011,6 +5227,34 @@ "node": ">= 10" } }, + "node_modules/@tsconfig/node10": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", + "integrity": "sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, "node_modules/@tybys/wasm-util": { "version": "0.8.3", "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.8.3.tgz", @@ -5020,6 +5264,13 @@ "tslib": "^2.4.0" } }, + "node_modules/@types/aria-query": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", + "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/aws-lambda": { "version": "8.10.147", "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.147.tgz", @@ -5061,6 +5312,7 @@ 
"node_modules/@types/node": { "version": "20.16.1", "license": "MIT", + "peer": true, "dependencies": { "undici-types": "~6.19.2" } @@ -5073,6 +5325,16 @@ "@types/node": "*" } }, + "node_modules/@types/pdfkit": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/@types/pdfkit/-/pdfkit-0.13.9.tgz", + "integrity": "sha512-RDG8Yb1zT7I01FfpwK7nMSA433XWpblMqSCtA5vJlSyavWZb303HUYPCel6JTiDDFqwGLvtAnYbH8N/e0Cb89g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/pg": { "version": "8.11.6", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.6.tgz", @@ -5124,6 +5386,7 @@ "version": "18.3.3", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -5133,6 +5396,7 @@ "version": "18.3.0", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@types/react": "*" } @@ -5433,6 +5697,7 @@ "node_modules/acorn": { "version": "8.12.1", "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -5455,6 +5720,19 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/agent-base": { "version": "7.1.1", "license": "MIT", @@ -5551,7 +5829,6 @@ }, "node_modules/array-buffer-byte-length": { "version": "1.0.1", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.5", @@ -5745,7 +6022,6 @@ }, "node_modules/available-typed-arrays": { "version": "1.0.7", - "dev": true, "license": "MIT", "dependencies": { "possible-typed-array-names": "^1.0.0" @@ -5915,6 +6191,15 @@ "node": ">=8" } }, + "node_modules/brotli": { + "version": "1.3.3", + "resolved": 
"https://registry.npmjs.org/brotli/-/brotli-1.3.3.tgz", + "integrity": "sha512-oTKjJdShmDuGW94SyyaoQvAjf30dZaHnjJ8uAF+u2/vGJkJbJPJAT1gDiOJP5v1Zb6f9KEyW/1HpuaWIXtGHPg==", + "license": "MIT", + "dependencies": { + "base64-js": "^1.1.2" + } + }, "node_modules/buffer": { "version": "4.9.2", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", @@ -6080,6 +6365,7 @@ "version": "4.4.4", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@kurkle/color": "^0.3.0" }, @@ -6283,6 +6569,15 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, "node_modules/clone-deep": { "version": "4.0.1", "dev": true, @@ -6337,6 +6632,13 @@ "version": "0.0.1", "license": "MIT" }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, "node_modules/crelt": { "version": "1.0.6", "dev": true, @@ -6354,6 +6656,12 @@ "node": ">= 8" } }, + "node_modules/crypto-js": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/crypto-js/-/crypto-js-4.2.0.tgz", + "integrity": "sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==", + "license": "MIT" + }, "node_modules/cssesc": { "version": "3.0.0", "dev": true, @@ -6447,6 +6755,7 @@ "node_modules/date-fns": { "version": "4.1.0", "license": "MIT", + "peer": true, "funding": { "type": "github", "url": "https://github.com/sponsors/kossnocorp" @@ -6483,7 +6792,6 @@ }, "node_modules/deep-equal": { "version": "2.2.3", - "dev": true, "license": "MIT", 
"dependencies": { "array-buffer-byte-length": "^1.0.0", @@ -6534,7 +6842,6 @@ }, "node_modules/define-properties": { "version": "1.2.1", - "dev": true, "license": "MIT", "dependencies": { "define-data-property": "^1.0.1", @@ -6555,11 +6862,37 @@ "node": ">=0.4.0" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/dfa": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/dfa/-/dfa-1.2.0.tgz", + "integrity": "sha512-ED3jP8saaweFTjeGX8HQPjeC1YYyZs98jGNZx6IiBvxW7JG5v492kamAQB3m2wop07CvU/RQmzcKr6bgcC5D/Q==", + "license": "MIT" + }, "node_modules/didyoumean": { "version": "1.2.2", "dev": true, "license": "Apache-2.0" }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/dir-glob": { "version": "3.0.1", "dev": true, @@ -6587,6 +6920,13 @@ "node": ">=6.0.0" } }, + "node_modules/dom-accessibility-api": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", + "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", + "dev": true, + "license": "MIT" + }, "node_modules/dompurify": { "version": "3.1.6", "license": "(MPL-2.0 OR Apache-2.0)" @@ -6749,7 +7089,6 @@ }, "node_modules/es-get-iterator": { "version": "1.1.3", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.2", @@ -6906,6 +7245,7 @@ "version": "8.57.0", "dev": true, "license": "MIT", + "peer": true, "dependencies": { 
"@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -7064,6 +7404,7 @@ "version": "2.29.1", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "array-includes": "^3.1.7", "array.prototype.findlastindex": "^1.2.3", @@ -7412,7 +7753,6 @@ }, "node_modules/fast-deep-equal": { "version": "3.1.3", - "dev": true, "license": "MIT" }, "node_modules/fast-glob": { @@ -7565,9 +7905,34 @@ "dev": true, "license": "ISC" }, + "node_modules/fontkit": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/fontkit/-/fontkit-2.0.4.tgz", + "integrity": "sha512-syetQadaUEDNdxdugga9CpEYVaQIxOwk7GlwZWWZ19//qW4zE5bknOKeMBDYAASwnpaSHKJITRLMF9m1fp3s6g==", + "license": "MIT", + "dependencies": { + "@swc/helpers": "^0.5.12", + "brotli": "^1.3.2", + "clone": "^2.1.2", + "dfa": "^1.2.0", + "fast-deep-equal": "^3.1.3", + "restructure": "^3.0.0", + "tiny-inflate": "^1.0.3", + "unicode-properties": "^1.4.0", + "unicode-trie": "^2.0.0" + } + }, + "node_modules/fontkit/node_modules/@swc/helpers": { + "version": "0.5.17", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", + "integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.8.0" + } + }, "node_modules/for-each": { "version": "0.3.3", - "dev": true, "license": "MIT", "dependencies": { "is-callable": "^1.1.3" @@ -7679,7 +8044,6 @@ }, "node_modules/functions-have-names": { "version": "1.2.3", - "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8023,7 +8387,6 @@ }, "node_modules/has-bigints": { "version": "1.0.2", - "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8071,7 +8434,6 @@ }, "node_modules/has-tostringtag": { "version": "1.0.2", - "dev": true, "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" @@ -8248,7 +8610,6 @@ }, "node_modules/internal-slot": { 
"version": "1.0.7", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -8271,7 +8632,6 @@ }, "node_modules/is-arguments": { "version": "1.1.1", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.2", @@ -8286,7 +8646,6 @@ }, "node_modules/is-array-buffer": { "version": "3.0.4", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.2", @@ -8315,7 +8674,6 @@ }, "node_modules/is-bigint": { "version": "1.0.4", - "dev": true, "license": "MIT", "dependencies": { "has-bigints": "^1.0.1" @@ -8337,7 +8695,6 @@ }, "node_modules/is-boolean-object": { "version": "1.1.2", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.2", @@ -8352,7 +8709,6 @@ }, "node_modules/is-callable": { "version": "1.2.7", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -8390,7 +8746,6 @@ }, "node_modules/is-date-object": { "version": "1.0.5", - "dev": true, "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" @@ -8480,7 +8835,6 @@ }, "node_modules/is-map": { "version": "2.0.3", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -8509,7 +8863,6 @@ }, "node_modules/is-number-object": { "version": "1.0.7", - "dev": true, "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" @@ -8546,7 +8899,6 @@ }, "node_modules/is-regex": { "version": "1.1.4", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.2", @@ -8561,7 +8913,6 @@ }, "node_modules/is-set": { "version": "2.0.3", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -8572,7 +8923,6 @@ }, "node_modules/is-shared-array-buffer": { "version": "1.0.3", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.7" @@ -8596,7 +8946,6 @@ }, "node_modules/is-string": { "version": "1.0.7", - "dev": true, "license": "MIT", "dependencies": { "has-tostringtag": "^1.0.0" @@ -8610,7 +8959,6 @@ }, "node_modules/is-symbol": { "version": "1.0.4", - "dev": true, "license": "MIT", "dependencies": 
{ "has-symbols": "^1.0.2" @@ -8638,7 +8986,6 @@ }, "node_modules/is-weakmap": { "version": "2.0.2", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -8660,7 +9007,6 @@ }, "node_modules/is-weakset": { "version": "2.0.3", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.7", @@ -8760,6 +9106,12 @@ "node": ">= 0.6.0" } }, + "node_modules/jpeg-exif": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/jpeg-exif/-/jpeg-exif-1.1.4.tgz", + "integrity": "sha512-a+bKEcCjtuW5WTdgeXFzswSrdqi0jk4XlEtZlx5A94wCoBpFjfFTbo/Tra5SpNCl/YFZPvcV1dJc+TAYeg6ROQ==", + "license": "MIT" + }, "node_modules/js-tokens": { "version": "4.0.0", "license": "MIT" @@ -8997,6 +9349,25 @@ "node": ">=10" } }, + "node_modules/linebreak": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/linebreak/-/linebreak-1.1.0.tgz", + "integrity": "sha512-MHp03UImeVhB7XZtjd0E4n6+3xr5Dq/9xI/5FptGk5FrbDR3zagPa2DS6U8ks/3HjbKWG9Q1M2ufOzxV2qLYSQ==", + "license": "MIT", + "dependencies": { + "base64-js": "0.0.8", + "unicode-trie": "^2.0.0" + } + }, + "node_modules/linebreak/node_modules/base64-js": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-0.0.8.tgz", + "integrity": "sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/lines-and-columns": { "version": "1.2.4", "dev": true, @@ -9329,6 +9700,16 @@ "dev": true, "license": "ISC" }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "dev": true, + "license": "MIT", + "bin": { + "lz-string": "bin/bin.js" + } + }, "node_modules/magic-string": { "version": "0.30.17", "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", @@ -9339,6 
+9720,13 @@ "@jridgewell/sourcemap-codec": "^1.5.0" } }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, "node_modules/markdown-it": { "version": "14.1.0", "dev": true, @@ -9734,7 +10122,6 @@ }, "node_modules/object-is": { "version": "1.1.6", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.7", @@ -9756,7 +10143,6 @@ }, "node_modules/object.assign": { "version": "4.1.5", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.5", @@ -9950,6 +10336,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/pako": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/pako/-/pako-0.2.9.tgz", + "integrity": "sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==", + "license": "MIT" + }, "node_modules/parent-module": { "version": "1.0.1", "dev": true, @@ -10110,11 +10502,81 @@ "node": ">= 14.16" } }, + "node_modules/pdf-lib": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/pdf-lib/-/pdf-lib-1.17.1.tgz", + "integrity": "sha512-V/mpyJAoTsN4cnP31vc0wfNA1+p20evqqnap0KLoRUN0Yk/p3wN52DOEsL4oBFcLdb76hlpKPtzJIgo67j/XLw==", + "license": "MIT", + "dependencies": { + "@pdf-lib/standard-fonts": "^1.0.0", + "@pdf-lib/upng": "^1.0.1", + "pako": "^1.0.11", + "tslib": "^1.11.1" + } + }, + "node_modules/pdf-lib/node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "license": "(MIT AND Zlib)" + }, + "node_modules/pdf-lib/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": 
"sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "license": "0BSD" + }, + "node_modules/pdfkit": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/pdfkit/-/pdfkit-0.15.2.tgz", + "integrity": "sha512-s3GjpdBFSCaeDSX/v73MI5UsPqH1kjKut2AXCgxQ5OH10lPVOu5q5vLAG0OCpz/EYqKsTSw1WHpENqMvp43RKg==", + "license": "MIT", + "dependencies": { + "crypto-js": "^4.2.0", + "fontkit": "^1.8.1", + "jpeg-exif": "^1.1.4", + "linebreak": "^1.0.2", + "png-js": "^1.0.0" + } + }, + "node_modules/pdfkit/node_modules/@swc/helpers": { + "version": "0.3.17", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.3.17.tgz", + "integrity": "sha512-tb7Iu+oZ+zWJZ3HJqwx8oNwSDIU440hmVMDPhpACWQWnrZHK99Bxs70gT1L2dnr5Hg50ZRWEFkQCAnOVVV0z1Q==", + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/pdfkit/node_modules/fontkit": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/fontkit/-/fontkit-1.9.0.tgz", + "integrity": "sha512-HkW/8Lrk8jl18kzQHvAw9aTHe1cqsyx5sDnxncx652+CIfhawokEPkeM3BoIC+z/Xv7a0yMr0f3pRRwhGH455g==", + "license": "MIT", + "dependencies": { + "@swc/helpers": "^0.3.13", + "brotli": "^1.3.2", + "clone": "^2.1.2", + "deep-equal": "^2.0.5", + "dfa": "^1.2.0", + "restructure": "^2.0.1", + "tiny-inflate": "^1.0.3", + "unicode-properties": "^1.3.1", + "unicode-trie": "^2.0.0" + } + }, + "node_modules/pdfkit/node_modules/restructure": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/restructure/-/restructure-2.0.1.tgz", + "integrity": "sha512-e0dOpjm5DseomnXx2M5lpdZ5zoHqF1+bqdMJUohoYVVQa7cBdnk7fdmeI6byNWP/kiME72EeTiSypTCVnpLiDg==", + "license": "MIT" + }, "node_modules/pg": { "version": "8.12.0", "resolved": "https://registry.npmjs.org/pg/-/pg-8.12.0.tgz", "integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==", "license": "MIT", + "peer": true, "dependencies": { "pg-connection-string": "^2.6.4", 
"pg-pool": "^3.6.2", @@ -10418,9 +10880,13 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, + "node_modules/png-js": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/png-js/-/png-js-1.0.0.tgz", + "integrity": "sha512-k+YsbhpA9e+EFfKjTCH3VW6aoKlyNYI6NYdTfDL4CIvFnvsuO84ttonmZE7rc+v23SLTH8XX+5w/Ak9v0xGY4g==" + }, "node_modules/possible-typed-array-names": { "version": "1.0.0", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.4" @@ -10446,6 +10912,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "nanoid": "^3.3.8", "picocolors": "^1.1.1", @@ -10644,6 +11111,41 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/pretty-format/node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true, + "license": "MIT" + }, "node_modules/process-warning": { "version": "4.0.1", "resolved": 
"https://registry.npmjs.org/process-warning/-/process-warning-4.0.1.tgz", @@ -10770,6 +11272,7 @@ "version": "1.22.3", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "orderedmap": "^2.0.0" } @@ -10796,6 +11299,7 @@ "version": "1.4.3", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "prosemirror-model": "^1.0.0", "prosemirror-transform": "^1.0.0", @@ -10840,6 +11344,7 @@ "version": "1.34.2", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "prosemirror-model": "^1.20.0", "prosemirror-state": "^1.0.0", @@ -10954,6 +11459,7 @@ "node_modules/react": { "version": "18.3.1", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0" }, @@ -10964,6 +11470,7 @@ "node_modules/react-dom": { "version": "18.3.1", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" @@ -11039,7 +11546,6 @@ }, "node_modules/regexp.prototype.flags": { "version": "1.5.2", - "dev": true, "license": "MIT", "dependencies": { "call-bind": "^1.0.6", @@ -11137,6 +11643,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/restructure": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/restructure/-/restructure-3.0.2.tgz", + "integrity": "sha512-gSfoiOEA0VPE6Tukkrr7I0RBdE0s7H1eFCDBk05l1KIQT1UIKNc5JZy6jdyW6eYH3aR3g5b3PuL77rq0hvwtAw==", + "license": "MIT" + }, "node_modules/retry-request": { "version": "7.0.2", "license": "MIT", @@ -11381,7 +11893,6 @@ }, "node_modules/set-function-name": { "version": "2.0.2", - "dev": true, "license": "MIT", "dependencies": { "define-data-property": "^1.1.4", @@ -11631,7 +12142,6 @@ }, "node_modules/stop-iteration-iterator": { "version": "1.0.0", - "dev": true, "license": "MIT", "dependencies": { "internal-slot": "^1.0.4" @@ -11971,6 +12481,7 @@ "version": "3.4.10", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", @@ -12091,6 +12602,12 @@ "real-require": 
"^0.2.0" } }, + "node_modules/tiny-inflate": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-inflate/-/tiny-inflate-1.0.3.tgz", + "integrity": "sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw==", + "license": "MIT" + }, "node_modules/tinybench": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", @@ -12204,6 +12721,58 @@ "dev": true, "license": "Apache-2.0" }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, "node_modules/tsconfck": { "version": "3.1.4", 
"resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-3.1.4.tgz", @@ -12237,7 +12806,9 @@ } }, "node_modules/tslib": { - "version": "2.6.3", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, "node_modules/type-check": { @@ -12335,6 +12906,7 @@ "version": "5.5.4", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -12366,6 +12938,26 @@ "version": "6.19.8", "license": "MIT" }, + "node_modules/unicode-properties": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/unicode-properties/-/unicode-properties-1.4.1.tgz", + "integrity": "sha512-CLjCCLQ6UuMxWnbIylkisbRj31qxHPAurvena/0iwSVbQ2G1VY5/HjV0IRabOEbDHlzZlRdCrD4NhB0JtU40Pg==", + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.0", + "unicode-trie": "^2.0.0" + } + }, + "node_modules/unicode-trie": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-trie/-/unicode-trie-2.0.0.tgz", + "integrity": "sha512-x7bc76x0bm4prf1VLg79uhAzKw8DVboClSN5VxJuQ+LKDOVEW9CdH+VY7SP+vX7xCYQqzzgQpFqz15zeLvAtZQ==", + "license": "MIT", + "dependencies": { + "pako": "^0.2.5", + "tiny-inflate": "^1.0.0" + } + }, "node_modules/universalify": { "version": "0.2.0", "license": "MIT", @@ -12460,12 +13052,20 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, "node_modules/vite": { "version": "6.0.11", "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.11.tgz", "integrity": "sha512-4VL9mQPKoHy4+FE0NnRE/kbY51TOfaknxAjt3fJbGJxhIpBZiqVzlZDEesWWsuREXHwNdAoOFZ9MkPEVXczHwg==", 
"dev": true, "license": "MIT", + "peer": true, "dependencies": { "esbuild": "^0.24.2", "postcss": "^8.4.49", @@ -13177,7 +13777,6 @@ }, "node_modules/which-boxed-primitive": { "version": "1.0.2", - "dev": true, "license": "MIT", "dependencies": { "is-bigint": "^1.0.1", @@ -13217,7 +13816,6 @@ }, "node_modules/which-collection": { "version": "1.0.2", - "dev": true, "license": "MIT", "dependencies": { "is-map": "^2.0.3", @@ -13234,7 +13832,6 @@ }, "node_modules/which-typed-array": { "version": "1.1.15", - "dev": true, "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", @@ -13490,6 +14087,16 @@ "node": ">=8" } }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/yocto-queue": { "version": "0.1.0", "dev": true, diff --git a/package.json b/package.json index 67e464bd..05c30e27 100644 --- a/package.json +++ b/package.json @@ -10,10 +10,14 @@ "check-types": "tsc --noEmit", "prepare": "husky || true", "postinstall": "patch-package", - "test": "vitest" + "test": "vitest", + "test:localstack": "vitest run --config vitest.localstack.config.mts" }, "dependencies": { + "@aws-sdk/client-s3": "3.662.0", "@aws-sdk/client-sqs": "^3.678.0", + "@aws-sdk/lib-storage": "3.658.1", + "@aws-sdk/s3-request-presigner": "3.662.0", "@google-cloud/translate": "8.5.0", "@gracious.tech/fetch-client": "0.8.9", "@next/env": "14.2.12", @@ -32,6 +36,7 @@ "@opentelemetry/semantic-conventions": "1.28.0", "date-fns": "4.1.0", "fathom-client": "3.7.2", + "fontkit": "^2.0.4", "fuzzysort": "3.0.2", "googleapis": "148.0.0", "install": "0.13.0", @@ -41,6 +46,8 @@ "nodemailer": "6.9.15", "oslo": "1.2.1", "patch-package": "8.0.0", + "pdf-lib": "^1.17.1", + "pdfkit": "^0.15.0", "pg": "8.12.0", "pg-copy-streams": "7.0.0", "pino": 
"9.6.0", @@ -50,8 +57,6 @@ "zod": "3.23.8" }, "devDependencies": { - "@aws-sdk/client-s3": "3.662.0", - "@aws-sdk/lib-storage": "3.658.1", "@faker-js/faker": "9.7.0", "@floating-ui/react-dom": "2.1.2", "@fortawesome/fontawesome-svg-core": "6.6.0", @@ -60,6 +65,7 @@ "@fortawesome/react-fontawesome": "0.2.2", "@headlessui/react": "1.7.19", "@headlessui/tailwindcss": "0.2.1", + "@testing-library/react": "^16.3.0", "@tiptap/react": "2.6.6", "@tiptap/starter-kit": "2.6.6", "@types/aws-lambda": "8.10.147", @@ -67,6 +73,7 @@ "@types/lodash": "4.17.7", "@types/node": "^20", "@types/nodemailer": "6.4.15", + "@types/pdfkit": "^0.13.8", "@types/pg": "8.11.6", "@types/pg-copy-streams": "1.2.5", "@types/pg-query-stream": "1.0.3", @@ -90,6 +97,7 @@ "postcss": "^8", "prettier": "3.4.2", "tailwindcss": "^3.4.1", + "ts-node": "^10.9.2", "typescript": "^5", "vite-tsconfig-paths": "5.1.4", "vitest": "3.0.4", diff --git a/scripts/generate-interlinear-sample.ts b/scripts/generate-interlinear-sample.ts new file mode 100644 index 00000000..caeac13a --- /dev/null +++ b/scripts/generate-interlinear-sample.ts @@ -0,0 +1,200 @@ +import fs from "fs"; +import path from "path"; +import type { InterlinearChapterResult } from "../src/modules/export/data-access/InterlinearQueryService"; +import { TextDirectionRaw } from "../src/modules/languages/model"; + +const sample: InterlinearChapterResult = { + language: { + id: "lang", + code: "hbo", + name: "Hebrew", + textDirection: TextDirectionRaw.RTL, + }, + verses: [ + { + id: "v1", + number: 1, + words: [ + { + id: "w1", + text: "בְּרֵאשִׁית", + gloss: "in the beginning", + lemma: "רֵאשִׁית", + grammar: "N-fs", + }, + { + id: "w2", + text: "בָּרָא", + gloss: "created", + lemma: "בָּרָא", + grammar: "V-Qal", + }, + { + id: "w3", + text: "אֱלֹהִים", + gloss: "God", + lemma: "אֱלֹהִים", + grammar: "N-mp", + }, + { + id: "w4", + text: "אֵת", + gloss: "[obj]", + lemma: "אֵת", + grammar: "P-obj", + }, + { + id: "w5", + text: "הַשָּׁמַיִם", + gloss: "the 
heavens", + lemma: "שָׁמַיִם", + grammar: "N-mp", + }, + { + id: "w6", + text: "וְאֵת", + gloss: "and [obj]", + lemma: "וְאֵת", + grammar: "C+P-obj", + }, + { + id: "w7", + text: "הָאָרֶץ", + gloss: "the earth", + lemma: "אֶרֶץ", + grammar: "N-fs", + }, + ], + }, + { + id: "v2", + number: 2, + words: [ + { + id: "w8", + text: "וְהָאָרֶץ", + gloss: "and the earth", + lemma: "אֶרֶץ", + grammar: "C+N-fs", + }, + { + id: "w9", + text: "הָיְתָה", + gloss: "was", + lemma: "הָיָה", + grammar: "V-Qal-3fs", + }, + { + id: "w10", + text: "תֹהוּ", + gloss: "formless", + lemma: "תֹּהוּ", + grammar: "N-ms", + }, + { + id: "w11", + text: "וָבֹהוּ", + gloss: "and void", + lemma: "בֹּהוּ", + grammar: "C+N-ms", + }, + { + id: "w12", + text: "וְחֹשֶׁךְ", + gloss: "and darkness", + lemma: "חֹשֶׁךְ", + grammar: "C+N-ms", + }, + { + id: "w13", + text: "עַל־פְּנֵי", + gloss: "over the surface of", + lemma: "עַל־פְּנֵי", + grammar: "P", + }, + { + id: "w14", + text: "תְהוֹם", + gloss: "the deep", + lemma: "תְּהוֹם", + grammar: "N-fs", + }, + { + id: "w15", + text: "וְרוּחַ", + gloss: "and the spirit/breath", + lemma: "רוּחַ", + grammar: "C+N-fs", + }, + { + id: "w16", + text: "אֱלֹהִים", + gloss: "of God", + lemma: "אֱלֹהִים", + grammar: "N-mp cstr", + }, + { + id: "w17", + text: "מְרַחֶפֶת", + gloss: "was hovering", + lemma: "רָחַף", + grammar: "V-Piel-ptc-fs", + }, + { + id: "w18", + text: "עַל־פְּנֵי", + gloss: "over the surface of", + lemma: "עַל־פְּנֵי", + grammar: "P", + }, + { + id: "w19", + text: "הַמָּיִם", + gloss: "the waters", + lemma: "מַיִם", + grammar: "N-mp", + }, + ], + }, + ], +}; + +async function main() { + process.env.PDFKIT_DATA_DIR = + process.env.PDFKIT_DATA_DIR || + path.join(process.cwd(), "node_modules", "pdfkit", "js", "data"); + + const layout = + (process.env.INTERLINEAR_LAYOUT as "standard" | "parallel") || "standard"; + + const { generateInterlinearPdf } = await import( + "../src/modules/export/pdf/InterlinearPdfGenerator" + ); + + const { stream } = 
generateInterlinearPdf(sample, { + layout, + pageSize: "letter", + direction: "rtl", + header: { title: "Test Interlinear" }, + }); + + const chunks: Uint8Array[] = []; + for await (const chunk of stream) { + if (typeof chunk === "string") { + chunks.push(Buffer.from(chunk)); + } else if (Buffer.isBuffer(chunk)) { + chunks.push(chunk); + } else { + chunks.push(Buffer.from(chunk as any)); + } + } + const outPath = path.join(process.cwd(), "interlinear-sample.pdf"); + fs.writeFileSync(outPath, Buffer.concat(chunks.map((c) => Buffer.from(c)))); + // eslint-disable-next-line no-console + console.log("Wrote", outPath); +} + +main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/src/messages/ar.json b/src/messages/ar.json index 806044be..f20dd920 100644 --- a/src/messages/ar.json +++ b/src/messages/ar.json @@ -639,5 +639,50 @@ "reference": "{bookId, select, 1 {التكوين} 2 {الخروج} 3 {اللاويين} 4 {العدد} 5 {التثنية} 6 {يشوع} 7 {القضاة} 8 {راعوث} 9 {صموئيل الأول} 10 {صموئيل الثاني} 11 {الملوك الأول} 12 {الملوك الثاني} 13 {أخبار الأيام الأول} 14 {أخبار الأيام الثاني} 15 {عزرا} 16 {نحميا} 17 {استير} 18 {أيوب} 19 {المزامير} 20 {الأمثال} 21 {الجامعة} 22 {نشيد الأنشاد} 23 {إشعياء} 24 {أرميا} 25 {مراثي أرميا} 26 {حزقيال} 27 {دانيال} 28 {هوشع} 29 {يوئيل} 30 {عاموس} 31 {عوبديا} 32 {يونان} 33 {ميخا} 34 {ناحوم} 35 {حبقوق} 36 {صفنيا} 37 {حجاي} 38 {زكريا} 39 {ملاخي} 40 {متى} 41 {مرقس} 42 {لوقا} 43 {يوحنا} 44 {أعمال الرسل} 45 {رومية} 46 {كورنثوس الأولى} 47 {كورنثوس الثانية} 48 {غلاطية} 49 {أفسس} 50 {فيلبي} 51 {كولوسي} 52 {تسالونيكي الأولى} 53 {تسالونيكي الثانية} 54 {تيموثاوس الأولى} 55 {تيموثاوس الثانية} 56 {تيطس} 57 {فليمون} 58 {العبرانيين} 59 {يعقوب} 60 {بطرس الأولى} 61 {بطرس الثانية} 62 {يوحنا الأولى} 63 {يوحنا الثانية} 64 {يوحنا الثالثة} 65 {يهوذا} 66 {رؤيا} other {}} {chapterNumber}:{verseNumber}", "not_found": "غير موجود", "close": "إغلاق" + }, + "InterlinearExport": { + "title": "تصدير PDF بين السطور", + "description": "إنشاء ملف PDF بين السطور للفصول 
المحددة.", + "form": { + "books_label": "الأسفار", + "books_placeholder": "كل الأسفار (الافتراضي)", + "books_help": "اتركه فارغًا لتصدير كل الأسفار.", + "chapters_label": "الإصحاحات (مفصولة بفواصل أو نطاقات)", + "chapters_placeholder": "مثال: 1,2,4-6 (اتركه فارغًا للكل)", + "layout_label": "التخطيط", + "layout_standard": "قياسي (كلمة بكلمة)", + "layout_parallel": "متوازٍ (الأصل | عمود الترجمة)", + "submit": "إنشاء PDF", + "queued": "تمت الإضافة للطابور..." + }, + "status": { + "title": "الحالة", + "all_books": "كل الأسفار", + "download": "تنزيل PDF", + "expires": "ينتهي", + "generating": "جارٍ إنشاء PDF…", + "failed": "فشل التصدير. حاول مرة أخرى.", + "missing": "لم يتم العثور على التصدير. حاول مرة أخرى.", + "labels": { + "PENDING": "في الطابور", + "IN_PROGRESS": "قيد المعالجة", + "COMPLETE": "مكتمل", + "FAILED": "فشل" + } + }, + "errors": { + "invalid": "غير صالح", + "language_required": "اللغة مطلوبة.", + "language_not_found": "اللغة غير موجودة.", + "no_books_available": "لا توجد أسفار متاحة للتصدير.", + "no_chapters_available": "لا توجد إصحاحات مطابقة للأسفار المحددة.", + "export_failed": "فشل التصدير.", + "chapters_range_invalid": "يجب أن تكون نطاقات الإصحاحات أرقامًا موجبة وأن يكون البدء قبل الانتهاء.", + "chapters_numeric_or_ranges": "يجب أن تكون الإصحاحات أرقامًا أو نطاقات.", + "chapters_positive": "يجب أن تكون الإصحاحات أرقامًا موجبة.", + "chapters_required_or_blank": "أدخل إصحاحًا واحدًا على الأقل أو اتركه فارغًا للكل.", + "books_numeric_ids": "يجب أن تكون الأسفار أرقامًا.", + "books_required": "اختر سفرًا واحدًا على الأقل." 
+ } } } diff --git a/src/messages/en.json b/src/messages/en.json index 266027e9..2487d4fe 100644 --- a/src/messages/en.json +++ b/src/messages/en.json @@ -642,5 +642,50 @@ "reference": "{bookId, select, 1 {Genesis} 2 {Exodus} 3 {Leviticus} 4 {Numbers} 5 {Deuteronomy} 6 {Joshua} 7 {Judges} 8 {Ruth} 9 {1 Samuel} 10 {2 Samuel} 11 {1 Kings} 12 {2 Kings} 13 {1 Chronicles} 14 {2 Chronicles} 15 {Ezra} 16 {Nehemiah} 17 {Esther} 18 {Job} 19 {Psalm} 20 {Proverbs} 21 {Ecclesiastes} 22 {Song of Songs} 23 {Isaiah} 24 {Jeremiah} 25 {Lamentations} 26 {Ezekiel} 27 {Daniel} 28 {Hosea} 29 {Joel} 30 {Amos} 31 {Obadiah} 32 {Jonah} 33 {Micah} 34 {Nahum} 35 {Habakkuk} 36 {Zephaniah} 37 {Haggai} 38 {Zechariah} 39 {Malachi} 40 {Matthew} 41 {Mark} 42 {Luke} 43 {John} 44 {Acts} 45 {Romans} 46 {1 Corinthians} 47 {2 Corinthians} 48 {Galatians} 49 {Ephesians} 50 {Philippians} 51 {Colossians} 52 {1 Thessalonians} 53 {2 Thessalonians} 54 {1 Timothy} 55 {2 Timothy} 56 {Titus} 57 {Philemon} 58 {Hebrews} 59 {James} 60 {1 Peter} 61 {2 Peter} 62 {1 John} 63 {2 John} 64 {3 John} 65 {Jude} 66 {Revelation} other {}} {chapterNumber}:{verseNumber}", "not_found": "Not Found", "close": "Close" + }, + "InterlinearExport": { + "title": "Interlinear PDF Export", + "description": "Generate a PDF interlinear for selected chapters.", + "form": { + "books_label": "Books", + "books_placeholder": "All books (default)", + "books_help": "Leave blank to export all books.", + "chapters_label": "Chapters (comma-separated or ranges)", + "chapters_placeholder": "e.g. 1,2,4-6 (leave blank for all)", + "layout_label": "Layout", + "layout_standard": "Standard (word by word)", + "layout_parallel": "Parallel (Original | Gloss column)", + "submit": "Generate PDF", + "queued": "Queued..." + }, + "status": { + "title": "Status", + "all_books": "All books", + "download": "Download PDF", + "expires": "Expires", + "generating": "Generating PDF…", + "failed": "Export failed. Please try again.", + "missing": "Export not found. 
Please try again.", + "labels": { + "PENDING": "Queued", + "IN_PROGRESS": "In progress", + "COMPLETE": "Complete", + "FAILED": "Failed" + } + }, + "errors": { + "invalid": "Invalid", + "language_required": "Language is required.", + "language_not_found": "Language not found.", + "no_books_available": "No books available for export.", + "no_chapters_available": "No matching chapters found for the selected books.", + "export_failed": "Export failed.", + "chapters_range_invalid": "Chapter ranges must be positive numbers, and start must be before end.", + "chapters_numeric_or_ranges": "Chapters must be numeric or ranges.", + "chapters_positive": "Chapters must be positive numbers.", + "chapters_required_or_blank": "Please enter at least one chapter or leave blank for all.", + "books_numeric_ids": "Books must be numeric ids.", + "books_required": "Please choose at least one book." + } } } diff --git a/src/modules/export/actions/pollInterlinearExportStatus.test.ts b/src/modules/export/actions/pollInterlinearExportStatus.test.ts new file mode 100644 index 00000000..ca31504c --- /dev/null +++ b/src/modules/export/actions/pollInterlinearExportStatus.test.ts @@ -0,0 +1,123 @@ +import "@/tests/vitest/mocks/nextjs"; +import { initializeDatabase } from "@/tests/vitest/dbUtils"; +import { describe, expect, test } from "vitest"; +import { pollInterlinearExportStatus } from "./pollInterlinearExportStatus"; +import { createScenario } from "@/tests/scenarios"; +import logIn from "@/tests/vitest/login"; +import { LanguageMemberRoleRaw } from "@/modules/languages/model"; +import { query } from "@/db"; +import { ulid } from "@/shared/ulid"; + +initializeDatabase(); + +async function createExportRequest({ + languageId, + requestedBy, + status = "PENDING", + downloadUrl = null, + expiresAt = null, +}: { + languageId: string; + requestedBy: string; + status?: string; + downloadUrl?: string | null; + expiresAt?: Date | null; +}) { + await query( + `insert into book (id, name) values ($1, 
$2) on conflict (id) do nothing`, + [1, "Test Book"], + ); + const id = ulid(); + await query( + `insert into export_request ( + id, language_id, book_id, chapters, layout, status, requested_by, requested_at, download_url, expires_at + ) + values ($1, $2, $3, $4, $5, $6, $7, now(), $8, $9)`, + [ + id, + languageId, + 1, + [1], + "standard", + status, + requestedBy, + downloadUrl, + expiresAt, + ], + ); + return id; +} + +describe("pollInterlinearExportStatus", () => { + test("requires authentication", async () => { + const formData = new FormData(); + formData.set("id", "non-existent"); + await expect(pollInterlinearExportStatus(formData)).toBeNextjsNotFound(); + }); + + test("allows authorized language member to read their request", async () => { + const scenario = await createScenario({ + users: { member: {} }, + languages: { + language: { + members: [ + { userId: "member", roles: [LanguageMemberRoleRaw.Translator] }, + ], + }, + }, + }); + const language = scenario.languages.language; + const user = scenario.users.member; + await logIn(user.id); + const expiresAt = new Date(); + const requestId = await createExportRequest({ + languageId: language.id, + requestedBy: user.id, + status: "COMPLETE", + downloadUrl: "https://example.com/file.pdf", + expiresAt, + }); + + const formData = new FormData(); + formData.set("id", requestId); + const response = await pollInterlinearExportStatus(formData); + + expect(response).toEqual({ + id: requestId, + status: "COMPLETE", + bookId: 1, + downloadUrl: "https://example.com/file.pdf", + expiresAt, + }); + }); + + test("rejects users not in the language", async () => { + const scenario = await createScenario( + { + users: { member: {} }, + languages: { + language: { + members: [ + { userId: "member", roles: [LanguageMemberRoleRaw.Translator] }, + ], + }, + }, + }, + { + users: { outsider: {} }, + }, + ); + const language = scenario.languages.language; + const requestId = await createExportRequest({ + languageId: language.id, + 
requestedBy: scenario.users.member.id, + status: "IN_PROGRESS", + }); + + await logIn(scenario.users.outsider.id); + const formData = new FormData(); + formData.set("id", requestId); + + await expect(pollInterlinearExportStatus(formData)).toBeNextjsNotFound(); + }); +}); diff --git a/src/modules/export/actions/pollInterlinearExportStatus.ts b/src/modules/export/actions/pollInterlinearExportStatus.ts new file mode 100644 index 00000000..94f885a3 --- /dev/null +++ b/src/modules/export/actions/pollInterlinearExportStatus.ts @@ -0,0 +1,61 @@ +"use server"; + +import { z } from "zod"; +import { notFound } from "next/navigation"; +import { verifySession } from "@/session"; +import Policy from "@/modules/access/public/Policy"; +import GetInterlinearExportStatus from "../use-cases/GetInterlinearExportStatus"; +import exportRequestRepository from "../data-access/ExportRequestRepository"; + +const schema = z.object({ id: z.string().min(1) }); + +export interface ExportRequestStatusRow { + id: string; + status: string; + bookId: number | null; + downloadUrl: string | null; + expiresAt: Date | null; +} + +const getInterlinearExportStatusUseCase = new GetInterlinearExportStatus({ + exportRequestRepository, +}); + +export async function pollInterlinearExportStatus( + arg1: FormData, + arg2?: FormData, +) { + const formData = arg2 ?? 
arg1; + + const session = await verifySession(); + const userId = session?.user?.id; + if (!userId) notFound(); + + const parsed = schema.parse({ id: formData.get("id") }); + const statusRow = await getInterlinearExportStatusUseCase.execute(parsed.id); + if (!statusRow) { + return null; + } + + const policy = new Policy({ + systemRoles: [Policy.SystemRole.Admin], + languageRoles: [Policy.LanguageRole.Admin, Policy.LanguageRole.Translator], + }); + const authorized = await policy.authorize({ + actorId: userId, + languageCode: statusRow.languageCode, + }); + if (!authorized) { + notFound(); + } + + return { + id: statusRow.id, + status: statusRow.status, + bookId: statusRow.bookId, + downloadUrl: statusRow.downloadUrl, + expiresAt: statusRow.expiresAt, + } satisfies ExportRequestStatusRow; +} + +export default pollInterlinearExportStatus; diff --git a/src/modules/export/actions/requestInterlinearExport.test.ts b/src/modules/export/actions/requestInterlinearExport.test.ts new file mode 100644 index 00000000..d46873d9 --- /dev/null +++ b/src/modules/export/actions/requestInterlinearExport.test.ts @@ -0,0 +1,337 @@ +import "@/tests/vitest/mocks/nextjs"; +import { initializeDatabase } from "@/tests/vitest/dbUtils"; +import { describe, expect, test, vi } from "vitest"; +import { requestInterlinearExport } from "./requestInterlinearExport"; +import { createScenario } from "@/tests/scenarios"; +import logIn from "@/tests/vitest/login"; +import { enqueueJob } from "@/shared/jobs/enqueueJob"; +import { LanguageMemberRoleRaw } from "@/modules/languages/model"; +import { query } from "@/db"; +import { SystemRoleRaw } from "@/modules/users/model/SystemRole"; + +vi.mock("@/shared/jobs/enqueueJob"); + +initializeDatabase(); + +async function findExportRequest(id: string) { + const result = await query( + `select id, + language_id as "languageId", + book_id as "bookId", + chapters, + layout, + status, + requested_by as "requestedBy" + from export_request + where id = any($1)`, + 
[[id]], + ); + return result.rows[0]; +} + +async function findExportRequestBooks(id: string) { + const result = await query( + `select request_id as "requestId", + book_id as "bookId", + chapters + from export_request_book + where request_id = $1 + order by book_id`, + [id], + ); + return result.rows; +} +describe("requestInterlinearExport", () => { + test("rejects unauthenticated requests", async () => { + const formData = new FormData(); + await expect(requestInterlinearExport(formData)).toBeNextjsNotFound(); + expect(enqueueJob).not.toHaveBeenCalled(); + }); + + test("validates chapters input", async () => { + const scenario = await createScenario({ + users: { translator: {} }, + languages: { + language: { + members: [ + { + userId: "translator", + roles: [LanguageMemberRoleRaw.Translator], + }, + ], + }, + }, + }); + const user = scenario.users.translator; + const language = scenario.languages.language; + await logIn(user.id); + + const formData = new FormData(); + formData.set("languageCode", language.code); + formData.set("bookIds", "1"); + formData.set("chapters", "one,two"); + formData.set("layout", "standard"); + + const response = await requestInterlinearExport(formData); + expect(response).toEqual({ + state: "error", + validation: { chapters: ["Chapters must be numeric or ranges."] }, + }); + expect(enqueueJob).not.toHaveBeenCalled(); + }); + + test("returns validation error for unknown language before enqueue", async () => { + const scenario = await createScenario({ + users: { admin: { systemRoles: [SystemRoleRaw.Admin] } }, + }); + await logIn(scenario.users.admin.id); + + const formData = new FormData(); + formData.set("languageCode", "missing"); + formData.set("bookIds", "1"); + formData.set("chapters", "1"); + formData.set("layout", "standard"); + + const response = await requestInterlinearExport(formData); + expect(response).toEqual({ + state: "error", + validation: { languageCode: ["Language not found."] }, + }); + 
expect(enqueueJob).not.toHaveBeenCalled(); + }); + + test("denies non-members", async () => { + const scenario = await createScenario({ + users: { outsider: {} }, + languages: { language: {} }, + }); + const user = scenario.users.outsider; + const language = scenario.languages.language; + await logIn(user.id); + + const formData = new FormData(); + formData.set("languageCode", language.code); + formData.set("bookIds", "1"); + formData.set("chapters", "1"); + formData.set("layout", "standard"); + + await expect(requestInterlinearExport(formData)).toBeNextjsNotFound(); + expect(enqueueJob).not.toHaveBeenCalled(); + }); + + test("creates export request with parsed options", async () => { + const scenario = await createScenario({ + users: { translator: {} }, + languages: { + language: { + members: [ + { + userId: "translator", + roles: [LanguageMemberRoleRaw.Translator], + }, + ], + }, + }, + }); + const user = scenario.users.translator; + const language = scenario.languages.language; + await logIn(user.id); + + await query( + `insert into book (id, name) values ($1, $2) on conflict (id) do nothing`, + [4, "Test Book"], + ); + await query( + `insert into verse (id, number, book_id, chapter) values ($1, $2, $3, $4) + on conflict (id) do nothing`, + ["4-1-1", 1, 4, 1], + ); + await query( + `insert into verse (id, number, book_id, chapter) values ($1, $2, $3, $4) + on conflict (id) do nothing`, + ["4-2-1", 1, 4, 2], + ); + + const formData = new FormData(); + formData.set("languageCode", language.code); + formData.set("bookIds", "4"); + formData.set("chapters", "1, 2"); + formData.set("layout", "parallel"); + + const response = await requestInterlinearExport(formData); + expect(response.state).toBe("success"); + expect(response.requestIds?.[0].id).toBeDefined(); + expect(enqueueJob).toHaveBeenCalledTimes(1); + expect(enqueueJob).toHaveBeenCalledWith("export_interlinear_pdf", { + books: [{ bookId: 4, chapters: [1, 2] }], + languageCode: language.code, + layout: 
"parallel", + requestId: response.requestIds?.[0].id, + }); + + const request = await findExportRequest(response.requestIds?.[0].id!); + expect(request).toMatchObject({ + id: response.requestIds?.[0].id, + languageId: language.id, + layout: "parallel", + status: "PENDING", + requestedBy: user.id, + }); + + const books = await findExportRequestBooks(response.requestIds?.[0].id!); + expect(books).toEqual([ + { requestId: response.requestIds?.[0].id, bookId: 4, chapters: [1, 2] }, + ]); + }); + + test("silently skips invalid chapters for selected books", async () => { + const scenario = await createScenario({ + users: { translator: {} }, + languages: { + language: { + members: [ + { + userId: "translator", + roles: [LanguageMemberRoleRaw.Translator], + }, + ], + }, + }, + }); + const user = scenario.users.translator; + const language = scenario.languages.language; + await logIn(user.id); + + await query( + `insert into book (id, name) values ($1, $2) on conflict (id) do nothing`, + [5, "Test Book"], + ); + await query( + `insert into verse (id, number, book_id, chapter) values ($1, $2, $3, $4) + on conflict (id) do nothing`, + ["5-1-1", 1, 5, 1], + ); + + const formData = new FormData(); + formData.set("languageCode", language.code); + formData.set("bookIds", "5"); + formData.set("chapters", "1,2"); + formData.set("layout", "standard"); + + const response = await requestInterlinearExport(formData); + expect(response.state).toBe("success"); + + const requestId = response.requestIds?.[0].id!; + expect(enqueueJob).toHaveBeenCalledWith("export_interlinear_pdf", { + books: [{ bookId: 5, chapters: [1] }], + languageCode: language.code, + layout: "standard", + requestId, + }); + + const books = await findExportRequestBooks(requestId); + expect(books).toEqual([{ requestId, bookId: 5, chapters: [1] }]); + }); + + test("returns a validation error when no chapters match", async () => { + const scenario = await createScenario({ + users: { translator: {} }, + languages: { + 
language: { + members: [ + { + userId: "translator", + roles: [LanguageMemberRoleRaw.Translator], + }, + ], + }, + }, + }); + const user = scenario.users.translator; + const language = scenario.languages.language; + await logIn(user.id); + + await query( + `insert into book (id, name) values ($1, $2) on conflict (id) do nothing`, + [6, "Test Book"], + ); + await query( + `insert into verse (id, number, book_id, chapter) values ($1, $2, $3, $4) + on conflict (id) do nothing`, + ["6-1-1", 1, 6, 1], + ); + + const formData = new FormData(); + formData.set("languageCode", language.code); + formData.set("bookIds", "6"); + formData.set("chapters", "99"); + formData.set("layout", "standard"); + + const response = await requestInterlinearExport(formData); + expect(response).toEqual({ + state: "error", + validation: { + chapters: ["No matching chapters found for the selected books."], + }, + }); + expect(enqueueJob).not.toHaveBeenCalled(); + }); + + test("defaults to all books and chapters when none provided", async () => { + const scenario = await createScenario({ + users: { translator: {} }, + languages: { + language: { + members: [ + { + userId: "translator", + roles: [LanguageMemberRoleRaw.Translator], + }, + ], + }, + }, + }); + const user = scenario.users.translator; + const language = scenario.languages.language; + await logIn(user.id); + + await query( + `insert into book (id, name) values (1, 'Book One'), (2, 'Book Two') + on conflict (id) do nothing`, + [], + ); + await query( + `insert into verse (id, number, book_id, chapter) values + ('1-1-1', 1, 1, 1), + ('1-1-2', 2, 1, 2), + ('2-1-1', 1, 2, 1) + on conflict (id) do nothing`, + [], + ); + + const formData = new FormData(); + formData.set("languageCode", language.code); + formData.set("layout", "standard"); + + const response = await requestInterlinearExport(formData); + expect(response.state).toBe("success"); + expect(response.requestIds).toHaveLength(1); + expect(enqueueJob).toHaveBeenCalledTimes(1); + + 
const requestIds = response.requestIds ?? []; + const requests = await Promise.all( + requestIds.map((r) => findExportRequest(r.id)), + ); + expect(requests).toEqual( + expect.arrayContaining([ + expect.objectContaining({ bookId: null, chapters: null }), + ]), + ); + + const books = await findExportRequestBooks(requestIds[0].id); + expect(books).toEqual([ + { requestId: requestIds[0].id, bookId: 1, chapters: [1, 2] }, + { requestId: requestIds[0].id, bookId: 2, chapters: [1] }, + ]); + }); +}); diff --git a/src/modules/export/actions/requestInterlinearExport.ts b/src/modules/export/actions/requestInterlinearExport.ts new file mode 100644 index 00000000..c103cc8c --- /dev/null +++ b/src/modules/export/actions/requestInterlinearExport.ts @@ -0,0 +1,238 @@ +"use server"; + +import * as z from "zod"; +import { notFound } from "next/navigation"; +import { getTranslations } from "next-intl/server"; +import { verifySession } from "@/session"; +import Policy from "@/modules/access/public/Policy"; +import { FormState } from "@/components/Form"; +import { serverActionLogger } from "@/server-action"; +import bookQueryService from "../data-access/BookQueryService"; +import exportRequestRepository from "../data-access/ExportRequestRepository"; +import languageLookupQueryService from "../data-access/LanguageLookupQueryService"; +import RequestInterlinearExport, { + ExportLanguageNotFoundError, + NoBooksAvailableForExportError, + NoChaptersAvailableForExportError, +} from "../use-cases/RequestInterlinearExport"; +import { enqueueJob } from "@/shared/jobs/enqueueJob"; +import { ExportLayout } from "../model"; + +const exportPolicy = new Policy({ + systemRoles: [Policy.SystemRole.Admin], + languageRoles: [Policy.LanguageRole.Admin, Policy.LanguageRole.Translator], +}); + +const requestSchema = z.object({ + languageCode: z.string().min(1), + layout: z.enum(["standard", "parallel"]).default("standard"), +}); + +type RequestInterlinearExportResult = FormState & { + requestIds?: { id: 
string; bookId: number | null }[]; +}; + +const requestInterlinearExportUseCase = new RequestInterlinearExport({ + bookQueryService, + languageLookupQueryService, + exportRequestRepository, + enqueueJob, +}); + +export async function requestInterlinearExport( + arg1: FormState | FormData, + arg2?: FormData, +): Promise { + const formData = arg2 ?? (arg1 as FormData); + const logger = serverActionLogger("requestInterlinearExport"); + const t = await getTranslations("InterlinearExport"); + + const session = await verifySession(); + const userId = session?.user.id; + if (!userId) notFound(); + + const parsed = requestSchema.safeParse( + { + languageCode: formData.get("languageCode"), + layout: formData.get("layout") ?? undefined, + }, + { + errorMap: (error) => { + if (error.path.toString() === "languageCode") { + return { message: t("errors.language_required") }; + } + return { message: t("errors.invalid") }; + }, + }, + ); + + if (!parsed.success) { + logger.error("request parse error"); + return { + state: "error", + validation: parsed.error.flatten().fieldErrors, + }; + } + + const authorized = await exportPolicy.authorize({ + actorId: userId, + languageCode: parsed.data.languageCode, + }); + + if (!authorized) { + logger.error("unauthorized"); + notFound(); + } + + const parsedChapters = parseChapters(formData.get("chapters")); + if (!parsedChapters.success) { + return { + state: "error", + validation: { chapters: [t(`errors.${parsedChapters.errorKey}`)] }, + }; + } + + const parsedBooks = parseBookIds(formData.get("bookIds")); + if (!parsedBooks.success) { + return { + state: "error", + validation: { bookIds: [t(`errors.${parsedBooks.errorKey}`)] }, + }; + } + + try { + const { requestId, bookId } = await requestInterlinearExportUseCase.execute( + { + languageCode: parsed.data.languageCode, + requestedBy: userId, + bookIds: parsedBooks.bookIds, + chapters: parsedChapters.chapters, + layout: parsed.data.layout, + }, + ); + + return { + state: "success", + 
requestIds: [{ id: requestId, bookId }], + }; + } catch (error) { + if (error instanceof ExportLanguageNotFoundError) { + return { + state: "error", + validation: { languageCode: [t("errors.language_not_found")] }, + }; + } + if (error instanceof NoBooksAvailableForExportError) { + return { + state: "error", + validation: { bookIds: [t("errors.no_books_available")] }, + }; + } + if (error instanceof NoChaptersAvailableForExportError) { + return { + state: "error", + validation: { chapters: [t("errors.no_chapters_available")] }, + }; + } + logger.error({ err: error }, "failed to request export"); + return { state: "error", error: t("errors.export_failed") }; + } +} + +type ChaptersParseErrorKey = + | "chapters_range_invalid" + | "chapters_numeric_or_ranges" + | "chapters_positive" + | "chapters_required_or_blank"; + +function parseChapters( + chapters: FormDataEntryValue | null, +): + | { success: true; chapters: number[] | null } + | { success: false; errorKey: ChaptersParseErrorKey } { + const raw = typeof chapters === "string" ? 
chapters : ""; + const trimmed = raw.trim(); + if (trimmed.length === 0) { + return { success: true, chapters: null }; + } + + const chaptersList: number[] = []; + const parts = trimmed + .split(",") + .map((part) => part.trim()) + .filter(Boolean); + + for (const part of parts) { + const rangeMatch = part.match(/^([0-9]+)-([0-9]+)$/); + if (rangeMatch) { + const start = Number(rangeMatch[1]); + const end = Number(rangeMatch[2]); + if (start <= 0 || end <= 0 || start > end) { + return { + success: false, + errorKey: "chapters_range_invalid", + }; + } + for (let i = start; i <= end; i += 1) { + chaptersList.push(i); + } + continue; + } + + if (!/^[0-9]+$/.test(part)) { + return { success: false, errorKey: "chapters_numeric_or_ranges" }; + } + const value = Number(part); + if (value <= 0) { + return { + success: false, + errorKey: "chapters_positive", + }; + } + chaptersList.push(value); + } + + if (chaptersList.length === 0) { + return { + success: false, + errorKey: "chapters_required_or_blank", + }; + } + + const unique = Array.from(new Set(chaptersList)).sort((a, b) => a - b); + return { success: true, chapters: unique }; +} + +type BookIdsParseErrorKey = "books_numeric_ids" | "books_required"; + +function parseBookIds( + bookIds: FormDataEntryValue | null, +): + | { success: true; bookIds: number[] | null } + | { success: false; errorKey: BookIdsParseErrorKey } { + const raw = typeof bookIds === "string" ? 
bookIds : ""; + const parts = raw + .split(",") + .map((part) => part.trim()) + .filter(Boolean); + + if (parts.length === 0) { + return { success: true, bookIds: null }; + } + + const invalid = parts.filter((part) => !/^[0-9]+$/.test(part)); + if (invalid.length > 0) { + return { success: false, errorKey: "books_numeric_ids" }; + } + + const unique = Array.from(new Set(parts.map((part) => Number(part)))).filter( + (id) => id > 0, + ); + if (unique.length === 0) { + return { success: false, errorKey: "books_required" }; + } + + return { success: true, bookIds: unique }; +} + +export default requestInterlinearExport; diff --git a/src/modules/export/data-access/BookQueryService.ts b/src/modules/export/data-access/BookQueryService.ts new file mode 100644 index 00000000..b342e0b8 --- /dev/null +++ b/src/modules/export/data-access/BookQueryService.ts @@ -0,0 +1,41 @@ +import { query } from "@/db"; + +export interface BookRow { + id: number; + name: string; +} + +export interface BookChaptersRow { + bookId: number; + chapters: number[]; +} + +const bookQueryService = { + async findAll(): Promise { + const result = await query( + `select id, name from book order by id asc`, + [], + ); + return result.rows; + }, + + async findChapters(bookIds: number[]): Promise { + if (bookIds.length === 0) return []; + + const result = await query( + ` + select book_id as "bookId", + array_agg(distinct chapter order by chapter) as chapters + from verse + where book_id = any($1) + group by book_id + order by book_id + `, + [bookIds], + ); + + return result.rows; + }, +}; + +export default bookQueryService; diff --git a/src/modules/export/data-access/ExportRequestRepository.ts b/src/modules/export/data-access/ExportRequestRepository.ts new file mode 100644 index 00000000..329281f4 --- /dev/null +++ b/src/modules/export/data-access/ExportRequestRepository.ts @@ -0,0 +1,180 @@ +import { query, transaction } from "@/db"; +import { ExportLayout, ExportRequestStatusRaw } from "../model"; + 
+export interface ExportBookSelection { + bookId: number; + chapters: number[]; +} + +export interface ExportRequestStatusResult { + id: string; + status: ExportRequestStatusRaw; + bookId: number | null; + downloadUrl: string | null; + expiresAt: Date | null; + languageCode: string; +} + +const exportRequestRepository = { + async createInterlinearRequest({ + requestId, + languageId, + requestedBy, + layout, + books, + }: { + requestId: string; + languageId: string; + requestedBy: string; + layout: ExportLayout; + books: ExportBookSelection[]; + }): Promise { + const singleBook = books.length === 1 ? books[0] : null; + + await transaction(async (q) => { + await q( + ` + insert into export_request ( + id, + language_id, + book_id, + chapters, + layout, + status, + requested_by, + requested_at + ) + values ( + $1, + $2, + $3, + $4, + $5, + $6, + $7, + now() + ) + `, + [ + requestId, + languageId, + singleBook?.bookId ?? null, + singleBook?.chapters ?? null, + layout, + ExportRequestStatusRaw.Pending, + requestedBy, + ], + ); + + for (const book of books) { + await q( + ` + insert into export_request_book (request_id, book_id, chapters) + values ($1, $2, $3) + `, + [requestId, book.bookId, book.chapters], + ); + } + }); + }, + + async findStatus( + requestId: string, + ): Promise { + const result = await query( + ` + select er.id, + er.status, + er.book_id as "bookId", + er.download_url as "downloadUrl", + er.expires_at as "expiresAt", + l.code as "languageCode" + from export_request er + join language l on l.id = er.language_id + where er.id = $1 + limit 1 + `, + [requestId], + ); + return result.rows[0]; + }, + + async loadBooks( + requestId: string, + ): Promise<{ bookId: number; chapters: number[] }[]> { + const result = await query<{ bookId: number; chapters: number[] }>( + ` + select erb.book_id as "bookId", + erb.chapters + from export_request_book erb + where erb.request_id = $1 + order by erb.book_id + `, + [requestId], + ); + return result.rows; + }, + + 
async markInProgress({ + requestId, + jobId, + exportKey, + }: { + requestId: string; + jobId: string; + exportKey: string; + }): Promise<{ exportKey: string }> { + const result = await query<{ exportKey: string }>( + ` + update export_request + set status = $1, + job_id = $2, + export_key = coalesce(export_key, $3) + where id = $4 + returning export_key as "exportKey" + `, + [ExportRequestStatusRaw.InProgress, jobId, exportKey, requestId], + ); + const row = result.rows[0]; + if (!row?.exportKey) { + throw new Error(`export request ${requestId} not found`); + } + return row; + }, + + async markComplete({ + requestId, + downloadUrl, + expiresAt, + }: { + requestId: string; + downloadUrl: string; + expiresAt: Date; + }): Promise { + await query( + ` + update export_request + set status = $1, + download_url = $2, + expires_at = $3, + completed_at = now() + where id = $4 + `, + [ExportRequestStatusRaw.Complete, downloadUrl, expiresAt, requestId], + ); + }, + + async markFailed(requestId: string): Promise { + await query( + ` + update export_request + set status = $1, + completed_at = now() + where id = $2 + `, + [ExportRequestStatusRaw.Failed, requestId], + ); + }, +}; + +export default exportRequestRepository; diff --git a/src/modules/export/data-access/ExportStorageRepository.ts b/src/modules/export/data-access/ExportStorageRepository.ts new file mode 100644 index 00000000..00744ad4 --- /dev/null +++ b/src/modules/export/data-access/ExportStorageRepository.ts @@ -0,0 +1,96 @@ +import { Upload } from "@aws-sdk/lib-storage"; +import { GetObjectCommand, DeleteObjectCommand } from "@aws-sdk/client-s3"; +import { Readable } from "stream"; +import { createLogger } from "@/logging"; +import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; +import { getS3Client, s3BodyToUint8Array } from "@/shared/s3"; + +const EXPORT_BUCKET_PREFIX = process.env.EXPORT_BUCKET_PREFIX ?? 
"gbt-exports"; + +const s3Client = getS3Client(); + +export interface ExportStorageOptions { + environment: "prod" | "local"; +} + +export const exportStorageRepository = { + async uploadPdf({ + environment, + key, + stream, + }: ExportStorageOptions & { + key: string; + stream: Readable; + }): Promise { + const bucket = `${EXPORT_BUCKET_PREFIX}-${environment}`; + const logger = createLogger({ bucket, key }); + + const upload = new Upload({ + client: s3Client, + params: { + Bucket: bucket, + Key: key, + Body: stream, + ContentType: "application/pdf", + }, + }); + + await upload.done(); + logger.info("Export PDF uploaded"); + + return `s3://${bucket}/${key}`; + }, + + bucketName(environment: "prod" | "local") { + return `${EXPORT_BUCKET_PREFIX}-${environment}`; + }, + + async presignPdf({ + environment, + key, + expiresInSeconds, + }: ExportStorageOptions & { + key: string; + expiresInSeconds: number; + }): Promise { + const bucket = `${EXPORT_BUCKET_PREFIX}-${environment}`; + const command = new GetObjectCommand({ Bucket: bucket, Key: key }); + const url = await getSignedUrl(s3Client, command, { + expiresIn: expiresInSeconds, + }); + const publicEndpoint = process.env.EXPORT_PUBLIC_S3_ENDPOINT; + if (publicEndpoint) { + try { + const parsed = new URL(url); + const target = new URL(publicEndpoint); + parsed.protocol = target.protocol; + parsed.host = target.host; + return parsed.toString(); + } catch { + return url; + } + } + return url; + }, + + async fetchBuffer({ + environment, + key, + }: ExportStorageOptions & { key: string }): Promise { + const bucket = `${EXPORT_BUCKET_PREFIX}-${environment}`; + const res = await s3Client.send( + new GetObjectCommand({ Bucket: bucket, Key: key }), + ); + return s3BodyToUint8Array(res.Body); + }, + + async deleteObject({ + environment, + key, + }: ExportStorageOptions & { key: string }): Promise { + const bucket = `${EXPORT_BUCKET_PREFIX}-${environment}`; + await s3Client.send(new DeleteObjectCommand({ Bucket: bucket, Key: key 
})); + }, +}; + +export default exportStorageRepository; diff --git a/src/modules/export/data-access/InterlinearCoverageQueryService.test.ts b/src/modules/export/data-access/InterlinearCoverageQueryService.test.ts new file mode 100644 index 00000000..7930e418 --- /dev/null +++ b/src/modules/export/data-access/InterlinearCoverageQueryService.test.ts @@ -0,0 +1,109 @@ +import { initializeDatabase } from "@/tests/vitest/dbUtils"; +import { describe, expect, test } from "vitest"; +import { createScenario } from "@/tests/scenarios"; +import { query } from "@/db"; +import interlinearCoverageQueryService from "./InterlinearCoverageQueryService"; + +initializeDatabase(); + +describe("interlinearCoverageQueryService", () => { + test("returns chapters with approved glosses only", async () => { + const scenario = await createScenario({ + languages: { language: {} }, + }); + const language = scenario.languages.language; + + await query( + `insert into book (id, name) values (1, 'Book One'), (2, 'Book Two')`, + [], + ); + await query( + ` + insert into verse (id, number, book_id, chapter) + values + ('1-1-1', 1, 1, 1), + ('1-2-1', 1, 1, 2), + ('2-1-1', 1, 2, 1) + `, + [], + ); + await query(`insert into lemma (id) values ('l1')`, []); + await query( + `insert into lemma_form (id, grammar, lemma_id) values ('f1', 'g', 'l1')`, + [], + ); + await query( + ` + insert into word (id, text, verse_id, form_id) + values + ('w1', 'a', '1-1-1', 'f1'), + ('w2', 'b', '1-2-1', 'f1'), + ('w3', 'c', '2-1-1', 'f1') + `, + [], + ); + + const phrase1 = await query<{ id: number }>( + `insert into phrase (language_id, created_at) values ($1, now()) returning id`, + [language.id], + ); + await query( + `insert into phrase_word (phrase_id, word_id) values ($1, $2)`, + [phrase1.rows[0].id, "w1"], + ); + await query( + `insert into gloss (gloss, state, phrase_id, updated_at) values ($1, $2, $3, now())`, + ["hello", "APPROVED", phrase1.rows[0].id], + ); + + const phrase2 = await query<{ id: number }>( + 
`insert into phrase (language_id, created_at) values ($1, now()) returning id`, + [language.id], + ); + await query( + `insert into phrase_word (phrase_id, word_id) values ($1, $2)`, + [phrase2.rows[0].id, "w2"], + ); + await query( + `insert into gloss (gloss, state, phrase_id, updated_at) values ($1, $2, $3, now())`, + ["hello", "APPROVED", phrase2.rows[0].id], + ); + + const phrase3 = await query<{ id: number }>( + `insert into phrase (language_id, created_at) values ($1, now()) returning id`, + [language.id], + ); + await query( + `insert into phrase_word (phrase_id, word_id) values ($1, $2)`, + [phrase3.rows[0].id, "w3"], + ); + await query( + `insert into gloss (gloss, state, phrase_id, updated_at) values ($1, $2, $3, now())`, + ["draft", "UNAPPROVED", phrase3.rows[0].id], + ); + + const phrase4 = await query<{ id: number }>( + ` + insert into phrase (language_id, created_at, deleted_at) + values ($1, now(), now()) + returning id + `, + [language.id], + ); + await query( + `insert into phrase_word (phrase_id, word_id) values ($1, $2)`, + [phrase4.rows[0].id, "w3"], + ); + await query( + `insert into gloss (gloss, state, phrase_id, updated_at) values ($1, $2, $3, now())`, + ["deleted", "APPROVED", phrase4.rows[0].id], + ); + + const result = + await interlinearCoverageQueryService.findApprovedGlossChapters( + language.id, + ); + + expect(result).toEqual([{ bookId: 1, chapters: [1, 2] }]); + }); +}); diff --git a/src/modules/export/data-access/InterlinearCoverageQueryService.ts b/src/modules/export/data-access/InterlinearCoverageQueryService.ts new file mode 100644 index 00000000..5aeab9cd --- /dev/null +++ b/src/modules/export/data-access/InterlinearCoverageQueryService.ts @@ -0,0 +1,38 @@ +import { query } from "@/db"; + +export interface BookChaptersRow { + bookId: number; + chapters: number[]; +} + +const interlinearCoverageQueryService = { + async findApprovedGlossChapters( + languageId: string, + ): Promise { + const result = await query( + ` + select + 
v.book_id as "bookId", + array_agg(distinct v.chapter order by v.chapter) as chapters + from phrase ph + join gloss g + on g.phrase_id = ph.id + and g.state = 'APPROVED' + join phrase_word phw + on phw.phrase_id = ph.id + join word w + on w.id = phw.word_id + join verse v + on v.id = w.verse_id + where ph.language_id = $1 + and ph.deleted_at is null + group by v.book_id + order by v.book_id + `, + [languageId], + ); + return result.rows; + }, +}; + +export default interlinearCoverageQueryService; diff --git a/src/modules/export/data-access/InterlinearQueryService.ts b/src/modules/export/data-access/InterlinearQueryService.ts new file mode 100644 index 00000000..b8c154b4 --- /dev/null +++ b/src/modules/export/data-access/InterlinearQueryService.ts @@ -0,0 +1,112 @@ +import { query } from "@/db"; +import { TextDirectionRaw } from "@/modules/languages/model"; + +export interface InterlinearWord { + id: string; + text: string; + gloss?: string; + linkedWords?: string[]; + lemma: string; + grammar: string; + footnote?: string; +} + +export interface InterlinearVerse { + id: string; + number: number; + words: InterlinearWord[]; +} + +export interface InterlinearChapterResult { + language: { + id: string; + code: string; + name: string; + textDirection: TextDirectionRaw; + }; + verses: InterlinearVerse[]; +} + +const interlinearQueryService = { + async fetchChapters( + bookId: number, + chapters: number[], + languageCode: string, + ): Promise { + const languageResult = await query<{ + id: string; + code: string; + name: string; + textDirection: TextDirectionRaw; + }>( + ` + select id, code, name, text_direction as "textDirection" + from language + where code = $1 + `, + [languageCode], + ); + + const language = languageResult.rows[0]; + if (!language) { + throw new Error(`Language ${languageCode} not found`); + } + + const versesResult = await query( + ` + SELECT + v.id, + v.number, + words.words + FROM verse AS v + JOIN LATERAL ( + SELECT 
json_agg(json_strip_nulls(json_build_object( + 'id', w.id, + 'text', w.text, + 'gloss', g.gloss, + 'linkedWords', ph.linked_words, + 'footnote', fn.content, + 'lemma', lf.lemma_id, + 'grammar', lf.grammar + )) ORDER BY w.id) AS words + FROM word AS w + LEFT JOIN LATERAL ( + SELECT ph.id, wds.words AS linked_words FROM phrase_word AS phw + JOIN phrase AS ph ON ph.id = phw.phrase_id + LEFT JOIN LATERAL ( + SELECT array_agg(phw2.word_id) AS words FROM phrase_word AS phw2 + WHERE phw2.phrase_id = ph.id + AND phw2.word_id != phw.word_id + GROUP BY phw2.phrase_id + ) AS wds ON true + WHERE phw.word_id = w.id + AND ph.deleted_at IS NULL + AND ph.language_id = (SELECT id FROM language WHERE code = $3) + ) AS ph ON true + LEFT JOIN gloss AS g ON g.phrase_id = ph.id AND g.state = 'APPROVED' + LEFT JOIN footnote AS fn ON fn.phrase_id = ph.id + JOIN lemma_form AS lf ON lf.id = w.form_id + WHERE w.verse_id = v.id + ) AS words ON true + WHERE v.book_id = $1 AND v.chapter = ANY($2) + ORDER BY v.chapter, v.number + `, + [bookId, chapters, languageCode], + ); + + return { + language, + verses: versesResult.rows, + }; + }, + + async fetchChapter( + bookId: number, + chapter: number, + languageCode: string, + ): Promise { + return this.fetchChapters(bookId, [chapter], languageCode); + }, +}; + +export default interlinearQueryService; diff --git a/src/modules/export/data-access/LanguageLookupQueryService.ts b/src/modules/export/data-access/LanguageLookupQueryService.ts new file mode 100644 index 00000000..ebc2f636 --- /dev/null +++ b/src/modules/export/data-access/LanguageLookupQueryService.ts @@ -0,0 +1,29 @@ +import { query } from "@/db"; +import { TextDirectionRaw } from "@/modules/languages/model"; + +export interface ExportLanguageRow { + id: string; + code: string; + name: string; + textDirection: TextDirectionRaw; +} + +const languageLookupQueryService = { + async findByCode(code: string): Promise { + const result = await query( + ` + select id, + code, + name, + text_direction 
as "textDirection" + from language + where code = $1 + limit 1 + `, + [code], + ); + return result.rows[0]; + }, +}; + +export default languageLookupQueryService; diff --git a/src/modules/export/jobs/cleanupExportsJob.ts b/src/modules/export/jobs/cleanupExportsJob.ts new file mode 100644 index 00000000..7f04549e --- /dev/null +++ b/src/modules/export/jobs/cleanupExportsJob.ts @@ -0,0 +1,74 @@ +import { logger } from "@/logging"; +import { query } from "@/db"; +import exportStorageRepository from "@/modules/export/data-access/ExportStorageRepository"; +import { Job } from "@/shared/jobs/model"; +import { EXPORT_JOB_TYPES } from "./jobTypes"; +import { getStorageEnvironment } from "@/shared/storageEnvironment"; + +const DEFAULT_EXPIRY_FALLBACK_DAYS = 7; + +function getExpiryFallbackDays(): number { + const raw = process.env.EXPORT_EXPIRY_FALLBACK_DAYS; + if (!raw) return DEFAULT_EXPIRY_FALLBACK_DAYS; + + const parsed = Number.parseInt(raw, 10); + if (!Number.isFinite(parsed) || parsed <= 0) { + return DEFAULT_EXPIRY_FALLBACK_DAYS; + } + return parsed; +} + +export async function cleanupExportsJob(job: Job) { + const jobLogger = logger.child({ + jobId: job.id, + jobType: EXPORT_JOB_TYPES.CLEANUP_EXPORTS, + }); + const environment = getStorageEnvironment(); + const expiryFallbackDays = getExpiryFallbackDays(); + + const expiredKeys = await query<{ + exportKey: string; + }>( + `with expired as ( + select id, export_key + from export_request + where export_key is not null + and ( + (expires_at is not null and expires_at < now()) + or (expires_at is null and requested_at < now() - ($1 * interval '1 day')) + ) + ) + update export_request er + set download_url = null, + export_key = null + from expired e + where er.id = e.id + returning e.export_key as "exportKey"`, + [expiryFallbackDays], + ); + + for (const row of expiredKeys.rows) { + try { + await exportStorageRepository.deleteObject({ + environment, + key: row.exportKey, + }); + jobLogger.info({ key: row.exportKey }, 
"Deleted expired export"); + } catch (error) { + jobLogger.error( + { err: error, key: row.exportKey }, + "Failed deleting expired export", + ); + } + } + + await query( + `delete from export_request + where coalesce(expires_at, requested_at) < now() - ($1 * interval '1 day')`, + [expiryFallbackDays], + ); + + return { deleted: expiredKeys.rows.length }; +} + +export default cleanupExportsJob; diff --git a/src/modules/export/jobs/cleanupExportsJob.unit.ts b/src/modules/export/jobs/cleanupExportsJob.unit.ts new file mode 100644 index 00000000..3b6e3629 --- /dev/null +++ b/src/modules/export/jobs/cleanupExportsJob.unit.ts @@ -0,0 +1,144 @@ +import "@/tests/vitest/mocks/nextjs"; +import { initializeDatabase } from "@/tests/vitest/dbUtils"; +import { beforeEach, describe, expect, test, vi } from "vitest"; +import cleanupExportsJob from "./cleanupExportsJob"; +import { query } from "@/db"; +import { createScenario } from "@/tests/scenarios"; +import { ulid } from "@/shared/ulid"; +import { JobStatus } from "@/shared/jobs/model"; +import { EXPORT_JOB_TYPES } from "./jobTypes"; + +const { mockDeleteObject } = vi.hoisted(() => ({ + mockDeleteObject: vi.fn(), +})); + +vi.mock("@/modules/export/data-access/ExportStorageRepository", () => { + const repo = { + deleteObject: mockDeleteObject, + uploadPdf: vi.fn(), + presignPdf: vi.fn(), + fetchBuffer: vi.fn(), + bucketName: vi.fn(), + }; + return { __esModule: true, exportStorageRepository: repo, default: repo }; +}); + +initializeDatabase(); + +async function insertExportRequest({ + id = ulid(), + languageId, + requestedBy, + exportKey, + downloadUrl, + expiresAt, + requestedAt = new Date(), + bookId = null, + chapters = [1], +}: { + id?: string; + languageId: string; + requestedBy: string; + exportKey: string | null; + downloadUrl: string | null; + expiresAt: Date | null; + requestedAt?: Date; + bookId?: number | null; + chapters?: number[]; +}): Promise { + await query( + `insert into export_request ( + id, language_id, 
book_id, chapters, layout, status, requested_by, requested_at, export_key, download_url, expires_at + ) values ($1, $2, $3, $4, 'standard', 'COMPLETE', $5, $6, $7, $8, $9)`, + [ + id, + languageId, + bookId, + chapters, + requestedBy, + requestedAt, + exportKey, + downloadUrl, + expiresAt, + ], + ); + return id; +} + +describe("cleanupExportsJob", () => { + beforeEach(async () => { + mockDeleteObject.mockReset(); + await query(`delete from export_request`, []); + }); + + test("removes expired exports from storage and clears DB fields", async () => { + const scenario = await createScenario({ + users: { user: {} }, + languages: { language: {} }, + }); + const language = scenario.languages.language; + const user = scenario.users.user; + + const exportId = await insertExportRequest({ + languageId: language.id, + requestedBy: user.id, + exportKey: "path/to/export.pdf", + downloadUrl: "https://example.com/export.pdf", + expiresAt: new Date(Date.now() - 1000), + }); + + await cleanupExportsJob({ + id: "job-1", + type: EXPORT_JOB_TYPES.CLEANUP_EXPORTS, + payload: undefined, + createdAt: new Date(), + status: JobStatus.Pending, + updatedAt: new Date(), + }); + + expect(mockDeleteObject).toHaveBeenCalledWith( + expect.objectContaining({ key: "path/to/export.pdf" }), + ); + const result = await query( + `select export_key, download_url from export_request where id = $1`, + [exportId], + ); + expect(result.rows[0]).toEqual({ export_key: null, download_url: null }); + }); + + test("uses fallback when expires_at is missing", async () => { + const scenario = await createScenario({ + users: { user: {} }, + languages: { language: {} }, + }); + const language = scenario.languages.language; + const user = scenario.users.user; + const oldDate = new Date(Date.now() - 8 * 24 * 60 * 60 * 1000); + const exportId = await insertExportRequest({ + languageId: language.id, + requestedBy: user.id, + exportKey: "path/to/stale.pdf", + downloadUrl: "https://example.com/stale.pdf", + expiresAt: 
null, + requestedAt: oldDate, + }); + + await cleanupExportsJob({ + id: "job-2", + type: EXPORT_JOB_TYPES.CLEANUP_EXPORTS, + payload: undefined, + createdAt: new Date(), + status: JobStatus.Pending, + updatedAt: new Date(), + }); + + expect(mockDeleteObject).toHaveBeenCalledWith( + expect.objectContaining({ key: "path/to/stale.pdf" }), + ); + const remaining = await query( + `select count(*)::int as count from export_request where id = $1`, + [exportId], + ); + expect(remaining.rows[0].count).toBe(0); + }); +}); diff --git a/src/modules/export/jobs/exportInterlinearMerge.ts b/src/modules/export/jobs/exportInterlinearMerge.ts new file mode 100644 index 00000000..b2682453 --- /dev/null +++ b/src/modules/export/jobs/exportInterlinearMerge.ts @@ -0,0 +1,47 @@ +import { PDFDocument } from "pdf-lib"; +import exportStorageRepository from "@/modules/export/data-access/ExportStorageRepository"; +import { Readable } from "stream"; + +export async function mergePdfs({ + environment, + partKeys, + targetKey, +}: { + environment: "prod" | "local"; + partKeys: string[]; + targetKey: string; +}): Promise<{ uploaded: boolean; pages: number }> { + const uniquePartKeys = Array.from(new Set(partKeys)); + const mergedPdf = await PDFDocument.create(); + let mergedPages = 0; + + for (const key of uniquePartKeys) { + const bytes = await exportStorageRepository.fetchBuffer({ + environment, + key, + }); + if (!bytes || bytes.byteLength === 0) continue; + const partPdf = await PDFDocument.load(bytes); + const copiedPages = await mergedPdf.copyPages( + partPdf, + partPdf.getPageIndices(), + ); + copiedPages.forEach((p) => mergedPdf.addPage(p)); + mergedPages += copiedPages.length; + } + + if (mergedPages === 0) { + return { uploaded: false, pages: 0 }; + } + + const mergedBytes = await mergedPdf.save(); + await exportStorageRepository.uploadPdf({ + environment, + key: targetKey, + stream: Readable.from([mergedBytes]), + }); + + return { uploaded: true, pages: mergedPages }; +} + +export 
default mergePdfs; diff --git a/src/modules/export/jobs/exportInterlinearMerge.unit.ts b/src/modules/export/jobs/exportInterlinearMerge.unit.ts new file mode 100644 index 00000000..0ce14213 --- /dev/null +++ b/src/modules/export/jobs/exportInterlinearMerge.unit.ts @@ -0,0 +1,123 @@ +import { mergePdfs } from "./exportInterlinearMerge"; +import { describe, expect, it, vi, beforeEach } from "vitest"; +import { PDFDocument } from "pdf-lib"; +import { Readable } from "stream"; + +const { mockFetchBuffer, mockUploadPdf } = vi.hoisted(() => { + return { + mockFetchBuffer: vi.fn(), + mockUploadPdf: vi.fn(), + }; +}); + +vi.mock("@/modules/export/data-access/ExportStorageRepository", () => { + const repo = { + fetchBuffer: mockFetchBuffer, + uploadPdf: mockUploadPdf, + deleteObject: vi.fn(), + presignPdf: vi.fn(), + bucketName: vi.fn(), + }; + return { __esModule: true, exportStorageRepository: repo, default: repo }; +}); + +describe("mergePdfs", () => { + beforeEach(() => { + mockFetchBuffer.mockReset(); + mockUploadPdf.mockReset(); + }); + + it("merges available parts in order", async () => { + const partOne = await createPdfWithPages(1); + const partTwo = await createPdfWithPages(2); + mockFetchBuffer + .mockResolvedValueOnce(partOne) + .mockResolvedValueOnce(partTwo); + + let uploadedBytes: Uint8Array | undefined; + mockUploadPdf.mockImplementation(async ({ stream }) => { + uploadedBytes = await streamToBuffer(stream as Readable); + return "s3://bucket/final.pdf"; + }); + + const result = await mergePdfs({ + environment: "local", + partKeys: ["part-a.pdf", "part-b.pdf"], + targetKey: "final.pdf", + }); + + expect(result).toEqual({ uploaded: true, pages: 3 }); + expect(mockUploadPdf).toHaveBeenCalledWith( + expect.objectContaining({ + environment: "local", + key: "final.pdf", + }), + ); + expect(mockFetchBuffer).toHaveBeenCalledTimes(2); + expect(uploadedBytes).toBeDefined(); + + const merged = await PDFDocument.load(uploadedBytes!); + 
expect(merged.getPageCount()).toBe(3); + }); + + it("skips missing parts without uploading", async () => { + mockFetchBuffer.mockResolvedValueOnce(undefined); + + const result = await mergePdfs({ + environment: "local", + partKeys: ["missing.pdf"], + targetKey: "final.pdf", + }); + + expect(result).toEqual({ uploaded: false, pages: 0 }); + expect(mockUploadPdf).not.toHaveBeenCalled(); + }); + + it("deduplicates repeated part keys while preserving order", async () => { + const partOne = await createPdfWithPages(1); + const partTwo = await createPdfWithPages(1); + mockFetchBuffer + .mockResolvedValueOnce(partOne) + .mockResolvedValueOnce(partTwo); + + const result = await mergePdfs({ + environment: "local", + partKeys: ["part-a.pdf", "part-a.pdf", "part-b.pdf", "part-a.pdf"], + targetKey: "final.pdf", + }); + + expect(result).toEqual({ uploaded: true, pages: 2 }); + expect(mockFetchBuffer).toHaveBeenCalledTimes(2); + expect(mockFetchBuffer.mock.calls[0][0]).toEqual( + expect.objectContaining({ key: "part-a.pdf" }), + ); + expect(mockFetchBuffer.mock.calls[1][0]).toEqual( + expect.objectContaining({ key: "part-b.pdf" }), + ); + }); +}); + +async function createPdfWithPages(count: number): Promise { + const pdf = await PDFDocument.create(); + for (let i = 0; i < count; i++) { + pdf.addPage(); + } + const bytes = await pdf.save(); + return bytes; +} + +async function streamToBuffer(stream: Readable): Promise { + const parts: Uint8Array[] = []; + for await (const chunk of stream) { + parts.push(chunk instanceof Uint8Array ? 
chunk : Uint8Array.from(chunk)); + } + const merged = new Uint8Array( + parts.reduce((sum, p) => sum + p.byteLength, 0), + ); + let offset = 0; + for (const part of parts) { + merged.set(part, offset); + offset += part.byteLength; + } + return merged; +} diff --git a/src/modules/export/jobs/exportInterlinearPdfJob.ts b/src/modules/export/jobs/exportInterlinearPdfJob.ts new file mode 100644 index 00000000..d36c115d --- /dev/null +++ b/src/modules/export/jobs/exportInterlinearPdfJob.ts @@ -0,0 +1,182 @@ +import { Job } from "@/shared/jobs/model"; +import { logger } from "@/logging"; +import interlinearQueryService from "@/modules/export/data-access/InterlinearQueryService"; +import { generateInterlinearPdf } from "@/modules/export/pdf/InterlinearPdfGenerator"; +import { ExportLayout } from "@/modules/export/model"; +import exportStorageRepository from "@/modules/export/data-access/ExportStorageRepository"; +import type { Logger } from "pino"; +import { detectScript } from "@/shared/scriptDetection"; +import bookQueryService from "@/modules/export/data-access/BookQueryService"; +import mergePdfs from "./exportInterlinearMerge"; +import exportRequestRepository from "../data-access/ExportRequestRepository"; +import { EXPORT_JOB_TYPES } from "./jobTypes"; +import { getStorageEnvironment } from "@/shared/storageEnvironment"; + +interface ExportInterlinearPayload { + requestId: string; + languageCode: string; + books: { bookId: number; chapters: number[] }[]; + layout: ExportLayout; + exportKey?: string; +} + +export async function exportInterlinearPdfJob( + job: Job, +): Promise<{ url?: string; expiresAt?: Date }> { + const jobLogger = logger.child({ jobId: job.id, jobType: job.type }); + + const environment = getStorageEnvironment(); + + const { + languageCode, + books, + layout, + requestId, + exportKey: incomingExportKey, + } = job.payload; + + if (job.type !== EXPORT_JOB_TYPES.EXPORT_INTERLINEAR_PDF) { + jobLogger.error( + `received job type ${job.type}, expected 
${EXPORT_JOB_TYPES.EXPORT_INTERLINEAR_PDF}`, + ); + throw new Error( + `Expected job type ${EXPORT_JOB_TYPES.EXPORT_INTERLINEAR_PDF}, but received ${job.type}`, + ); + } + + const partKeys: string[] = []; + try { + const stableExportKey = + incomingExportKey ?? `interlinear/${languageCode}/${requestId}.pdf`; + const { exportKey } = await exportRequestRepository.markInProgress({ + requestId, + jobId: job.id, + exportKey: stableExportKey, + }); + + const booksPayload = + books && books.length > 0 ? + books + : await exportRequestRepository.loadBooks(requestId); + + if (booksPayload.length === 0) { + throw new Error("No chapters found for export"); + } + + const booksById = new Map( + (await bookQueryService.findAll()).map((book) => [book.id, book.name]), + ); + + for (const { bookId, chapters } of booksPayload) { + if (!chapters.length) continue; + + const bookName = booksById.get(bookId) ?? `Book ${bookId}`; + + const chapterData = await interlinearQueryService.fetchChapters( + bookId, + chapters, + languageCode, + ); + + const sampleText = + chapterData.verses?.[0]?.words?.[0]?.text ?? + chapterData.verses?.[0]?.words?.[0]?.gloss ?? + ""; + const sourceScript = detectScript(sampleText); + + const glossLanguageName = chapterData.language.name; + const titleLayout = layout === "parallel" ? "Parallel" : "Interlinear"; + const sourceLanguageLabel = + sourceScript === "hebrew" ? "Hebrew" + : sourceScript === "greek" ? "Greek" + : "Original"; + const chapterLabel = + chapters.length === 1 ? + `Chapter ${chapters[0]}` + : `Chapters ${chapters[0]}-${chapters[chapters.length - 1]}`; + + const { stream } = generateInterlinearPdf(chapterData, { + layout, + pageSize: "letter", + direction: chapterData.language.textDirection, + sourceScript, + header: { + title: `${glossLanguageName}/${sourceLanguageLabel} ${titleLayout}`, + subtitle: `${bookName} - ${chapterLabel}`, + }, + footer: { + generatedAt: job.createdAt ?? 
new Date(), + pageOffset: 0, + }, + }); + + const partKey = partKeyForBook(exportKey, bookId); + await exportStorageRepository.uploadPdf({ + environment, + key: partKey, + stream, + }); + partKeys.push(partKey); + } + + const mergeResult = await mergePdfs({ + environment, + partKeys, + targetKey: exportKey, + }); + if (!mergeResult.uploaded) { + throw new Error("No PDF parts available to merge"); + } + + const presigned = await exportStorageRepository.presignPdf({ + environment, + key: exportKey, + expiresInSeconds: 60 * 60 * 24, + }); + const expiresAt = new Date(Date.now() + 24 * 60 * 60 * 1000); + + await exportRequestRepository.markComplete({ + requestId, + downloadUrl: presigned, + expiresAt, + }); + + jobLogger.info({ url: presigned }, "Interlinear PDF export complete"); + + return { url: presigned, expiresAt }; + } catch (error) { + jobLogger.error({ err: error }, "Interlinear PDF export failed"); + await exportRequestRepository.markFailed(requestId); + throw error; + } finally { + if (partKeys.length > 0) { + await cleanupParts(partKeys, environment, jobLogger); + } + } +} + +export default exportInterlinearPdfJob; + +function partKeyForBook(exportKey: string, bookId: number): string { + const base = exportKey.replace(/\.pdf$/i, ""); + return `${base}-book-${bookId}.pdf`; +} + +async function cleanupParts( + partKeys: string[], + environment: "prod" | "local", + jobLogger: Logger, +) { + await Promise.all( + partKeys.map(async (key) => { + try { + await exportStorageRepository.deleteObject({ environment, key }); + } catch (error) { + jobLogger.warn( + { err: error, key }, + "Failed to delete part after merge", + ); + } + }), + ); +} diff --git a/src/modules/export/jobs/exportInterlinearPdfJob.unit.ts b/src/modules/export/jobs/exportInterlinearPdfJob.unit.ts new file mode 100644 index 00000000..90079040 --- /dev/null +++ b/src/modules/export/jobs/exportInterlinearPdfJob.unit.ts @@ -0,0 +1,157 @@ +import { Readable } from "stream"; +import { describe, expect, 
it, beforeEach, afterEach, vi } from "vitest"; +import { exportInterlinearPdfJob } from "./exportInterlinearPdfJob"; +import { PDFDocument } from "pdf-lib"; +import { JobStatus } from "@/shared/jobs/model"; +import { EXPORT_JOB_TYPES } from "./jobTypes"; + +const { + mockQuery, + mockFetchChapters, + mockUploadPdf, + mockPresignPdf, + mockDeleteObject, + mockFetchBuffer, + mockMergePdfs, + mockGenerateInterlinearPdf, +} = vi.hoisted(() => { + return { + mockQuery: vi.fn(), + mockFetchChapters: vi.fn(), + mockUploadPdf: vi.fn(), + mockPresignPdf: vi.fn(), + mockDeleteObject: vi.fn(), + mockFetchBuffer: vi.fn(), + mockMergePdfs: vi.fn(), + mockGenerateInterlinearPdf: vi.fn(), + }; +}); + +vi.mock("@/db", () => ({ query: mockQuery })); +vi.mock("@/modules/export/data-access/InterlinearQueryService", () => { + return { + __esModule: true, + default: { + fetchChapters: mockFetchChapters, + }, + }; +}); +vi.mock("@/modules/export/data-access/ExportStorageRepository", () => { + const repo = { + uploadPdf: mockUploadPdf, + presignPdf: mockPresignPdf, + deleteObject: mockDeleteObject, + fetchBuffer: mockFetchBuffer, + }; + return { __esModule: true, exportStorageRepository: repo, default: repo }; +}); +vi.mock("./exportInterlinearMerge", () => ({ + __esModule: true, + default: mockMergePdfs, + mergePdfs: mockMergePdfs, +})); +vi.mock("@/modules/export/pdf/InterlinearPdfGenerator", () => ({ + generateInterlinearPdf: mockGenerateInterlinearPdf, +})); + +const baseJob = { + id: "job-1", + createdAt: new Date(), + updatedAt: new Date(), + status: JobStatus.Pending, + type: EXPORT_JOB_TYPES.EXPORT_INTERLINEAR_PDF, + payload: { + requestId: "req-1", + languageCode: "spa", + books: [{ bookId: 1, chapters: [1, 2] }], + layout: "standard" as const, + }, +}; + +async function createPdfWithPages(count: number) { + const pdf = await PDFDocument.create(); + for (let i = 0; i < count; i += 1) { + pdf.addPage(); + } + return Buffer.from(await pdf.save()); +} + 
+describe("exportInterlinearPdfJob", () => { + beforeEach(async () => { + vi.useFakeTimers(); + mockQuery.mockReset(); + mockUploadPdf.mockReset(); + mockPresignPdf.mockReset(); + mockDeleteObject.mockReset(); + mockFetchBuffer.mockReset(); + mockMergePdfs.mockReset(); + mockGenerateInterlinearPdf.mockReset(); + + const partBytes = await createPdfWithPages(1); + + mockQuery.mockImplementation(async (text: string, params: any[]) => { + if (text.includes("returning export_key")) { + return { rows: [{ exportKey: params[2] }] }; + } + if (text.includes("select id, name from book")) { + return { rows: [{ id: 1, name: "Genesis" }] }; + } + return { rows: [] }; + }); + mockFetchChapters.mockResolvedValue({ + language: { textDirection: "LTR", name: "Test Language" }, + verses: [ + { chapter: 1, number: 1, words: [{ text: "a", gloss: "a" }] }, + { chapter: 2, number: 1, words: [{ text: "b", gloss: "b" }] }, + ], + }); + mockGenerateInterlinearPdf.mockImplementation(() => ({ + stream: Readable.from([partBytes]), + })); + mockPresignPdf.mockResolvedValue("https://example.com/final.pdf"); + mockMergePdfs.mockResolvedValue({ uploaded: true, pages: 3 }); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it("processes multi-chapter exports in one job and merges parts in order", async () => { + await exportInterlinearPdfJob(baseJob); + + expect(mockUploadPdf).toHaveBeenCalledTimes(baseJob.payload.books.length); + + const uploadedKeys = mockUploadPdf.mock.calls.map((c) => c[0]?.key); + uploadedKeys.forEach((key) => expect(key).toMatch(/-book-1\.pdf$/)); + + expect(mockMergePdfs).toHaveBeenCalledWith( + expect.objectContaining({ + partKeys: uploadedKeys, + }), + ); + expect(mockPresignPdf).toHaveBeenCalledWith( + expect.objectContaining({ key: "interlinear/spa/req-1.pdf" }), + ); + expect(mockDeleteObject).toHaveBeenCalledTimes(uploadedKeys.length); + }); + + it("marks requests as failed and cleans up parts on errors", async () => { + 
mockUploadPdf.mockImplementationOnce(async () => { + throw new Error("upload failed"); + }); + + const failingJob = { + ...baseJob, + payload: { + ...baseJob.payload, + books: [{ bookId: 1, chapters: [1] }], + }, + }; + + await expect(exportInterlinearPdfJob(failingJob)).rejects.toThrow(); + + const failureUpdate = mockQuery.mock.calls[mockQuery.mock.calls.length - 1]; + expect(failureUpdate[1]).toEqual(["FAILED", failingJob.payload.requestId]); + expect(mockDeleteObject).not.toHaveBeenCalled(); + }); +}); diff --git a/src/modules/export/jobs/jobTypes.ts b/src/modules/export/jobs/jobTypes.ts new file mode 100644 index 00000000..a46f3546 --- /dev/null +++ b/src/modules/export/jobs/jobTypes.ts @@ -0,0 +1,4 @@ +export const EXPORT_JOB_TYPES = { + EXPORT_INTERLINEAR_PDF: "export_interlinear_pdf", + CLEANUP_EXPORTS: "cleanup_exports", +}; diff --git a/src/modules/export/model.ts b/src/modules/export/model.ts new file mode 100644 index 00000000..c432f6e5 --- /dev/null +++ b/src/modules/export/model.ts @@ -0,0 +1,29 @@ +export type ExportLayout = "standard" | "parallel"; + +export enum ExportRequestStatusRaw { + Pending = "PENDING", + InProgress = "IN_PROGRESS", + Complete = "COMPLETE", + Failed = "FAILED", +} + +export type ExportRequestStatus = + | ExportRequestStatusRaw.Pending + | ExportRequestStatusRaw.InProgress + | ExportRequestStatusRaw.Complete + | ExportRequestStatusRaw.Failed; + +export interface ExportRequest { + id: string; + languageId: string; + bookId?: number; + chapters?: number[]; + layout: ExportLayout; + status: ExportRequestStatus; + jobId?: string; + downloadUrl?: string; + expiresAt?: Date; + requestedBy: string; + requestedAt: Date; + completedAt?: Date; +} diff --git a/src/modules/export/pdf/InterlinearPdfGenerator.ts b/src/modules/export/pdf/InterlinearPdfGenerator.ts new file mode 100644 index 00000000..3165b926 --- /dev/null +++ b/src/modules/export/pdf/InterlinearPdfGenerator.ts @@ -0,0 +1,445 @@ +import PDFDocument from "pdfkit"; +import { 
Readable } from "stream"; +import { + InterlinearChapterResult, + InterlinearVerse, +} from "../data-access/InterlinearQueryService"; +import path from "path"; +import fs from "fs"; + +export interface PdfGeneratorOptions { + pageSize?: "letter" | "a4"; + layout: "standard" | "parallel"; + direction?: "ltr" | "rtl"; + header?: { title?: string; subtitle?: string }; + footer?: { generatedAt?: Date; pageOffset?: number; pageTotal?: number }; + sourceScript?: "hebrew" | "greek"; +} + +export interface GeneratedPdf { + stream: Readable; + pageCount: number; +} + +export function generateInterlinearPdf( + chapter: InterlinearChapterResult, + options: PdfGeneratorOptions, +): GeneratedPdf { + const normalizedDirection = + (options.direction ?? "ltr").toLowerCase() === "rtl" ? "rtl" : "ltr"; + // Resolve fonts packaged with the worker bundle; fallback to local src/fonts for dev + const fontCandidates = [ + "/var/task/fonts", + path.join(process.cwd(), "fonts"), + path.join(__dirname, "fonts"), + path.join(process.cwd(), "dist", "fonts"), + path.join(process.cwd(), "src", "fonts"), + ]; + const fontBase = fontCandidates.find((p) => fs.existsSync(p)); + if (!fontBase) { + throw new Error( + `Font directory not found. Tried: ${fontCandidates.join(", ")}`, + ); + } + const hebrewFontPath = path.join(fontBase, "SBL_Hbrw.ttf"); + const greekFontPath = path.join(fontBase, "SBL_grk.ttf"); + if (!fs.existsSync(hebrewFontPath) || !fs.existsSync(greekFontPath)) { + throw new Error( + `SBL fonts not found. Expected at ${hebrewFontPath} and ${greekFontPath}`, + ); + } + + const needsBufferedPages = + typeof options.footer?.pageTotal === "number" && + Number.isFinite(options.footer.pageTotal); + + const doc = new PDFDocument({ + size: options.pageSize ?? 
"letter", + margin: 56, + bufferPages: needsBufferedPages, + }); + + const hebrewFont = fs.readFileSync(path.join(fontBase, "SBL_Hbrw.ttf")); + const greekFont = fs.readFileSync(path.join(fontBase, "SBL_grk.ttf")); + doc.registerFont("SBLHebrew", hebrewFont); + doc.registerFont("SBLGreek", greekFont); + const primaryFont = + options.sourceScript === "hebrew" ? "SBLHebrew" + : options.sourceScript === "greek" ? "SBLGreek" + : normalizedDirection === "rtl" ? "SBLHebrew" + : "SBLGreek"; + const glossFont = "Helvetica"; + const alignment = + options.sourceScript === "hebrew" || normalizedDirection === "rtl" ? + "right" + : "left"; + const stream = doc as unknown as Readable; + + let pageCount = 1; + if (!needsBufferedPages) { + renderFooter(doc, 1, options.footer); + doc.on("pageAdded", () => { + pageCount += 1; + renderFooter(doc, pageCount, options.footer); + }); + } + + renderHeader(doc, primaryFont, glossFont, options.header, alignment); + + const contentWidth = + doc.page.width - doc.page.margins.left - doc.page.margins.right; + const verseSpacing = options.layout === "parallel" ? 
1.2 : 0.5; + + for (const verse of chapter.verses) { + ensureSpace(doc, 40); + renderVerse( + doc, + verse, + options.layout, + primaryFont, + glossFont, + alignment, + contentWidth, + ); + doc.moveDown(verseSpacing); + } + + if (needsBufferedPages) { + pageCount = addFooter(doc, options.footer); + } + doc.end(); + return { stream, pageCount }; +} + +function renderVerse( + doc: PDFKit.PDFDocument, + verse: InterlinearVerse, + layout: "standard" | "parallel", + font: string, + glossFont: string, + alignment: "left" | "right", + contentWidth: number, +) { + doc + .font(glossFont) + .fontSize(10) + .fillColor("#444") + .text(`Verse ${verse.number}`, { + continued: false, + underline: true, + align: alignment, + }) + .fillColor("#000"); + doc.moveDown(0.2); + + if (layout === "standard") { + renderInterlinearLine( + doc, + verse.words, + font, + glossFont, + alignment, + contentWidth, + ); + } else { + renderParallelVerse(doc, verse, font, glossFont, alignment, contentWidth); + } +} + +function renderInterlinearLine( + doc: PDFKit.PDFDocument, + words: InterlinearVerse["words"], + primaryFont: string, + glossFont: string, + alignment: "left" | "right", + contentWidth: number, +) { + const glossFontSize = 12; + const primaryFontSize = 24; + const columnPadding = 12; + const orderedWords = alignment === "right" ? [...words].reverse() : words; + + type WordColumn = { + word: InterlinearVerse["words"][number]; + gloss: string; + columnWidth: number; + }; + + const measuredWords: WordColumn[] = orderedWords.map((word) => { + const gloss = formatGloss(word); + doc.font(glossFont).fontSize(glossFontSize); + const glossWidth = gloss ? 
doc.widthOfString(gloss) : 0; + doc.font(primaryFont).fontSize(primaryFontSize); + const primaryWidth = doc.widthOfString(word.text); + const columnWidth = Math.min( + Math.max(glossWidth, primaryWidth) + columnPadding, + contentWidth, + ); + + return { word, gloss, columnWidth }; + }); + + const lines: WordColumn[][] = []; + let currentLine: WordColumn[] = []; + let currentWidth = 0; + + for (const item of measuredWords) { + const nextWidth = currentWidth + item.columnWidth; + if (currentLine.length > 0 && nextWidth > contentWidth) { + lines.push(currentLine); + currentLine = [item]; + currentWidth = item.columnWidth; + } else { + currentLine.push(item); + currentWidth = nextWidth; + } + } + if (currentLine.length > 0) { + lines.push(currentLine); + } + + for (const line of lines) { + const lineWidth = line.reduce((sum, item) => sum + item.columnWidth, 0); + const startX = + alignment === "right" ? + doc.page.width - doc.page.margins.right - lineWidth + : doc.page.margins.left; + + const primaryHeights = line.map((item) => { + doc.font(primaryFont).fontSize(primaryFontSize); + return doc.heightOfString(item.word.text, { + width: item.columnWidth, + align: "center", + }); + }); + const maxPrimaryHeight = + primaryHeights.length ? Math.max(...primaryHeights) : 0; + + const glossHeights = line.map((item) => { + doc.font(glossFont).fontSize(glossFontSize); + return doc.heightOfString(item.gloss || " ", { + width: item.columnWidth, + align: "center", + }); + }); + const maxGlossHeight = glossHeights.length ? 
Math.max(...glossHeights) : 0; + + const lineHeight = maxPrimaryHeight + maxGlossHeight + 12; + ensureSpace(doc, lineHeight); + + const glossY = doc.y; + const primaryY = glossY; + const glossBelowY = primaryY + maxPrimaryHeight + 4; + + doc.fillColor("#000").font(primaryFont).fontSize(primaryFontSize); + let cursorX = startX; + for (const item of line) { + doc.text(item.word.text, cursorX, primaryY, { + width: item.columnWidth, + align: "center", + }); + cursorX += item.columnWidth; + } + + doc.font(glossFont).fontSize(glossFontSize).fillColor("#444"); + cursorX = startX; + for (const item of line) { + if (item.gloss) { + doc.text(item.gloss, cursorX, glossBelowY, { + width: item.columnWidth, + align: "center", + }); + } + cursorX += item.columnWidth; + } + + doc.y = glossBelowY + maxGlossHeight + 6; + } +} + +function renderParallelVerse( + doc: PDFKit.PDFDocument, + verse: InterlinearVerse, + primaryFont: string, + glossFont: string, + alignment: "left" | "right", + contentWidth: number, +) { + const primaryFontSize = 22; + const glossFontSize = 14; + const columnGap = 32; + const glossColumnWidth = contentWidth * 0.45; + const primaryColumnWidth = contentWidth - glossColumnWidth - columnGap; + + const primaryText = verse.words + .map((w) => w.text) + .join(alignment === "right" ? 
" " : " "); + const glossText = verse.words + .map((w) => formatParagraphDetail(w)) + .filter(Boolean) + .join(" "); + + const estimatedHeight = + doc.heightOfString(primaryText, { + width: primaryColumnWidth, + align: alignment, + }) + + doc.heightOfString(glossText, { + width: glossColumnWidth, + align: "left", + }) + + 20; + ensureSpace(doc, estimatedHeight); + + const startX = doc.page.margins.left; + const glossX = startX; + const primaryX = glossX + glossColumnWidth + columnGap; + + const primaryY = doc.y; + doc + .font(primaryFont) + .fontSize(primaryFontSize) + .fillColor("#000") + .text(primaryText, primaryX, primaryY, { + width: primaryColumnWidth, + align: alignment, + }); + + const glossY = primaryY; + doc + .font(glossFont) + .fontSize(glossFontSize) + .fillColor("#444") + .text(glossText, glossX, glossY, { + width: glossColumnWidth, + align: "left", + }); + + const primaryHeight = doc.heightOfString(primaryText, { + width: primaryColumnWidth, + align: alignment, + }); + const glossHeight = doc.heightOfString(glossText, { + width: glossColumnWidth, + align: "left", + }); + + doc.y = Math.max(primaryY + primaryHeight, glossY + glossHeight) + 10; +} + +function addFooter( + doc: PDFKit.PDFDocument, + footer?: { generatedAt?: Date; pageOffset?: number; pageTotal?: number }, +) { + const pageRange = doc.bufferedPageRange(); + const pageOffset = footer?.pageOffset ?? 0; + for (let i = 0; i < pageRange.count; i++) { + doc.switchToPage(i); + const y = doc.page.height - doc.page.margins.bottom - 10; + const width = + doc.page.width - doc.page.margins.left - doc.page.margins.right; + const pageNumber = pageOffset + i + 1; + const pageLabel = + footer?.pageTotal ? 
+ `Page ${pageNumber} of ${footer.pageTotal}` + : `Page ${pageNumber}`; + doc + .fontSize(8) + .fillColor("#555") + .text(pageLabel, doc.page.margins.left, y, { + width, + lineBreak: false, + align: "center", + }); + if (footer?.generatedAt) { + doc.text( + `Generated: ${footer.generatedAt.toISOString()}`, + doc.page.margins.left, + y, + { + width, + lineBreak: false, + align: "right", + }, + ); + } + doc.fillColor("#000"); + } + return pageRange.count; +} + +function renderFooter( + doc: PDFKit.PDFDocument, + pageNumber: number, + footer?: { generatedAt?: Date; pageOffset?: number; pageTotal?: number }, +) { + const pageOffset = footer?.pageOffset ?? 0; + const pageLabel = + footer?.pageTotal ? + `Page ${pageOffset + pageNumber} of ${footer.pageTotal}` + : `Page ${pageOffset + pageNumber}`; + const y = doc.page.height - doc.page.margins.bottom - 10; + const width = doc.page.width - doc.page.margins.left - doc.page.margins.right; + + const previousX = doc.x; + const previousY = doc.y; + + doc.fontSize(8).fillColor("#555").text(pageLabel, doc.page.margins.left, y, { + width, + lineBreak: false, + align: "center", + }); + if (footer?.generatedAt) { + doc.text( + `Generated: ${footer.generatedAt.toISOString()}`, + doc.page.margins.left, + y, + { + width, + lineBreak: false, + align: "right", + }, + ); + } + doc.fillColor("#000"); + + doc.x = previousX; + doc.y = previousY; +} + +function renderHeader( + doc: PDFKit.PDFDocument, + primaryFont: string, + glossFont: string, + header: PdfGeneratorOptions["header"], + alignment: "left" | "right", +) { + const headerTitle = header?.title ?? "Interlinear export"; + const headerSubtitle = header?.subtitle ?? 
""; + doc.font(primaryFont).fontSize(14).text(headerTitle, { align: "center" }); + if (headerSubtitle) { + doc + .font(glossFont) + .fontSize(10) + .fillColor("#555") + .text(headerSubtitle, { align: alignment }) + .fillColor("#000"); + } + doc.moveDown(); +} + +function ensureSpace(doc: PDFKit.PDFDocument, required: number) { + const available = doc.page.height - doc.page.margins.bottom - doc.y - 10; // padding + if (required > available) { + doc.addPage(); + } +} + +function formatGloss(word: InterlinearVerse["words"][number]) { + const parts = [word.gloss].filter(Boolean); + return parts.length ? parts.join(" ") : ""; +} + +function formatParagraphDetail(word: InterlinearVerse["words"][number]) { + return word.gloss ? word.gloss : ""; +} diff --git a/src/modules/export/pdf/InterlinearPdfGenerator.unit.ts b/src/modules/export/pdf/InterlinearPdfGenerator.unit.ts new file mode 100644 index 00000000..67e346d0 --- /dev/null +++ b/src/modules/export/pdf/InterlinearPdfGenerator.unit.ts @@ -0,0 +1,93 @@ +import path from "path"; +import { describe, expect, it, beforeEach } from "vitest"; +import { PDFDocument } from "pdf-lib"; +import { generateInterlinearPdf } from "./InterlinearPdfGenerator"; +import type { InterlinearChapterResult } from "../data-access/InterlinearQueryService"; +import { TextDirectionRaw } from "@/modules/languages/model"; + +async function streamToBuffer( + stream: NodeJS.ReadableStream, +): Promise { + const parts: Uint8Array[] = []; + for await (const chunk of stream) { + if (chunk instanceof Uint8Array) { + parts.push(chunk); + } else if (Buffer.isBuffer(chunk)) { + parts.push( + new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength), + ); + } else if (typeof chunk === "string") { + parts.push(new Uint8Array(Buffer.from(chunk))); + } + } + const total = parts.reduce((sum, p) => sum + p.byteLength, 0); + const merged = new Uint8Array(total); + let offset = 0; + for (const part of parts) { + merged.set(part, offset); + offset += 
part.byteLength; + } + return merged; +} + +function buildSampleChapter(): InterlinearChapterResult { + return { + language: { + id: "lang-1", + code: "en", + name: "Test Language", + textDirection: TextDirectionRaw.LTR, + }, + verses: [ + { + id: "verse-1", + number: 1, + words: [ + { + id: "w1", + text: "λόγος", + gloss: "word", + lemma: "logos", + grammar: "N-NSM", + }, + { + id: "w2", + text: "θεοῦ", + gloss: "of God", + lemma: "theos", + grammar: "N-GSM", + }, + ], + }, + ], + }; +} + +describe("generateInterlinearPdf", () => { + beforeEach(() => { + process.env.PDFKIT_DATA_DIR = path.join( + process.cwd(), + "node_modules", + "pdfkit", + "js", + "data", + ); + }); + + it("creates a readable PDF with Helvetica gloss and SBL body fonts", async () => { + const { stream, pageCount } = generateInterlinearPdf(buildSampleChapter(), { + layout: "standard", + pageSize: "letter", + direction: "ltr", + header: { title: "Test Header", subtitle: "Book 1" }, + footer: { generatedAt: new Date(), pageOffset: 0 }, + }); + + const pdfBytes = await streamToBuffer(stream); + expect(pdfBytes.byteLength).toBeGreaterThan(0); + expect(pageCount).toBeGreaterThanOrEqual(1); + + const pdf = await PDFDocument.load(pdfBytes); + expect(pdf.getPageCount()).toBeGreaterThanOrEqual(1); + }); +}); diff --git a/src/modules/export/public/ExportClient.ts b/src/modules/export/public/ExportClient.ts new file mode 100644 index 00000000..b779cf36 --- /dev/null +++ b/src/modules/export/public/ExportClient.ts @@ -0,0 +1,11 @@ +import bookQueryService, { + type BookRow, +} from "../data-access/BookQueryService"; + +export type PublicBookView = BookRow; + +export const exportClient = { + async findAllBooks(): Promise { + return bookQueryService.findAll(); + }, +}; diff --git a/src/modules/export/public/InterlinearPdfClient.ts b/src/modules/export/public/InterlinearPdfClient.ts new file mode 100644 index 00000000..ea000478 --- /dev/null +++ b/src/modules/export/public/InterlinearPdfClient.ts @@ -0,0 +1,55 
@@ +import interlinearCoverageQueryService, { + type BookChaptersRow, +} from "../data-access/InterlinearCoverageQueryService"; +import bookQueryService, { + type BookRow, +} from "../data-access/BookQueryService"; +import interlinearQueryService, { + type InterlinearChapterResult, +} from "../data-access/InterlinearQueryService"; +import { + generateInterlinearPdf, + type GeneratedPdf, + type PdfGeneratorOptions, +} from "../pdf/InterlinearPdfGenerator"; + +export type ApprovedGlossChaptersByBook = BookChaptersRow; +export type { + BookRow, + InterlinearChapterResult, + GeneratedPdf, + PdfGeneratorOptions, +}; + +export const interlinearPdfClient = { + async findApprovedGlossChapters( + languageId: string, + ): Promise { + return interlinearCoverageQueryService.findApprovedGlossChapters( + languageId, + ); + }, + + async findAllBooks(): Promise { + return bookQueryService.findAll(); + }, + + async fetchChapters( + bookId: number, + chapters: number[], + languageCode: string, + ): Promise { + return interlinearQueryService.fetchChapters( + bookId, + chapters, + languageCode, + ); + }, + + generateInterlinearPdf( + chapter: InterlinearChapterResult, + options: PdfGeneratorOptions, + ): GeneratedPdf { + return generateInterlinearPdf(chapter, options); + }, +}; diff --git a/src/modules/export/react/InterlinearExportPanel.client.test.tsx b/src/modules/export/react/InterlinearExportPanel.client.test.tsx new file mode 100644 index 00000000..d610e5b1 --- /dev/null +++ b/src/modules/export/react/InterlinearExportPanel.client.test.tsx @@ -0,0 +1,154 @@ +import { describe, expect, it, vi, beforeEach, MockedFunction } from "vitest"; +import { render, screen, fireEvent, act } from "@testing-library/react"; +import InterlinearExportPanelClient, { + PollExportStatusAction, + RequestExportAction, +} from "./InterlinearExportPanelClient"; +import { FormState } from "@/components/Form"; +import React from "react"; + +vi.mock("@/components/Button", () => ({ + __esModule: true, + 
default: ({ children, ...props }: any) => ( + + ), +})); +vi.mock("@/components/Icon", () => ({ + __esModule: true, + Icon: () => null, +})); +vi.mock("@/components/Form", () => { + const React = require("react") as typeof import("react"); + const FormContext = React.createContext({ state: "idle" }); + return { + __esModule: true, + default: ({ + action, + children, + className, + }: { + action: (state: FormState, formData: FormData) => Promise; + children: React.ReactNode; + className?: string; + }) => { + return ( +
{ + event.preventDefault(); + const formData = new FormData(event.currentTarget); + await action({ state: "idle" }, formData); + }} + > + + {children} + +
+ ); + }, + FormState: {}, + useFormContext: () => ({ state: "idle" }), + }; +}); + +vi.mock("@/components/FieldError", () => ({ + __esModule: true, + default: () => null, +})); +vi.mock("@/components/SortableMultiselectInput", () => ({ + __esModule: true, + default: ({ name, placeholder }: { name: string; placeholder?: string }) => ( + + ), +})); + +describe("InterlinearExportPanelClient", () => { + const requestExport = + vi.fn() as MockedFunction; + const pollExportStatus = + vi.fn() as MockedFunction; + const strings = { + title: "Interlinear PDF Export", + description: "Generate a PDF interlinear for selected chapters.", + booksLabel: "Books", + booksPlaceholder: "All books (default)", + booksHelp: "Leave blank to export all books.", + chaptersLabel: "Chapters (comma-separated or ranges)", + chaptersPlaceholder: "e.g. 1,2,4-6 (leave blank for all)", + layoutLabel: "Layout", + layoutStandard: "Standard (word by word)", + layoutParallel: "Parallel (Original | Gloss column)", + submit: "Generate PDF", + queued: "Queued...", + statusTitle: "Status", + allBooksLabel: "All books", + downloadLabel: "Download PDF", + expiresLabel: "Expires", + generatingLabel: "Generating PDF…", + failedLabel: "Export failed. Please try again.", + missingLabel: "Export not found. 
Please try again.", + statusLabels: { + PENDING: "Queued", + IN_PROGRESS: "In progress", + COMPLETE: "Complete", + FAILED: "Failed", + }, + }; + + beforeEach(() => { + requestExport.mockReset(); + pollExportStatus.mockReset(); + }); + + it("uses provided actions and updates status from polling", async () => { + requestExport.mockResolvedValue({ + state: "success", + requestIds: [{ id: "req-123", bookId: 1 }], + } as FormState & { requestIds: { id: string; bookId: number }[] }); + pollExportStatus.mockResolvedValue({ + id: "req-123", + status: "COMPLETE", + bookId: 1, + downloadUrl: "https://example.com/export.pdf", + expiresAt: null, + }); + + render( + , + ); + + fireEvent.change(screen.getByLabelText(strings.chaptersLabel), { + target: { value: "1" }, + }); + fireEvent.change(screen.getByTestId("book-select"), { + target: { value: "1" }, + }); + + const submitButton = screen.getByRole("button", { + name: /generate pdf/i, + }); + await act(async () => { + fireEvent.click(submitButton); + }); + + expect(requestExport).toHaveBeenCalledTimes(1); + const submittedForm = requestExport.mock.calls[0]?.[0] as FormData; + expect(submittedForm.get("languageCode")).toBe("spa"); + + expect( + await screen.findByText(strings.statusLabels.COMPLETE), + ).not.toBeNull(); + expect(screen.getByText(/Download PDF/)).not.toBeNull(); + }); +}); diff --git a/src/modules/export/react/InterlinearExportPanel.tsx b/src/modules/export/react/InterlinearExportPanel.tsx new file mode 100644 index 00000000..9ae72cc5 --- /dev/null +++ b/src/modules/export/react/InterlinearExportPanel.tsx @@ -0,0 +1,50 @@ +import { requestInterlinearExport } from "@/modules/export/actions/requestInterlinearExport"; +import { pollInterlinearExportStatus } from "@/modules/export/actions/pollInterlinearExportStatus"; +import InterlinearExportPanelClient from "./InterlinearExportPanelClient"; +import { getTranslations } from "next-intl/server"; + +export default async function InterlinearExportPanel({ + languageCode, 
+ books, +}: { + languageCode: string; + books: { id: number; name: string }[]; +}) { + const t = await getTranslations("InterlinearExport"); + + return ( + + ); +} diff --git a/src/modules/export/react/InterlinearExportPanelClient.tsx b/src/modules/export/react/InterlinearExportPanelClient.tsx new file mode 100644 index 00000000..61a85b00 --- /dev/null +++ b/src/modules/export/react/InterlinearExportPanelClient.tsx @@ -0,0 +1,355 @@ +"use client"; + +import React, { useEffect, useRef, useState } from "react"; +import Button from "@/components/Button"; +import TextInput from "@/components/TextInput"; +import FieldError from "@/components/FieldError"; +import FormLabel from "@/components/FormLabel"; +import { Icon } from "@/components/Icon"; +import Form, { FormState } from "@/components/Form"; +import SortableMultiselectInput from "@/components/SortableMultiselectInput"; + +export type StatusRow = { + id: string; + status: string; + bookId: number | null; + downloadUrl?: string | null; + expiresAt?: string | Date | null; + missingCount?: number; + error?: "missing" | "poll_failed"; +}; + +export type RequestExportAction = ( + formData: FormData, +) => Promise< + FormState & { requestIds?: { id: string; bookId: number | null }[] } +>; + +export type PollExportStatusAction = ( + formData: FormData, +) => Promise; + +export interface InterlinearExportPanelClientProps { + languageCode: string; + books: { id: number; name: string }[]; + strings: { + title: string; + description: string; + booksLabel: string; + booksPlaceholder: string; + booksHelp: string; + chaptersLabel: string; + chaptersPlaceholder: string; + layoutLabel: string; + layoutStandard: string; + layoutParallel: string; + submit: string; + queued: string; + statusTitle: string; + allBooksLabel: string; + downloadLabel: string; + expiresLabel: string; + generatingLabel: string; + failedLabel: string; + missingLabel: string; + statusLabels: Record; + }; + requestExport: RequestExportAction; + 
pollExportStatus: PollExportStatusAction; +} + +export default function InterlinearExportPanelClient({ + languageCode, + books, + strings, + requestExport, + pollExportStatus, +}: InterlinearExportPanelClientProps) { + const [statuses, setStatuses] = useState>({}); + const [pollingIds, setPollingIds] = useState([]); + const [pending, setPending] = useState(false); + const missingCountsRef = useRef>({}); + + const handleSubmit = async ( + _state: FormState, + formData: FormData, + ): Promise => { + try { + setPending(true); + setStatuses({}); + setPollingIds([]); + missingCountsRef.current = {}; + + const result = await requestExport(formData); + if (result.state === "error") { + return result; + } + + const requestIds = result.requestIds ?? []; + if (requestIds.length === 0) { + return { state: "error", error: strings.failedLabel }; + } + + const nextStatuses: Record = {}; + requestIds.forEach(({ id, bookId }) => { + nextStatuses[id] = { + id, + status: "PENDING", + bookId, + downloadUrl: null, + expiresAt: null, + }; + }); + setStatuses(nextStatuses); + setPollingIds(requestIds.map((request) => request.id)); + + const poll = new FormData(); + poll.set("id", requestIds[0].id); + const statusRow = await pollExportStatus(poll); + if (statusRow) { + setStatuses((prev) => ({ + ...prev, + [statusRow.id]: { ...prev[statusRow.id], ...statusRow }, + })); + } + + return { state: "success" }; + } catch (error) { + console.error(error); + return { state: "error", error: strings.failedLabel }; + } finally { + setPending(false); + } + }; + + useEffect(() => { + if (pollingIds.length === 0) return; + + let cancelled = false; + let timeoutId: ReturnType | null = null; + const MAX_MISSING_POLLS = 5; + + const poll = async () => { + const nextPendingIds: string[] = []; + try { + for (const id of pollingIds) { + const pollForm = new FormData(); + pollForm.set("id", id); + const statusRow = await pollExportStatus(pollForm); + if (statusRow) { + missingCountsRef.current[id] = 0; + 
setStatuses((prev) => ({ + ...prev, + [statusRow.id]: { + ...prev[statusRow.id], + ...statusRow, + missingCount: 0, + error: undefined, + }, + })); + if ( + statusRow.status !== "COMPLETE" && + statusRow.status !== "FAILED" + ) { + nextPendingIds.push(id); + } + } else { + const nextCount = (missingCountsRef.current[id] ?? 0) + 1; + missingCountsRef.current[id] = nextCount; + + if (nextCount >= MAX_MISSING_POLLS) { + setStatuses((prev) => { + const current = prev[id]; + if (!current) return prev; + return { + ...prev, + [id]: { + ...current, + missingCount: nextCount, + status: "FAILED", + error: "missing", + }, + }; + }); + continue; + } + + setStatuses((prev) => { + const current = prev[id]; + if (!current) return prev; + return { + ...prev, + [id]: { + ...current, + missingCount: nextCount, + }, + }; + }); + nextPendingIds.push(id); + } + } + } catch (error) { + console.error("Failed to poll export status", error); + setStatuses((prev) => { + const next = { ...prev }; + pollingIds.forEach((id) => { + if (!next[id]) return; + next[id] = { ...next[id], status: "FAILED", error: "poll_failed" }; + }); + return next; + }); + return; + } + + if (!cancelled) { + if (nextPendingIds.length > 0) { + timeoutId = setTimeout(() => setPollingIds(nextPendingIds), 3000); + } else { + setPollingIds([]); + } + } + }; + + poll(); + + return () => { + cancelled = true; + if (timeoutId) { + clearTimeout(timeoutId); + } + }; + }, [pollExportStatus, pollingIds]); + + return ( +
+
+
+

+ {strings.title} +

+

+ {strings.description} +

+
+ +
+
+ + +
+ {strings.booksLabel} + ({ + value: String(book.id), + label: `${book.id}. ${book.name}`, + }))} + /> +

{strings.booksHelp}

+ +
+ +
+ {strings.chaptersLabel} + + +
+ +
+ {strings.layoutLabel} + +
+ +
+ +
+
+ + {Object.keys(statuses).length > 0 && ( +
+
{strings.statusTitle}
+
+ {Object.values(statuses).map((status) => { + const bookName = + status.bookId && status.bookId > 0 ? + (books.find((book) => book.id === status.bookId)?.name ?? + `Book ${status.bookId}`) + : strings.allBooksLabel; + const isComplete = status.status === "COMPLETE"; + const isFailed = status.status === "FAILED"; + const statusLabel = + strings.statusLabels[status.status] ?? status.status; + return ( +
+
+
+ + {status.bookId ? `Book ${status.bookId}` : "Book"} + + {bookName} +
+ + {statusLabel} + +
+ {status.downloadUrl && ( +
+ + {status.expiresAt && ( + + {strings.expiresLabel}:{" "} + {new Date(status.expiresAt).toLocaleString()} + + )} +
+ )} + {isFailed && ( + + {status.error === "missing" ? + strings.missingLabel + : strings.failedLabel} + + )} + {!isComplete && !isFailed && ( + + {strings.generatingLabel} + + )} +
+ ); + })} +
+
+ )} +
+
+
+ ); +} diff --git a/src/modules/export/use-cases/GetInterlinearExportStatus.ts b/src/modules/export/use-cases/GetInterlinearExportStatus.ts new file mode 100644 index 00000000..94b78968 --- /dev/null +++ b/src/modules/export/use-cases/GetInterlinearExportStatus.ts @@ -0,0 +1,13 @@ +import type exportRequestRepository from "../data-access/ExportRequestRepository"; + +export default class GetInterlinearExportStatus { + constructor( + private readonly deps: { + exportRequestRepository: typeof exportRequestRepository; + }, + ) {} + + async execute(requestId: string) { + return this.deps.exportRequestRepository.findStatus(requestId); + } +} diff --git a/src/modules/export/use-cases/RequestInterlinearExport.ts b/src/modules/export/use-cases/RequestInterlinearExport.ts new file mode 100644 index 00000000..b8623966 --- /dev/null +++ b/src/modules/export/use-cases/RequestInterlinearExport.ts @@ -0,0 +1,107 @@ +import { ulid } from "@/shared/ulid"; +import { ExportLayout } from "../model"; +import type bookQueryService from "../data-access/BookQueryService"; +import type languageLookupQueryService from "../data-access/LanguageLookupQueryService"; +import type exportRequestRepository from "../data-access/ExportRequestRepository"; +import type { ExportBookSelection } from "../data-access/ExportRequestRepository"; +import type { enqueueJob } from "@/shared/jobs/enqueueJob"; +import { EXPORT_JOB_TYPES } from "../jobs/jobTypes"; + +export class NoBooksAvailableForExportError extends Error {} +export class NoChaptersAvailableForExportError extends Error {} +export class ExportLanguageNotFoundError extends Error { + constructor(readonly languageCode: string) { + super(); + } +} + +export interface RequestInterlinearExportRequest { + languageCode: string; + requestedBy: string; + layout: ExportLayout; + bookIds: number[] | null; + chapters: number[] | null; +} + +export interface RequestInterlinearExportResult { + requestId: string; + bookId: number | null; + books: 
ExportBookSelection[]; +} + +export default class RequestInterlinearExport { + constructor( + private readonly deps: { + bookQueryService: typeof bookQueryService; + languageLookupQueryService: typeof languageLookupQueryService; + exportRequestRepository: typeof exportRequestRepository; + enqueueJob: typeof enqueueJob; + }, + ) {} + + async execute( + request: RequestInterlinearExportRequest, + ): Promise { + const language = await this.deps.languageLookupQueryService.findByCode( + request.languageCode, + ); + if (!language) { + throw new ExportLanguageNotFoundError(request.languageCode); + } + + const allBooks = await this.deps.bookQueryService.findAll(); + const selectedBookIds = request.bookIds ?? allBooks.map((book) => book.id); + if (selectedBookIds.length === 0) { + throw new NoBooksAvailableForExportError(); + } + + const chaptersByBookRows = + await this.deps.bookQueryService.findChapters(selectedBookIds); + const chaptersByBook = new Map( + chaptersByBookRows.map((row) => [row.bookId, row.chapters]), + ); + + const books: ExportBookSelection[] = []; + for (const bookId of selectedBookIds) { + const available = chaptersByBook.get(bookId) ?? []; + if (available.length === 0) { + continue; + } + + const chapters = + request.chapters ? + request.chapters.filter((chapter) => available.includes(chapter)) + : available; + if (chapters.length === 0) { + continue; + } + books.push({ bookId, chapters }); + } + + if (books.length === 0) { + throw new NoChaptersAvailableForExportError(); + } + + const requestId = ulid(); + await this.deps.exportRequestRepository.createInterlinearRequest({ + requestId, + languageId: language.id, + requestedBy: request.requestedBy, + layout: request.layout, + books, + }); + + await this.deps.enqueueJob(EXPORT_JOB_TYPES.EXPORT_INTERLINEAR_PDF, { + requestId, + languageCode: request.languageCode, + books, + layout: request.layout, + }); + + return { + requestId, + bookId: books.length === 1 ? 
books[0].bookId : null, + books, + }; + } +} diff --git a/src/modules/languages/react/LanguageSettingsPage.tsx b/src/modules/languages/react/LanguageSettingsPage.tsx index e2d5993c..7a1af00b 100644 --- a/src/modules/languages/react/LanguageSettingsPage.tsx +++ b/src/modules/languages/react/LanguageSettingsPage.tsx @@ -19,6 +19,8 @@ import { updateLanguageSettings } from "@/modules/languages/actions/updateLangua import Form from "@/components/Form"; import { languageQueryService } from "../data-access/LanguageQueryService"; import { notFound } from "next/navigation"; +import InterlinearExportPanel from "@/modules/export/react/InterlinearExportPanel"; +import { exportClient } from "@/modules/export/public/ExportClient"; interface LanguageSettingsPageProps { params: { code: string }; @@ -41,10 +43,11 @@ export default async function LanguageSettingsPage({ }: LanguageSettingsPageProps) { const t = await getTranslations("LanguageSettingsPage"); - const [languageSettings, languages, translations] = await Promise.all([ + const [languageSettings, languages, translations, books] = await Promise.all([ languageQueryService.findSettingsByCode(params.code), languageQueryService.findAll(), fetchTranslations(params.code), + exportClient.findAllBooks(), ]); if (!languageSettings) { notFound(); @@ -217,6 +220,10 @@ export default async function LanguageSettingsPage({ + ); } diff --git a/src/modules/snapshots/data-access/SnapshotStorageRepository.ts b/src/modules/snapshots/data-access/SnapshotStorageRepository.ts new file mode 100644 index 00000000..d90d08fb --- /dev/null +++ b/src/modules/snapshots/data-access/SnapshotStorageRepository.ts @@ -0,0 +1,70 @@ +import { Upload } from "@aws-sdk/lib-storage"; +import { DeleteObjectCommand, GetObjectCommand } from "@aws-sdk/client-s3"; +import { Readable } from "stream"; +import { createLogger } from "@/logging"; +import type { StorageEnvironment } from "@/shared/storageEnvironment"; +import { getS3Client, s3BodyToUint8Array } from 
"@/shared/s3"; +import { snapshotBucketName } from "./snapshotBucket"; + +const s3Client = getS3Client(); + +export const snapshotStorageRepository = { + bucketName(environment: StorageEnvironment) { + return snapshotBucketName(environment); + }, + + async uploadPdf({ + environment, + key, + stream, + }: { + environment: StorageEnvironment; + key: string; + stream: Readable; + }): Promise { + const bucket = this.bucketName(environment); + const logger = createLogger({ bucket, key }); + + const upload = new Upload({ + client: s3Client, + params: { + Bucket: bucket, + Key: key, + Body: stream, + ContentType: "application/pdf", + }, + }); + + await upload.done(); + logger.info("Snapshot PDF uploaded"); + + return `s3://${bucket}/${key}`; + }, + + async fetchBuffer({ + environment, + key, + }: { + environment: StorageEnvironment; + key: string; + }): Promise { + const bucket = this.bucketName(environment); + const res = await s3Client.send( + new GetObjectCommand({ Bucket: bucket, Key: key }), + ); + return s3BodyToUint8Array(res.Body); + }, + + async deleteObject({ + environment, + key, + }: { + environment: StorageEnvironment; + key: string; + }): Promise { + const bucket = this.bucketName(environment); + await s3Client.send(new DeleteObjectCommand({ Bucket: bucket, Key: key })); + }, +}; + +export default snapshotStorageRepository; diff --git a/src/modules/snapshots/data-access/snapshotBucket.ts b/src/modules/snapshots/data-access/snapshotBucket.ts new file mode 100644 index 00000000..1fe5cdaa --- /dev/null +++ b/src/modules/snapshots/data-access/snapshotBucket.ts @@ -0,0 +1,8 @@ +import type { StorageEnvironment } from "@/shared/storageEnvironment"; + +const SNAPSHOT_BUCKET_PREFIX = + process.env.SNAPSHOT_BUCKET_PREFIX ?? 
"gbt-snapshots"; + +export function snapshotBucketName(environment: StorageEnvironment): string { + return `${SNAPSHOT_BUCKET_PREFIX}-${environment}`; +} diff --git a/src/modules/snapshots/data-access/snapshotObjectRepository.ts b/src/modules/snapshots/data-access/snapshotObjectRepository.ts index fb1a8317..fdd48b01 100644 --- a/src/modules/snapshots/data-access/snapshotObjectRepository.ts +++ b/src/modules/snapshots/data-access/snapshotObjectRepository.ts @@ -1,13 +1,15 @@ import { Upload } from "@aws-sdk/lib-storage"; -import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import { GetObjectCommand } from "@aws-sdk/client-s3"; import { Readable, Transform, TransformCallback } from "stream"; import { Snapshot, SnapshotObjectPlugin } from "../model"; import { createLogger, logger } from "@/logging"; import { languageSnapshotObjectPlugins } from "@/modules/languages/data-access/snapshotObjectPlugins"; import { translationSnapshotObjectPlugins } from "@/modules/translation/data-access/snapshotObjectPlugins"; import { reportingSnapshotObjectPlugins } from "@/modules/reporting/data-access/snapshotObjectPlugins"; +import type { StorageEnvironment } from "@/shared/storageEnvironment"; +import { getS3Client, s3BodyToReadable } from "@/shared/s3"; +import { snapshotBucketName } from "./snapshotBucket"; -const SNAPSHOT_BUCKET_PREFIX = "gbt-snapshots"; const SNAPSHOT_OBJECT_PLUGINS: SnapshotObjectPlugin[] = [ ...languageSnapshotObjectPlugins, ...translationSnapshotObjectPlugins, @@ -19,7 +21,7 @@ export const snapshotObjectRepository = { environment, snapshot, }: { - environment: "prod" | "local"; + environment: StorageEnvironment; snapshot: Snapshot; }): Promise { const logger = createLogger({ @@ -27,6 +29,7 @@ export const snapshotObjectRepository = { languageId: snapshot.languageId, snapshotId: snapshot.id, }); + const bucket = snapshotBucketName(environment); for (const plugin of SNAPSHOT_OBJECT_PLUGINS) { if (!plugin.read) { @@ -40,7 +43,7 @@ export const 
snapshotObjectRepository = { await uploadJson({ key: `${snapshot.languageId}/${snapshot.id}/${plugin.resourceName}.jsonl`, - bucket: `${SNAPSHOT_BUCKET_PREFIX}-${environment}`, + bucket, stream: objectStream.pipe(new SerializeJsonLTransform()), }); @@ -53,7 +56,7 @@ export const snapshotObjectRepository = { environment, snapshot, }: { - environment: "prod" | "local"; + environment: StorageEnvironment; snapshot: Snapshot; }): Promise { const logger = createLogger({ @@ -61,6 +64,7 @@ export const snapshotObjectRepository = { snapshotId: snapshot.id, languageId: snapshot.languageId, }); + const bucket = snapshotBucketName(environment); for (let i = SNAPSHOT_OBJECT_PLUGINS.length - 1; i >= 0; i--) { const plugin = SNAPSHOT_OBJECT_PLUGINS[i]; @@ -69,7 +73,7 @@ export const snapshotObjectRepository = { for (const plugin of SNAPSHOT_OBJECT_PLUGINS) { const maybeStream = await downloadJson({ - bucket: `${SNAPSHOT_BUCKET_PREFIX}-${environment}`, + bucket, key: `${snapshot.languageId}/${snapshot.id}/${plugin.resourceName}.jsonl`, }); @@ -147,7 +151,7 @@ export class DeserializeJsonLTransform extends Transform { } } -const s3Client = new S3Client(); +const s3Client = getS3Client(); async function downloadJson({ key, @@ -170,13 +174,13 @@ async function downloadJson({ } throw error; } - const stream = response.Body; + const body = response.Body; + if (!body) return undefined; - // Typescript doesn't know that the response type is always a stream when used in nodejs. 
- if (!(stream instanceof Readable)) { - throw new Error("Expected Body to be a Readable stream in Node.js"); + const stream = await s3BodyToReadable(body); + if (!stream) { + throw new Error("Unsupported S3 Body type"); } - return stream; } diff --git a/src/modules/snapshots/data-access/snapshotQueryService.ts b/src/modules/snapshots/data-access/snapshotQueryService.ts index fae9beb6..7c359cdc 100644 --- a/src/modules/snapshots/data-access/snapshotQueryService.ts +++ b/src/modules/snapshots/data-access/snapshotQueryService.ts @@ -106,7 +106,7 @@ export const snapshotQueryService = { where type_id IN ( select id from job_type - where name IN ($2, $3) + where name IN ($2, $3, $4) ) and payload->>'languageId' = $1 and status IN ('pending', 'in-progress') @@ -115,6 +115,7 @@ export const snapshotQueryService = { languageId, SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT, SNAPSHOT_JOB_TYPES.RESTORE_SNAPSHOT, + SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF, ], ); return result.rows[0]; diff --git a/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.localstack.test.ts b/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.localstack.test.ts new file mode 100644 index 00000000..e90185ee --- /dev/null +++ b/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.localstack.test.ts @@ -0,0 +1,208 @@ +import { describe, expect, it, beforeAll, afterAll, vi } from "vitest"; +import { initializeDatabase } from "@/tests/vitest/dbUtils"; +import { createScenario } from "@/tests/scenarios"; +import { query } from "@/db"; +import { ulid } from "@/shared/ulid"; +import { s3BodyToUint8Array } from "@/shared/s3"; +import { + CreateBucketCommand, + GetObjectCommand, + HeadBucketCommand, + ListObjectsV2Command, + S3Client, +} from "@aws-sdk/client-s3"; +import { PDFDocument } from "pdf-lib"; + +initializeDatabase(); + +describe("create_snapshot_interlinear_pdf (localstack)", () => { + const localstackEndpoint = + process.env.AWS_ENDPOINT_URL_S3 ?? 
"http://localhost:4566"; + const region = process.env.AWS_REGION ?? "us-east-1"; + const accessKeyId = process.env.AWS_ACCESS_KEY_ID ?? "test"; + const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY ?? "test"; + const snapshotBucketPrefix = + process.env.SNAPSHOT_BUCKET_PREFIX ?? "gbt-snapshots"; + const environment = "local"; + const bucket = `${snapshotBucketPrefix}-${environment}`; + + const s3 = new S3Client({ + region, + endpoint: localstackEndpoint, + forcePathStyle: true, + credentials: { accessKeyId, secretAccessKey }, + }); + + beforeAll(async () => { + vi.stubEnv("NODE_ENV", "test"); + vi.stubEnv("AWS_REGION", region); + vi.stubEnv("AWS_ACCESS_KEY_ID", accessKeyId); + vi.stubEnv("AWS_SECRET_ACCESS_KEY", secretAccessKey); + vi.stubEnv("AWS_ENDPOINT_URL_S3", localstackEndpoint); + vi.stubEnv("AWS_S3_FORCE_PATH_STYLE", "true"); + vi.stubEnv("SNAPSHOT_BUCKET_PREFIX", snapshotBucketPrefix); + + await ensureBucket(s3, bucket); + }); + + afterAll(() => { + vi.unstubAllEnvs(); + }); + + it("processes the job and uploads a merged PDF to snapshot storage", async () => { + const scenario = await createScenario({ languages: { language: {} } }); + const language = scenario.languages.language; + + await query( + `insert into job_type (name) values ($1) on conflict (name) do nothing`, + ["create_snapshot_interlinear_pdf"], + ); + + await query( + `insert into book (id, name) values (1, 'Genesis'), (2, 'Exodus')`, + [], + ); + await query( + ` + insert into verse (id, number, book_id, chapter) + values + ('1-1-1', 1, 1, 1), + ('2-1-1', 1, 2, 1) + `, + [], + ); + await query(`insert into lemma (id) values ('l1')`, []); + await query( + `insert into lemma_form (id, grammar, lemma_id) values ('f1', 'g', 'l1')`, + [], + ); + await query( + ` + insert into word (id, text, verse_id, form_id) + values + ('w1', 'λόγος', '1-1-1', 'f1'), + ('w2', 'θεοῦ', '2-1-1', 'f1') + `, + [], + ); + + const phrase1 = await query<{ id: number }>( + `insert into phrase (language_id, 
created_at) values ($1, now()) returning id`, + [language.id], + ); + await query( + `insert into phrase_word (phrase_id, word_id) values ($1, $2)`, + [phrase1.rows[0].id, "w1"], + ); + await query( + `insert into gloss (gloss, state, phrase_id, updated_at) values ($1, $2, $3, now())`, + ["word", "APPROVED", phrase1.rows[0].id], + ); + + const phrase2 = await query<{ id: number }>( + `insert into phrase (language_id, created_at) values ($1, now()) returning id`, + [language.id], + ); + await query( + `insert into phrase_word (phrase_id, word_id) values ($1, $2)`, + [phrase2.rows[0].id, "w2"], + ); + await query( + `insert into gloss (gloss, state, phrase_id, updated_at) values ($1, $2, $3, now())`, + ["of God", "APPROVED", phrase2.rows[0].id], + ); + + const snapshotId = ulid(); + const jobId = ulid(); + const payload = { + languageId: language.id, + languageCode: language.code, + snapshotId, + }; + const createdAt = new Date(); + + await query( + ` + insert into job (id, status, payload, created_at, updated_at, type_id) + values ( + $1, $2, $3, $4, $5, + (select id from job_type where name = $6) + ) + `, + [ + jobId, + "pending", + payload, + createdAt, + createdAt, + "create_snapshot_interlinear_pdf", + ], + ); + + const { processJob } = await import("@/shared/jobs/processJob"); + await processJob({ + body: JSON.stringify({ + id: jobId, + type: "create_snapshot_interlinear_pdf", + payload, + }), + receiptHandle: "localstack-test", + } as any); + + const jobResult = await query<{ status: string; data: any }>( + `select status, data from job where id = $1`, + [jobId], + ); + expect(jobResult.rows[0]?.status).toBe("complete"); + expect(jobResult.rows[0]?.data).toMatchObject({ + uploaded: true, + key: `${language.id}/${snapshotId}/interlinear/standard.pdf`, + books: 2, + }); + + const key = `${language.id}/${snapshotId}/interlinear/standard.pdf`; + const pdfBytes = await fetchObjectBytes(s3, { bucket, key }); + const pdf = await PDFDocument.load(pdfBytes); + 
expect(pdf.getPageCount()).toBeGreaterThan(0); + + const partsPrefix = `${language.id}/${snapshotId}/interlinear/parts/`; + const parts = await s3.send( + new ListObjectsV2Command({ Bucket: bucket, Prefix: partsPrefix }), + ); + const partKeys = (parts.Contents ?? []) + .map((o) => o.Key) + .filter((k): k is string => typeof k === "string"); + expect(partKeys).toEqual([]); + }); +}); + +async function ensureBucket(s3Client: S3Client, bucket: string) { + try { + await s3Client.send(new HeadBucketCommand({ Bucket: bucket })); + return; + } catch { + // fallthrough + } + + try { + await s3Client.send(new CreateBucketCommand({ Bucket: bucket })); + } catch (error) { + const name = error instanceof Error ? error.name : ""; + if (name === "BucketAlreadyExists" || name === "BucketAlreadyOwnedByYou") { + return; + } + throw error; + } +} + +async function fetchObjectBytes( + s3Client: S3Client, + { bucket, key }: { bucket: string; key: string }, +): Promise<Uint8Array> { + const res = await s3Client.send( + new GetObjectCommand({ Bucket: bucket, Key: key }), + ); + const bytes = await s3BodyToUint8Array(res.Body); + if (!bytes) throw new Error("Expected GetObjectCommand Body"); + return bytes; +} diff --git a/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.ts b/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.ts new file mode 100644 index 00000000..69036396 --- /dev/null +++ b/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.ts @@ -0,0 +1,218 @@ +import { Job } from "@/shared/jobs/model"; +import { logger } from "@/logging"; +import { SNAPSHOT_JOB_TYPES } from "./jobTypes"; +import { getStorageEnvironment } from "@/shared/storageEnvironment"; +import { interlinearPdfClient } from "@/modules/export/public/InterlinearPdfClient"; +import { detectScript } from "@/shared/scriptDetection"; +import snapshotStorageRepository from "../data-access/SnapshotStorageRepository"; +import { PDFDocument } from "pdf-lib"; +import { Readable } from "stream"; +import type { 
Logger } from "pino"; + +export interface CreateSnapshotInterlinearPdfPayload { + languageId: string; + languageCode: string; + snapshotId: string; +} + +export type CreateSnapshotInterlinearPdfJob = + Job<CreateSnapshotInterlinearPdfPayload>; + +export async function createSnapshotInterlinearPdfJob( + job: CreateSnapshotInterlinearPdfJob, +): Promise<{ uploaded: boolean; key?: string; books: number; pages: number }> { + const jobLogger = logger.child({ + job: { + id: job.id, + type: job.type, + languageId: job.payload.languageId, + snapshotId: job.payload.snapshotId, + }, + }); + + if (job.type !== SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF) { + jobLogger.error( + `received job type ${job.type}, expected ${SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF}`, + ); + throw new Error( + `Expected job type ${SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF}, but received ${job.type}`, + ); + } + + const environment = getStorageEnvironment(); + const { languageId, languageCode, snapshotId } = job.payload; + const selections = + await interlinearPdfClient.findApprovedGlossChapters(languageId); + + if (selections.length === 0) { + jobLogger.info("No approved glosses found; skipping snapshot PDF export"); + return { uploaded: false, books: 0, pages: 0 }; + } + + const booksById = new Map( + (await interlinearPdfClient.findAllBooks()).map((book) => [ + book.id, + book.name, + ]), + ); + + const partKeys: string[] = []; + const targetKey = snapshotInterlinearPdfKey(languageId, snapshotId); + + try { + for (const { bookId, chapters } of selections) { + if (!chapters.length) continue; + + const bookName = booksById.get(bookId) ?? `Book ${bookId}`; + const chapterData = await interlinearPdfClient.fetchChapters( + bookId, + chapters, + languageCode, + ); + + const sampleText = + chapterData.verses?.[0]?.words?.[0]?.text ?? + chapterData.verses?.[0]?.words?.[0]?.gloss ?? 
+ ""; + const sourceScript = detectScript(sampleText); + + const glossLanguageName = chapterData.language.name; + const sourceLanguageLabel = + sourceScript === "hebrew" ? "Hebrew" + : sourceScript === "greek" ? "Greek" + : "Original"; + const chapterLabel = + chapters.length === 1 ? + `Chapter ${chapters[0]}` + : `Chapters ${chapters[0]}-${chapters[chapters.length - 1]}`; + + const { stream } = interlinearPdfClient.generateInterlinearPdf( + chapterData, + { + layout: "standard", + pageSize: "letter", + direction: chapterData.language.textDirection, + sourceScript, + header: { + title: `${glossLanguageName}/${sourceLanguageLabel} Interlinear`, + subtitle: `${bookName} - ${chapterLabel}`, + }, + footer: { + generatedAt: job.createdAt, + pageOffset: 0, + }, + }, + ); + + const partKey = snapshotInterlinearPartKey( + languageId, + snapshotId, + bookId, + ); + await snapshotStorageRepository.uploadPdf({ + environment, + key: partKey, + stream, + }); + partKeys.push(partKey); + } + + const mergeResult = await mergePdfs({ + environment, + partKeys, + targetKey, + }); + + if (!mergeResult.uploaded) { + return { uploaded: false, books: selections.length, pages: 0 }; + } + + jobLogger.info({ key: targetKey }, "Snapshot interlinear PDF generated"); + return { + uploaded: true, + key: targetKey, + books: selections.length, + pages: mergeResult.pages, + }; + } finally { + if (partKeys.length > 0) { + await cleanupParts(partKeys, environment, jobLogger); + } + } +} + +function snapshotInterlinearPdfKey( + languageId: string, + snapshotId: string, +): string { + return `${languageId}/${snapshotId}/interlinear/standard.pdf`; +} + +function snapshotInterlinearPartKey( + languageId: string, + snapshotId: string, + bookId: number, +): string { + return `${languageId}/${snapshotId}/interlinear/parts/book-${bookId}.pdf`; +} + +async function mergePdfs({ + environment, + partKeys, + targetKey, +}: { + environment: "prod" | "local"; + partKeys: string[]; + targetKey: string; +}): 
Promise<{ uploaded: boolean; pages: number }> { + const uniquePartKeys = Array.from(new Set(partKeys)); + const mergedPdf = await PDFDocument.create(); + let mergedPages = 0; + + for (const key of uniquePartKeys) { + const bytes = await snapshotStorageRepository.fetchBuffer({ + environment, + key, + }); + if (!bytes || bytes.byteLength === 0) continue; + const partPdf = await PDFDocument.load(bytes); + const copiedPages = await mergedPdf.copyPages( + partPdf, + partPdf.getPageIndices(), + ); + copiedPages.forEach((p) => mergedPdf.addPage(p)); + mergedPages += copiedPages.length; + } + + if (mergedPages === 0) { + return { uploaded: false, pages: 0 }; + } + + const mergedBytes = await mergedPdf.save(); + await snapshotStorageRepository.uploadPdf({ + environment, + key: targetKey, + stream: Readable.from([mergedBytes]), + }); + + return { uploaded: true, pages: mergedPages }; +} + +async function cleanupParts( + partKeys: string[], + environment: "prod" | "local", + jobLogger: Logger, +) { + await Promise.all( + partKeys.map(async (key) => { + try { + await snapshotStorageRepository.deleteObject({ environment, key }); + } catch (error) { + jobLogger.warn( + { err: error, key }, + "Failed to delete snapshot interlinear part after merge", + ); + } + }), + ); +} diff --git a/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.unit.ts b/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.unit.ts new file mode 100644 index 00000000..c91d18e3 --- /dev/null +++ b/src/modules/snapshots/jobs/createSnapshotInterlinearPdfJob.unit.ts @@ -0,0 +1,157 @@ +import { Readable } from "stream"; +import { describe, expect, it, beforeEach, vi } from "vitest"; +import { createSnapshotInterlinearPdfJob } from "./createSnapshotInterlinearPdfJob"; +import { JobStatus } from "@/shared/jobs/model"; +import { SNAPSHOT_JOB_TYPES } from "./jobTypes"; +import { PDFDocument } from "pdf-lib"; + +const { + mockFindApprovedGlossChapters, + mockFindAllBooks, + mockFetchChapters, + 
mockGenerateInterlinearPdf, + mockUploadPdf, + mockFetchBuffer, + mockDeleteObject, +} = vi.hoisted(() => { + return { + mockFindApprovedGlossChapters: vi.fn(), + mockFindAllBooks: vi.fn(), + mockFetchChapters: vi.fn(), + mockGenerateInterlinearPdf: vi.fn(), + mockUploadPdf: vi.fn(), + mockFetchBuffer: vi.fn(), + mockDeleteObject: vi.fn(), + }; +}); + +vi.mock("@/modules/export/public/InterlinearPdfClient", () => ({ + __esModule: true, + interlinearPdfClient: { + findApprovedGlossChapters: mockFindApprovedGlossChapters, + findAllBooks: mockFindAllBooks, + fetchChapters: mockFetchChapters, + generateInterlinearPdf: mockGenerateInterlinearPdf, + }, +})); +vi.mock("../data-access/SnapshotStorageRepository", () => { + const repo = { + uploadPdf: mockUploadPdf, + fetchBuffer: mockFetchBuffer, + deleteObject: mockDeleteObject, + bucketName: vi.fn(), + }; + return { __esModule: true, snapshotStorageRepository: repo, default: repo }; +}); + +async function createPdfWithPages(count: number) { + const pdf = await PDFDocument.create(); + for (let i = 0; i < count; i += 1) { + pdf.addPage(); + } + return Buffer.from(await pdf.save()); +} + +describe("createSnapshotInterlinearPdfJob", () => { + beforeEach(async () => { + mockFindApprovedGlossChapters.mockReset(); + mockFindAllBooks.mockReset(); + mockFetchChapters.mockReset(); + mockGenerateInterlinearPdf.mockReset(); + mockUploadPdf.mockReset(); + mockFetchBuffer.mockReset(); + mockDeleteObject.mockReset(); + + const partBytes = await createPdfWithPages(1); + + mockFindApprovedGlossChapters.mockResolvedValue([ + { bookId: 1, chapters: [1] }, + { bookId: 2, chapters: [1, 2] }, + ]); + mockFindAllBooks.mockResolvedValue([ + { id: 1, name: "Genesis" }, + { id: 2, name: "Exodus" }, + ]); + mockFetchChapters.mockResolvedValue({ + language: { + id: "lang-1", + code: "spa", + name: "Test Language", + textDirection: "ltr", + }, + verses: [ + { + id: "verse-1", + number: 1, + words: [ + { + id: "w1", + text: "λόγος", + gloss: "word", + 
lemma: "l", + grammar: "g", + }, + ], + }, + ], + }); + mockGenerateInterlinearPdf.mockImplementation(() => ({ + stream: Readable.from([partBytes]), + pageCount: 1, + })); + mockUploadPdf.mockResolvedValue("s3://bucket/key"); + mockFetchBuffer.mockResolvedValue(partBytes); + mockDeleteObject.mockResolvedValue(undefined); + }); + + it("uploads per-book parts, merges them, and cleans up parts", async () => { + const job = { + id: "job-1", + type: SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF, + status: JobStatus.Pending, + payload: { languageId: "lang-1", languageCode: "spa", snapshotId: "s1" }, + createdAt: new Date(), + updatedAt: new Date(), + }; + + const result = await createSnapshotInterlinearPdfJob(job); + + expect(result).toEqual({ + uploaded: true, + key: "lang-1/s1/interlinear/standard.pdf", + books: 2, + pages: 2, + }); + + const uploadedKeys = mockUploadPdf.mock.calls.map((c) => c[0]?.key); + expect(uploadedKeys).toEqual([ + "lang-1/s1/interlinear/parts/book-1.pdf", + "lang-1/s1/interlinear/parts/book-2.pdf", + "lang-1/s1/interlinear/standard.pdf", + ]); + + const deletedKeys = mockDeleteObject.mock.calls.map((c) => c[0]?.key); + expect(deletedKeys).toEqual([ + "lang-1/s1/interlinear/parts/book-1.pdf", + "lang-1/s1/interlinear/parts/book-2.pdf", + ]); + }); + + it("skips export when there are no approved glosses", async () => { + mockFindApprovedGlossChapters.mockResolvedValue([]); + + const job = { + id: "job-2", + type: SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF, + status: JobStatus.Pending, + payload: { languageId: "lang-1", languageCode: "spa", snapshotId: "s1" }, + createdAt: new Date(), + updatedAt: new Date(), + }; + + const result = await createSnapshotInterlinearPdfJob(job); + expect(result).toEqual({ uploaded: false, books: 0, pages: 0 }); + expect(mockUploadPdf).not.toHaveBeenCalled(); + expect(mockDeleteObject).not.toHaveBeenCalled(); + }); +}); diff --git a/src/modules/snapshots/jobs/createSnapshotJob.ts 
b/src/modules/snapshots/jobs/createSnapshotJob.ts index 5f90e73c..4cafad66 100644 --- a/src/modules/snapshots/jobs/createSnapshotJob.ts +++ b/src/modules/snapshots/jobs/createSnapshotJob.ts @@ -6,6 +6,8 @@ import { Snapshot } from "../model"; import { ulid } from "@/shared/ulid"; import { snapshotRepository } from "../data-access/SnapshotRepository"; import { snapshotObjectRepository } from "../data-access/snapshotObjectRepository"; +import { getStorageEnvironment } from "@/shared/storageEnvironment"; +import { enqueueJob } from "@/shared/jobs/enqueueJob"; export type CreateSnapshotJob = Job<{ languageId: string; @@ -29,7 +31,7 @@ export async function createSnapshotJob(job: CreateSnapshotJob) { ); } - const language = languageQueryService.findById(job.payload.languageId); + const language = await languageQueryService.findById(job.payload.languageId); if (!language) { throw new Error(`Language ${job.payload.languageId} not found`); } @@ -41,10 +43,15 @@ export async function createSnapshotJob(job: CreateSnapshotJob) { }; await snapshotObjectRepository.upload({ - environment: process.env.NODE_ENV === "production" ? 
"prod" : "local", + environment: getStorageEnvironment(), snapshot, }); await snapshotRepository.create(snapshot); + await enqueueJob(SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF, { + languageId: snapshot.languageId, + languageCode: language.code, + snapshotId: snapshot.id, + }); jobLogger.info(`Created snapshot ${snapshot.id}`); } diff --git a/src/modules/snapshots/jobs/createSnapshotJob.unit.ts b/src/modules/snapshots/jobs/createSnapshotJob.unit.ts new file mode 100644 index 00000000..f0b7239e --- /dev/null +++ b/src/modules/snapshots/jobs/createSnapshotJob.unit.ts @@ -0,0 +1,80 @@ +import { describe, expect, it, vi, beforeEach } from "vitest"; +import { createSnapshotJob } from "./createSnapshotJob"; +import { SNAPSHOT_JOB_TYPES } from "./jobTypes"; +import { JobStatus } from "@/shared/jobs/model"; + +const { + mockFindLanguageById, + mockUploadSnapshot, + mockCreateSnapshot, + mockEnqueueJob, +} = vi.hoisted(() => ({ + mockFindLanguageById: vi.fn(), + mockUploadSnapshot: vi.fn(), + mockCreateSnapshot: vi.fn(), + mockEnqueueJob: vi.fn(), +})); + +vi.mock("@/modules/languages/data-access/LanguageQueryService", () => ({ + __esModule: true, + languageQueryService: { + findById: mockFindLanguageById, + }, +})); +vi.mock("../data-access/snapshotObjectRepository", () => ({ + __esModule: true, + snapshotObjectRepository: { + upload: mockUploadSnapshot, + }, +})); +vi.mock("../data-access/SnapshotRepository", () => ({ + __esModule: true, + snapshotRepository: { + create: mockCreateSnapshot, + }, +})); +vi.mock("@/shared/jobs/enqueueJob", () => ({ + __esModule: true, + enqueueJob: mockEnqueueJob, +})); +vi.mock("@/shared/ulid", () => ({ + ulid: () => "snapshot-1", +})); + +describe("createSnapshotJob", () => { + beforeEach(() => { + mockFindLanguageById.mockReset(); + mockUploadSnapshot.mockReset(); + mockCreateSnapshot.mockReset(); + mockEnqueueJob.mockReset(); + }); + + it("enqueues a follow-up interlinear PDF job after creating a snapshot", async () => { + 
mockFindLanguageById.mockResolvedValue({ + id: "lang-1", + code: "spa", + name: "Spanish", + }); + mockUploadSnapshot.mockResolvedValue(undefined); + mockCreateSnapshot.mockResolvedValue(undefined); + mockEnqueueJob.mockResolvedValue(undefined); + + await createSnapshotJob({ + id: "job-1", + type: SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT, + status: JobStatus.Pending, + payload: { languageId: "lang-1" }, + createdAt: new Date(), + updatedAt: new Date(), + }); + + expect(mockEnqueueJob).toHaveBeenCalledWith( + SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF, + { + languageId: "lang-1", + languageCode: "spa", + snapshotId: "snapshot-1", + }, + ); + }); +}); diff --git a/src/modules/snapshots/jobs/jobTypes.ts b/src/modules/snapshots/jobs/jobTypes.ts index 8fad90da..af9f783a 100644 --- a/src/modules/snapshots/jobs/jobTypes.ts +++ b/src/modules/snapshots/jobs/jobTypes.ts @@ -1,4 +1,5 @@ export const SNAPSHOT_JOB_TYPES = { CREATE_SNAPSHOT: "create_snapshot", + CREATE_SNAPSHOT_INTERLINEAR_PDF: "create_snapshot_interlinear_pdf", RESTORE_SNAPSHOT: "restore_snapshot", }; diff --git a/src/modules/snapshots/jobs/restoreSnapshotJob.ts b/src/modules/snapshots/jobs/restoreSnapshotJob.ts index 2ef2ea98..1f58b6bd 100644 --- a/src/modules/snapshots/jobs/restoreSnapshotJob.ts +++ b/src/modules/snapshots/jobs/restoreSnapshotJob.ts @@ -3,6 +3,7 @@ import { Job } from "@/shared/jobs/model"; import { SNAPSHOT_JOB_TYPES } from "./jobTypes"; import { snapshotObjectRepository } from "../data-access/snapshotObjectRepository"; import { snapshotQueryService } from "../data-access/snapshotQueryService"; +import { getStorageEnvironment } from "@/shared/storageEnvironment"; export type RestoreSnapshotJob = Job<{ snapshotId: string; @@ -35,7 +36,7 @@ export async function restoreSnapshotJob(job: RestoreSnapshotJob) { await new Promise((resolve) => setTimeout(resolve, 3000)); await snapshotObjectRepository.restore({ - environment: process.env.NODE_ENV === "production" ? 
"prod" : "local", + environment: getStorageEnvironment(), snapshot, }); diff --git a/src/shared/jobs/jobMap.ts b/src/shared/jobs/jobMap.ts index ad4f99f2..736cf887 100644 --- a/src/shared/jobs/jobMap.ts +++ b/src/shared/jobs/jobMap.ts @@ -5,6 +5,10 @@ import { REPORTING_JOB_TYPES } from "@/modules/reporting/jobs/jobTypes"; import { SNAPSHOT_JOB_TYPES } from "@/modules/snapshots/jobs/jobTypes"; import { createSnapshotJob } from "@/modules/snapshots/jobs/createSnapshotJob"; import { restoreSnapshotJob } from "@/modules/snapshots/jobs/restoreSnapshotJob"; +import { createSnapshotInterlinearPdfJob } from "@/modules/snapshots/jobs/createSnapshotInterlinearPdfJob"; +import { EXPORT_JOB_TYPES } from "@/modules/export/jobs/jobTypes"; +import exportInterlinearPdfJob from "@/modules/export/jobs/exportInterlinearPdfJob"; +import cleanupExportsJob from "@/modules/export/jobs/cleanupExportsJob"; export type JobHandler = ( job: Job, @@ -26,14 +30,26 @@ const jobMap: Record> = { handler: exportAnalyticsJob, timeout: 60 * 5, // 5 minutes }, + [EXPORT_JOB_TYPES.EXPORT_INTERLINEAR_PDF]: { + handler: exportInterlinearPdfJob, + timeout: 60 * 15, // 15 minutes + }, [SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT]: { handler: createSnapshotJob, timeout: 60 * 15, // 15 minutes }, + [SNAPSHOT_JOB_TYPES.CREATE_SNAPSHOT_INTERLINEAR_PDF]: { + handler: createSnapshotInterlinearPdfJob, + timeout: 60 * 15, // 15 minutes + }, [SNAPSHOT_JOB_TYPES.RESTORE_SNAPSHOT]: { handler: restoreSnapshotJob, timeout: 60 * 15, // 15 minutes }, + [EXPORT_JOB_TYPES.CLEANUP_EXPORTS]: { + handler: cleanupExportsJob, + timeout: 60 * 5, + }, }; export default jobMap; diff --git a/src/shared/jobs/queue.ts b/src/shared/jobs/queue.ts index 0cc8398e..7d6479d9 100644 --- a/src/shared/jobs/queue.ts +++ b/src/shared/jobs/queue.ts @@ -50,11 +50,21 @@ export class SQSQueue implements Queue { } export class LocalQueue implements Queue { - constructor(private readonly functionUrl: string) {} + constructor(private readonly functionUrl?: 
string) {} async add(job: QueuedJob) { - // Queues are fire and forget so we don't await it's return here - fetch(this.functionUrl, { + const targetUrl = + this.functionUrl && this.functionUrl.trim().length > 0 ? + this.functionUrl + : "http://localhost:8080/2015-03-31/functions/function/invocations"; + + if (!targetUrl) { + console.error("Failed to execute job: JOB_FUNCTION_URL is not set"); + return; + } + + // Queues are fire and forget so we don't await its return here + fetch(targetUrl, { method: "post", body: JSON.stringify({ Records: [{ body: JSON.stringify(job) }] }), }).catch((error) => { @@ -77,4 +87,4 @@ const sqsCredentials = export default process.env.NODE_ENV === "production" ? new SQSQueue(process.env.JOB_QUEUE_URL ?? "", sqsCredentials) -: new LocalQueue(process.env.JOB_FUNCTION_URL ?? ""); +: new LocalQueue(process.env.JOB_FUNCTION_URL); diff --git a/src/shared/s3.ts b/src/shared/s3.ts new file mode 100644 index 00000000..f65b8abd --- /dev/null +++ b/src/shared/s3.ts @@ -0,0 +1,102 @@ +import { S3Client } from "@aws-sdk/client-s3"; +import { Readable } from "stream"; + +let cachedS3Client: S3Client | undefined; + +export function getS3Client(): S3Client { + if (cachedS3Client) return cachedS3Client; + + cachedS3Client = new S3Client({ + region: process.env.AWS_REGION ?? 
"us-east-1", + endpoint: process.env.AWS_ENDPOINT_URL_S3, + forcePathStyle: process.env.AWS_S3_FORCE_PATH_STYLE === "true", + }); + return cachedS3Client; +} + +export function mergeUint8Arrays(parts: Uint8Array[]): Uint8Array { + const total = parts.reduce((sum, part) => sum + part.byteLength, 0); + const merged = new Uint8Array(total); + let offset = 0; + for (const part of parts) { + merged.set(part, offset); + offset += part.byteLength; + } + return merged; +} + +async function readableToUint8Array(stream: Readable): Promise<Uint8Array> { + const parts: Uint8Array[] = []; + for await (const chunk of stream) { + if (typeof chunk === "string") { + parts.push(Uint8Array.from(Buffer.from(chunk))); + } else if (chunk instanceof Uint8Array) { + parts.push(chunk); + } else { + parts.push(Uint8Array.from(chunk)); + } + } + return mergeUint8Arrays(parts); +} + +async function webStreamToUint8Array( + stream: ReadableStream<Uint8Array>, +): Promise<Uint8Array> { + const reader = stream.getReader(); + const parts: Uint8Array[] = []; + // eslint-disable-next-line no-constant-condition + while (true) { + const { done, value } = await reader.read(); + if (done) break; + if (value) parts.push(value); + } + return mergeUint8Arrays(parts); +} + +export async function s3BodyToUint8Array( + body: unknown, +): Promise<Uint8Array | undefined> { + if (!body) return undefined; + if (body instanceof Uint8Array) return body; + if (body instanceof Readable) return readableToUint8Array(body); + + if (typeof Blob !== "undefined" && body instanceof Blob) { + return new Uint8Array(await body.arrayBuffer()); + } + + if (typeof (body as ReadableStream<Uint8Array>).getReader === "function") { + return webStreamToUint8Array(body as ReadableStream<Uint8Array>); + } + + return undefined; +} + +export async function s3BodyToReadable( + body: unknown, +): Promise<Readable | undefined> { + if (!body) return undefined; + if (body instanceof Readable) return body; + + if (body instanceof Uint8Array) { + return Readable.from([body]); + } + + if (typeof Blob !== "undefined" && body instanceof Blob) { + 
const bytes = new Uint8Array(await body.arrayBuffer()); + return Readable.from([bytes]); + } + + if (typeof (body as ReadableStream<Uint8Array>).getReader === "function") { + const webStream = body as ReadableStream<Uint8Array>; + const maybeFromWeb = ( + Readable as unknown as { fromWeb?: (s: any) => Readable } + ).fromWeb; + if (typeof maybeFromWeb === "function") { + return maybeFromWeb(webStream); + } + const bytes = await webStreamToUint8Array(webStream); + return Readable.from([bytes]); + } + + return undefined; +} diff --git a/src/shared/scriptDetection.ts b/src/shared/scriptDetection.ts new file mode 100644 index 00000000..08999edb --- /dev/null +++ b/src/shared/scriptDetection.ts @@ -0,0 +1,11 @@ +export type Script = "hebrew" | "greek"; + +// Simple heuristic: Hebrew block vs Greek block +export function detectScript(text: string): Script { + const hebrewRegex = /[\u0590-\u05FF]/; + const greekRegex = /[\u0370-\u03FF]/; + if (hebrewRegex.test(text)) return "hebrew"; + if (greekRegex.test(text)) return "greek"; + // Default to Hebrew if ambiguous; adjust as needed + return "hebrew"; +} diff --git a/src/shared/storageEnvironment.ts b/src/shared/storageEnvironment.ts new file mode 100644 index 00000000..87590364 --- /dev/null +++ b/src/shared/storageEnvironment.ts @@ -0,0 +1,5 @@ +export type StorageEnvironment = "prod" | "local"; + +export function getStorageEnvironment(): StorageEnvironment { + return process.env.NODE_ENV === "production" ? "prod" : "local"; +} diff --git a/tests/vitest/testSetup.ts b/tests/vitest/testSetup.ts index 418fa1cd..c5dd8665 100644 --- a/tests/vitest/testSetup.ts +++ b/tests/vitest/testSetup.ts @@ -1,10 +1,32 @@ import { webcrypto } from "node:crypto"; +import { vi } from "vitest"; import "./matchers"; +vi.mock("react", async () => { + const actual = await vi.importActual("react"); + const fallbackCache = + // basic memoizer for tests; avoids relying on Next's experimental cache + actual.cache ?? 
+ (<Args extends unknown[], Return>(fn: (...args: Args) => Return) => { + const map = new Map<string, Return>(); + return (...args: Args) => { + const key = JSON.stringify(args); + if (!map.has(key)) { + map.set(key, fn(...args)); + } + return map.get(key)!; + }; + }); + // Ensure classic runtime users can access global React + (globalThis as any).React = actual; + return { ...actual, cache: fallbackCache, default: actual as any }; +}); + // Necessary for @oslo/password to run in tests // We can remove this after we upgrade from node 18 -// @ts-ignore -globalThis.crypto = webcrypto; +if (typeof globalThis.crypto === "undefined") { + Object.defineProperty(globalThis, "crypto", { value: webcrypto }); +} process.env.ORIGIN = "globalbibletools.com"; process.env.LOG_LEVEL = "silent"; diff --git a/vitest.config.mts b/vitest.config.mts index 3678fda9..377e2a77 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -5,13 +5,17 @@ export default defineConfig({ plugins: [tsconfigPaths()], test: { include: ["**/*.{unit,test}.ts?(x)"], + exclude: [ + "**/node_modules/**", + "**/dist/**", + "**/*.localstack.test.ts?(x)", + ], globalSetup: ["./tests/vitest/dbSetup.ts"], setupFiles: ["./tests/vitest/testSetup.ts"], mockReset: true, - }, - resolve: { - alias: { - react: "next/dist/compiled/react/cjs/react.development.js", - }, + environmentMatchGlobs: [ + ["**/*.client.test.ts?(x)", "jsdom"], + ["**/*.react.test.ts?(x)", "jsdom"], + ], }, }); diff --git a/vitest.localstack.config.mts b/vitest.localstack.config.mts new file mode 100644 index 00000000..4bcdeecd --- /dev/null +++ b/vitest.localstack.config.mts @@ -0,0 +1,16 @@ +import { defineConfig } from "vitest/config"; +import tsconfigPaths from "vite-tsconfig-paths"; + +export default defineConfig({ + plugins: [tsconfigPaths()], + test: { + include: ["**/*.localstack.test.ts?(x)"], + globalSetup: ["./tests/vitest/dbSetup.ts"], + setupFiles: ["./tests/vitest/testSetup.ts"], + mockReset: true, + environmentMatchGlobs: [ + ["**/*.client.test.ts?(x)", "jsdom"], + 
["**/*.react.test.ts?(x)", "jsdom"], + ], + }, +});