diff --git a/db/tables/applications.ts b/db/tables/applications.ts index f8a39dc..411cf57 100644 --- a/db/tables/applications.ts +++ b/db/tables/applications.ts @@ -53,6 +53,7 @@ export const applicationsRelations = relations( references: [assistantApplicationsTable.id], }), teamApplication: many(teamApplicationsTable), + //interview: many(interviewsTable), }), ); diff --git a/db/tables/interview-schemas.ts b/db/tables/interview-schemas.ts index 9e97344..a22c0bf 100644 --- a/db/tables/interview-schemas.ts +++ b/db/tables/interview-schemas.ts @@ -1,11 +1,12 @@ import { interviewsTable } from "@/db/tables/interviews"; import { mainSchema } from "@/db/tables/schema"; +import type { AnySchema } from "ajv"; import { relations } from "drizzle-orm"; import { json, serial } from "drizzle-orm/pg-core"; export const interviewSchemasTable = mainSchema.table("interviewSchemas", { id: serial("id").primaryKey(), - jsonSchema: json("jsonSchema").notNull(), // used to validate corresponding interviews interviewAnswers + jsonSchema: json("jsonSchema").$type<AnySchema>().notNull(), // used to validate corresponding interviews interviewAnswers }); export const interviewScemasRelations = relations( diff --git a/db/tables/interviews.ts b/db/tables/interviews.ts index 3cdc0ab..f9dfc0a 100644 --- a/db/tables/interviews.ts +++ b/db/tables/interviews.ts @@ -2,6 +2,7 @@ import { assistantApplicationsTable } from "@/db/tables/applications"; import { interviewSchemasTable } from "@/db/tables/interview-schemas"; import { mainSchema } from "@/db/tables/schema"; import { teamUsersTable } from "@/db/tables/users"; +import type { Json } from "@/lib/json-schema"; import { relations } from "drizzle-orm"; import { primaryKey } from "drizzle-orm/pg-core"; import { boolean, integer, json, serial, timestamp } from "drizzle-orm/pg-core"; @@ -14,7 +15,7 @@ export const interviewsTable = mainSchema.table("interviews", { interviewSchemaId: integer("interviewSchemaId") .notNull() .references(() => 
interviewSchemasTable.id), - interviewAnswers: json("interviewAnswers"), + interviewAnswers: json("interviewAnswers").$type<Json>(), isCancelled: boolean("isCancelled").notNull(), plannedTime: timestamp("plannedTime").notNull(), finishedTime: timestamp("timeFinished"), diff --git a/lib/json-schema.ts b/lib/json-schema.ts new file mode 100644 index 0000000..253a616 --- /dev/null +++ b/lib/json-schema.ts @@ -0,0 +1,56 @@ +import Ajv, { type ErrorObject, type AnySchema } from "ajv"; +import z from "zod"; + +type JsonSchemaResult = + | { + success: true; + } + | { + success: false; + error: ErrorObject[]; + }; + +const ajv = new Ajv(); + +export function validateJsonSchema( + schema: AnySchema, + data: unknown, +): JsonSchemaResult { + const validator = ajv.compile(schema); + const isValid = validator(data); + if (isValid) { + return { success: true }; + } + return { + success: false, + error: + validator.errors === undefined || validator.errors === null + ? [] + : validator.errors, + }; +} + +export function turnJsonIntoZodSchema(schema: AnySchema) { + return z + .object({}) + .passthrough() + .superRefine((data, ctx) => { + const validationResult = validateJsonSchema(schema, data); + if (!validationResult.success) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "The interview schema is not valid", + params: validationResult.error, + }); + } + + return validationResult.success; + }); +} + +// from: https://www.reddit.com/r/typescript/comments/13mssvc/types_for_json_and_writing_json +type JsonPrimative = string | number | boolean | null; +type JsonArray = Json[]; +type JsonObject = { [key: string]: Json }; +type JsonComposite = JsonArray | JsonObject; +export type Json = JsonPrimative | JsonComposite; diff --git a/lib/time-parsers.ts b/lib/time-parsers.ts index 5858adc..6f9751b 100644 --- a/lib/time-parsers.ts +++ b/lib/time-parsers.ts @@ -1,7 +1,12 @@ import { z } from "zod"; -export const timeStringParser = z.union([z.string().date(), z.string().time()]); 
+export const timeStringParser = z.union([ + z.string().date(), + z.string().time(), + z.string().datetime(), +]); +// Date here refers to the JS object date, so it allows more specific times than dates export const dateParser = z.date(); export const toDateParser = z .union([timeStringParser, z.date()]) @@ -24,7 +29,7 @@ export const toDatePeriodParser = z }) .pipe(datePeriodParser); -export const pastDateParser = z.date().max(new Date()); -export const futureDateParser = z.date().min(new Date()); +export const pastDateParser = dateParser.max(new Date()); +export const futureDateParser = dateParser.min(new Date()); export type DatePeriod = z.infer<typeof datePeriodParser>; diff --git a/package.json b/package.json index f5249fe..efd64d6 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ }, "license": "ISC", "dependencies": { + "ajv": "^8.17.1", "cors": "^2.8.5", "dotenv": "^16.4.7", "drizzle-orm": "^0.33.0", @@ -52,5 +53,5 @@ "typescript": "^5.8.2", "yaml": "^2.7.1" }, - "packageManager": "pnpm@10.15.0+sha512.486ebc259d3e999a4e8691ce03b5cac4a71cbeca39372a9b762cb500cfdf0873e2cb16abe3d951b1ee2cf012503f027b98b6584e4df22524e0c7450d9ec7aa7b" + "packageManager": "pnpm@10.18.3+sha512.bbd16e6d7286fd7e01f6b3c0b3c932cda2965c06a908328f74663f10a9aea51f1129eea615134bf992831b009eabe167ecb7008b597f40ff9bc75946aadfb08d" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cce3045..f26c070 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: dependencies: + ajv: + specifier: ^8.17.1 + version: 8.17.1 cors: specifier: ^2.8.5 version: 2.8.5 @@ -671,6 +674,9 @@ packages: resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} engines: {node: '>= 0.6'} + ajv@8.17.1: + resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} + anymatch@3.1.3: resolution: {integrity: 
sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} @@ -977,6 +983,9 @@ packages: resolution: {integrity: sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==} engines: {node: '>= 18'} + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + fast-glob@3.3.3: resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} @@ -984,6 +993,9 @@ packages: fast-safe-stringify@2.1.1: resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} @@ -1110,6 +1122,9 @@ packages: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} hasBin: true + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + lodash.get@4.4.2: resolution: {integrity: sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==} deprecated: This package is deprecated. Use the optional chaining (?.) operator instead. 
@@ -1336,6 +1351,10 @@ packages: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + resolve-pkg-maps@1.0.0: resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} @@ -1874,6 +1893,13 @@ snapshots: mime-types: 3.0.1 negotiator: 1.0.0 + ajv@8.17.1: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + anymatch@3.1.3: dependencies: normalize-path: 3.0.0 @@ -2174,6 +2200,8 @@ snapshots: transitivePeerDependencies: - supports-color + fast-deep-equal@3.1.3: {} + fast-glob@3.3.3: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -2184,6 +2212,8 @@ snapshots: fast-safe-stringify@2.1.1: {} + fast-uri@3.1.0: {} + fastq@1.19.1: dependencies: reusify: 1.1.0 @@ -2326,6 +2356,8 @@ snapshots: dependencies: argparse: 2.0.1 + json-schema-traverse@1.0.0: {} + lodash.get@4.4.2: {} lodash.isequal@4.5.0: {} @@ -2500,6 +2532,8 @@ snapshots: dependencies: picomatch: 2.3.1 + require-from-string@2.0.2: {} + resolve-pkg-maps@1.0.0: {} reusify@1.1.0: {} diff --git a/src/db-access/interviews.ts b/src/db-access/interviews.ts new file mode 100644 index 0000000..ff54c7c --- /dev/null +++ b/src/db-access/interviews.ts @@ -0,0 +1,74 @@ +import { database } from "@/db/setup/query-postgres"; +import { interviewSchemasTable } from "@/db/tables/interview-schemas"; +import { interviewsTable } from "@/db/tables/interviews"; +import type { + NewInterview, + NewInterviewSchema, +} from "@/src/request-handling/interviews"; +import type { + InterviewSchema, + InterviewSchemaKey, +} from "@/src/response-handling/interviews"; +import { inArray } from "drizzle-orm"; +import { type OrmResult, ormError } from 
"../error/orm-error"; +import type { QueryParameters } from "../request-handling/common"; +import { newDatabaseTransaction } from "./common"; + +export async function selectInterviewSchemaWithId( + id: InterviewSchemaKey[], +): Promise<OrmResult<InterviewSchema[]>> { + return await newDatabaseTransaction(database, async (tx) => { + const result = await tx + .select() + .from(interviewSchemasTable) + .where(inArray(interviewSchemasTable.id, id)); + if (result.length !== id.length) { + throw ormError("Couldn't find all entries"); + } + + return result; + }); +} + +export async function selectInterviewSchemas(listQueries: QueryParameters) { + return await newDatabaseTransaction(database, async (tx) => { + const result = await tx + .select() + .from(interviewSchemasTable) + .limit(listQueries.limit) + .offset(listQueries.offset); + + return result; + }); +} + +export async function insertInterviewSchema( + interviewSchemaRequests: NewInterviewSchema[], +): Promise<OrmResult<InterviewSchema[]>> { + return await newDatabaseTransaction(database, async (tx) => { + const result = await tx + .insert(interviewSchemasTable) + .values(interviewSchemaRequests) + .returning(); + + if (result.length !== interviewSchemaRequests.length) { + throw ormError("Failed to insert all entries"); + } + + return result; + }); +} + +export async function insertInterview(interviewRequests: NewInterview[]) { + return await newDatabaseTransaction(database, async (tx) => { + const result = await tx + .insert(interviewsTable) + .values(interviewRequests) + .returning(); + + if (result.length !== interviewRequests.length) { + throw ormError("Failed to insert all entries"); + } + return result; + }); +} diff --git a/src/request-handling/interviews.ts b/src/request-handling/interviews.ts new file mode 100644 index 0000000..55cff8d --- /dev/null +++ b/src/request-handling/interviews.ts @@ -0,0 +1,41 @@ +import { interviewsTable } from "@/db/tables/interviews"; +import { turnJsonIntoZodSchema } from "@/lib/json-schema"; +import { + futureDateParser, + 
timeStringParser, + toDateParser, +} from "@/lib/time-parsers"; +import { serialIdParser } from "@/src/request-handling/common"; +import type { AnySchema } from "ajv"; +import metaSchema from "ajv/dist/refs/json-schema-draft-07.json"; +import { createInsertSchema } from "drizzle-zod"; +import { z } from "zod"; + +export const newInterviewSchemaSchema = z.object({ + jsonSchema: turnJsonIntoZodSchema(metaSchema).transform((v) => v), // If the object passed this json parser we know it is a validJsonSchema +}); + +export const newInterviewSchema = z.object({ + applicationId: serialIdParser, + interviewSchemaId: serialIdParser, + interviewAnswers: z.object({}).passthrough(), // This will be further checked after schema is gotten from database + isCancelled: z.boolean(), + plannedTime: timeStringParser, +}); + +export const newInterviewToInsertSchema = newInterviewSchema + .extend({ + plannedTime: newInterviewSchema.shape.plannedTime + .pipe(toDateParser) + .pipe(futureDateParser), + }) + .pipe(createInsertSchema(interviewsTable)); + +export const newInterviewSchemaToInsertSchema = newInterviewSchemaSchema.extend( + {}, +); // because of the way drizzle-zod works this pipe does wrong type inference and shouldn't be used + +export type NewInterview = z.infer<typeof newInterviewToInsertSchema>; +export type NewInterviewSchema = z.infer< + typeof newInterviewSchemaToInsertSchema +>; diff --git a/src/response-handling/interviews.ts b/src/response-handling/interviews.ts new file mode 100644 index 0000000..358ea4b --- /dev/null +++ b/src/response-handling/interviews.ts @@ -0,0 +1,10 @@ +import { interviewSchemasTable } from "@/db/tables/interview-schemas"; +import { createSelectSchema } from "drizzle-zod"; +import type { z } from "zod"; + +const interviewSchemaSchema = createSelectSchema(interviewSchemasTable) + .strict() + .readonly(); + +export type InterviewSchema = z.infer<typeof interviewSchemaSchema>; +export type InterviewSchemaKey = InterviewSchema["id"]; diff --git a/src/routers/interviews.ts b/src/routers/interviews.ts new file 
mode 100644 index 0000000..e6a6b2c --- /dev/null +++ b/src/routers/interviews.ts @@ -0,0 +1,113 @@ +import { validateJsonSchema } from "@/lib/json-schema"; +import type { JSONSchemaType } from "ajv"; +import { Router } from "express"; +import { + insertInterview, + insertInterviewSchema, + selectInterviewSchemaWithId, + selectInterviewSchemas, +} from "../db-access/interviews"; +import { clientError, serverError } from "../error/http-errors"; +import { + toListQueryParser, + toSerialIdParser, +} from "../request-handling/common"; +import { + newInterviewSchemaToInsertSchema, + newInterviewToInsertSchema, +} from "../request-handling/interviews"; + +const interviewsRouter = Router(); + +interviewsRouter.post("/", async (req, res, next) => { + const bodyResult = newInterviewToInsertSchema.safeParse(req.body); + if (!bodyResult.success) { + next(clientError(400, "Invalid input data", bodyResult.error)); + return; + } + const body = bodyResult.data; + if (body.interviewAnswers !== undefined) { + const interviewSchemaResult = await selectInterviewSchemaWithId([ + body.interviewSchemaId, + ]); + + if (!interviewSchemaResult.success) { + next(clientError(404, "Resource not available", bodyResult.error)); + return; + } + + // We assume that jsonschemas already in the database are valid. 
+ const interviewJsonSchema = interviewSchemaResult.data[0] + .jsonSchema as JSONSchemaType; + + const jsonSchemaValidationResult = validateJsonSchema( + interviewJsonSchema, + body.interviewAnswers, + ); + + if (!jsonSchemaValidationResult.success) { + next( + clientError( + 422, + "Invalid request format", + jsonSchemaValidationResult.error, + ), + ); + return; + } + } + + const databaseResult = await insertInterview([body]); + if (!databaseResult.success) { + next(clientError(400, "Database error", databaseResult.error)); + return; + } + + res.json(databaseResult.data); +}); + +interviewsRouter.post("/schema", async (req, res, next) => { + const bodyResult = newInterviewSchemaToInsertSchema.safeParse(req.body); + if (!bodyResult.success) { + next(clientError(400, "Invalid input data", bodyResult.error)); + return; + } + const body = bodyResult.data; + const databaseResult = await insertInterviewSchema([body]); + + if (!databaseResult.success) { + next(clientError(400, "Database error", databaseResult.error)); + return; + } + res.json(databaseResult.data); +}); + +interviewsRouter.get("/schema/:id", async (req, res, next) => { + const idParameterResult = toSerialIdParser.safeParse(req.params.id); + if (!idParameterResult.success) { + next(clientError(400, "Invalid input data", idParameterResult.error)); + return; + } + const schemaResult = await selectInterviewSchemaWithId([ + idParameterResult.data, + ]); + if (!schemaResult.success) { + next(clientError(400, "Database error", schemaResult.error)); + return; + } + res.json(schemaResult.data); +}); + +interviewsRouter.get("/schema", async (req, res, next) => { + const queryResult = toListQueryParser.safeParse(req.query); + if (!queryResult.success) { + next(clientError(400, "Invalid input data", queryResult.error)); + return; + } + const dbResult = await selectInterviewSchemas(queryResult.data); + if (!dbResult.success) { + next(serverError(500, "Data processing error", dbResult.error)); + return; + } + 
res.json(dbResult.data); +});