Welcome to MCC Gadgets!
Tools for the Minecraft Commands community
diff --git a/src/components/java/patch-notes.tsx b/src/components/java/patch-notes.tsx
index db7c9c0..3aabe3a 100644
--- a/src/components/java/patch-notes.tsx
+++ b/src/components/java/patch-notes.tsx
@@ -10,7 +10,7 @@ import parseHtml, {
import Image from "next/image";
import { createElement, Suspense, type JSX } from "react";
import sanitizeHtml from "sanitize-html";
-import { fromError, isZodErrorLike } from "zod-validation-error";
+import { fromError } from "zod-validation-error";
import { PublishDate } from "~/components/java/publish-date";
import { Alert, AlertDescription, AlertTitle } from "~/components/ui/alert";
import { textContent } from "~/lib/element";
@@ -63,7 +63,7 @@ async function PatchNotesImpl({
if (!maybePatchNotes.success) {
let msg: string;
- if (isZodErrorLike(maybePatchNotes.error)) {
+ if (maybePatchNotes.error instanceof Error) {
msg = fromError(maybePatchNotes.error).toString();
} else {
msg = maybePatchNotes.error;
diff --git a/src/env.js b/src/env.js
index a06f9b9..d11757c 100644
--- a/src/env.js
+++ b/src/env.js
@@ -1,5 +1,18 @@
import { createEnv } from "@t3-oss/env-nextjs";
import { z } from "zod";
+/**
+ * @import { type ZodType } from "zod"
+ */
+
+/**
+ * @template {ZodType} T
+ * @param {T} validator
+ */
+function optionalInDev(validator) {
+ return process.env.NODE_ENV === "production"
+ ? validator
+ : validator.optional();
+}
export const env = createEnv({
/**
@@ -7,15 +20,13 @@ export const env = createEnv({
* isn't built with invalid env vars.
*/
server: {
- POSTGRES_PRISMA_URL: z.string().url(),
- POSTGRES_URL_NON_POOLING: z.string().url(),
+ DATABASE_URL: z.string().url(),
+ TURSO_AUTH_TOKEN: optionalInDev(z.string()),
+ TURSO_ORG_SLUG: optionalInDev(z.string()),
NODE_ENV: z
.enum(["development", "test", "production"])
.default("development"),
- NEXTAUTH_SECRET:
- process.env.NODE_ENV === "production"
- ? z.string()
- : z.string().optional(),
+ NEXTAUTH_SECRET: optionalInDev(z.string()),
NEXTAUTH_URL: z.preprocess(
// This makes Vercel deployments not fail if you don't set NEXTAUTH_URL
// Since NextAuth.js automatically uses the VERCEL_URL if present.
@@ -23,9 +34,10 @@ export const env = createEnv({
      // VERCEL_URL doesn't include `https` so it can't be validated as a URL
process.env.VERCEL ? z.string() : z.string().url()
),
- DISCORD_CLIENT_ID: z.string(),
- DISCORD_CLIENT_SECRET: z.string(),
+ DISCORD_CLIENT_ID: optionalInDev(z.string()),
+ DISCORD_CLIENT_SECRET: optionalInDev(z.string()),
VERCEL_GIT_COMMIT_REF: z.string().optional(),
+    VERCEL_GIT_PULL_REQUEST_ID: z.coerce.number().int().optional(),
},
/**
@@ -42,14 +54,16 @@ export const env = createEnv({
* middlewares) or client-side so we need to destruct manually.
*/
runtimeEnv: {
- POSTGRES_PRISMA_URL: process.env.POSTGRES_PRISMA_URL,
- POSTGRES_URL_NON_POOLING: process.env.POSTGRES_URL_NON_POOLING,
+ DATABASE_URL: process.env.DATABASE_URL,
+ TURSO_AUTH_TOKEN: process.env.TURSO_AUTH_TOKEN,
+ TURSO_ORG_SLUG: process.env.TURSO_ORG_SLUG,
NODE_ENV: process.env.NODE_ENV,
NEXTAUTH_SECRET: process.env.NEXTAUTH_SECRET,
NEXTAUTH_URL: process.env.NEXTAUTH_URL,
DISCORD_CLIENT_ID: process.env.DISCORD_CLIENT_ID,
DISCORD_CLIENT_SECRET: process.env.DISCORD_CLIENT_SECRET,
VERCEL_GIT_COMMIT_REF: process.env.VERCEL_GIT_COMMIT_REF,
+ VERCEL_GIT_PULL_REQUEST_ID: process.env.VERCEL_GIT_PULL_REQUEST_ID,
},
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially
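A quick note on what `optionalInDev` changes for call sites (illustrative sketch, not part of this diff): outside production the Turso variables type as `string | undefined`, so anything that needs them has to guard first, as `src/server/db/turso/index.ts` does.

```ts
// Hypothetical helper, only to illustrate the narrowed types from optionalInDev.
import { env } from "~/env";

export function requireTursoAuth(): string {
  // In development env.TURSO_AUTH_TOKEN is `string | undefined`; in production it is `string`.
  if (!env.TURSO_AUTH_TOKEN) {
    throw new Error("TURSO_AUTH_TOKEN is not set; Turso API calls are unavailable in dev");
  }
  return env.TURSO_AUTH_TOKEN;
}
```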
diff --git a/src/lib/fetch.ts b/src/lib/fetch.ts
index 7f26e2d..e137b03 100644
--- a/src/lib/fetch.ts
+++ b/src/lib/fetch.ts
@@ -1,6 +1,14 @@
import { unstable_cache } from "next/cache";
import SuperJSON from "superjson";
-import { ZodError } from "zod";
+import { ZodError, type z, type ZodType } from "zod";
+import {
+ err,
+ type Err,
+ type Ok,
+ type ParseResult,
+ type Result,
+} from "./result";
+import { fromError } from "zod-validation-error";
SuperJSON.registerClass(ZodError);
@@ -27,3 +35,76 @@ export function cache Promise>(
return deserialized;
}
+
+async function parseJson<T extends ZodType>(res: Response, schema: T) {
+ try {
+ const body = (await res.json()) as unknown;
+ return await schema.safeParseAsync(body);
+ } catch (error) {
+ return err(error as TypeError);
+ }
+}
+
+export type FetchAndParseResult<
+ TSchema extends ZodType,
+ ESchema extends ZodType = never,
+> = ParseResult<
+ TSchema,
+  TypeError | ([ESchema] extends [never] ? string : ParseResult<ESchema>)
+>;
+
+export function fetchAndParseErrToString<
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  TErr extends Err<string | Error | Result<any, any>>,
+>(
+ result: TErr,
+  ...[errResultToString]: TErr extends Err<infer TOuter>
+    ? TOuter extends Ok<infer TInner>
+ ? [(value: TInner) => string]
+ : never
+ : []
+) {
+ const e = result.error;
+ if (typeof e === "string") {
+ return e;
+ } else if (e instanceof Error) {
+ return fromError(e).toString();
+ } else if (e.success) {
+ return errResultToString(e.data);
+ } else {
+ return fromError(e.error).toString();
+ }
+}
+
+export async function fetchAndParse<TSchema extends ZodType>(
+ url: string,
+ schema: TSchema,
+ options?: RequestInit,
+): Promise<FetchAndParseResult<TSchema>>;
+export async function fetchAndParse<
+ TSchema extends ZodType,
+ ESchema extends ZodType,
+>(
+ url: string,
+ schema: TSchema,
+ options: RequestInit,
+ errorSchema: ESchema,
+): Promise<FetchAndParseResult<TSchema, ESchema>>;
+export async function fetchAndParse<
+ TSchema extends ZodType,
+ ESchema extends ZodType = never,
+>(
+ url: string,
+ schema: TSchema,
+ options: RequestInit = {},
+ errorSchema?: ESchema,
+): Promise<ParseResult<TSchema, ParseResult<ESchema> | TypeError | string>> {
+ const res = await fetch(url, options);
+ if (res.ok) {
+ return await parseJson(res, schema);
+ } else if (errorSchema) {
+ return await parseJson(res, errorSchema);
+ } else {
+ return err(res.statusText);
+ }
+}
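For reviewers, a sketch of how `fetchAndParse` is meant to be consumed (the endpoint and schemas here are made up; the real call sites are in `src/server/db/turso/index.ts` and `src/server/java/versions.ts`):

```ts
import { z } from "zod";
import { fetchAndParse, fetchAndParseErrToString } from "~/lib/fetch";

// Hypothetical schemas for the sake of the example.
const TODO_SCHEMA = z.object({ id: z.number(), title: z.string() });
const API_ERROR_SCHEMA = z.object({ message: z.string() });

export async function getTodoTitle(id: number): Promise<string> {
  const result = await fetchAndParse(
    `https://example.com/api/todos/${id}`, // placeholder URL
    TODO_SCHEMA,
    { method: "get" },
    API_ERROR_SCHEMA, // non-2xx bodies are parsed with this instead
  );
  if (!result.success) {
    // error is a ZodError, a TypeError, a string, or a parsed error body
    throw new Error(fetchAndParseErrToString(result, (e) => e.message));
  }
  return result.data.title;
}
```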
diff --git a/src/lib/result.ts b/src/lib/result.ts
new file mode 100644
index 0000000..726adb1
--- /dev/null
+++ b/src/lib/result.ts
@@ -0,0 +1,40 @@
+import type { z, ZodError, ZodType } from "zod";
+
+export type Ok<T> = {
+ success: true;
+ data: T;
+ error?: never;
+};
+
+export function ok<T>(data: T): Ok<T> {
+ return {
+ success: true,
+ data,
+ };
+}
+
+export type Err<T> = {
+ success: false;
+ data?: never;
+ error: T;
+};
+
+export function err<T>(error: T): Err<T> {
+ return {
+ success: false,
+ error,
+ };
+}
+
+/**
+ * A general success/failure that is structurally compatible with the result of Zod's safeParse
+ */
+export type Result<V, E> = Ok<V> | Err<E>;
+
+export type ParseResult<TSchema extends ZodType, TErr = never> = Result<
+  z.infer<TSchema>,
+  ZodError<z.infer<TSchema>> | TErr
+>;
+
+export type ErrType<T> = T extends Err<infer E> ? E : never;
+export type OkType<T> = T extends Ok<infer V> ? V : never;
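Usage sketch for the new Result helpers (the `parsePort` function is invented for illustration); the shape deliberately matches what Zod's `safeParse` returns, so either can flow through the same handling code:

```ts
import { err, ok, type Result } from "~/lib/result";

// Invented example function returning a Result.
function parsePort(raw: string): Result<number, string> {
  const port = Number(raw);
  if (!Number.isInteger(port) || port < 1 || port > 65535) {
    return err(`"${raw}" is not a valid port`);
  }
  return ok(port);
}

const parsed = parsePort("8080");
if (parsed.success) {
  console.log(parsed.data); // narrowed to number
} else {
  console.error(parsed.error); // narrowed to string
}
```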
diff --git a/src/lib/utils.ts b/src/lib/utils.ts
index be4904c..6d45093 100644
--- a/src/lib/utils.ts
+++ b/src/lib/utils.ts
@@ -20,3 +20,6 @@ export function capitalise(str: string): string {
export type Modify<T, R> = Omit<T, keyof R> & R;
export type WithCn<T> = Modify<T, { className?: string }>;
+
+// eslint-disable-next-line @typescript-eslint/ban-types
+export type FuzzyAutocomplete<T extends string> = T | (string & {});
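What `FuzzyAutocomplete` buys, as a small sketch: editors keep suggesting the known literals while arbitrary strings still type-check.

```ts
import type { FuzzyAutocomplete } from "~/lib/utils";

// "java" and "bedrock" are suggested by the editor, but any other string is still accepted.
type Edition = FuzzyAutocomplete<"java" | "bedrock">;

const a: Edition = "java";
const b: Edition = "some-future-edition";
```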
diff --git a/src/server/api/routers/post.ts b/src/server/api/routers/post.ts
index 3994691..7a296c9 100644
--- a/src/server/api/routers/post.ts
+++ b/src/server/api/routers/post.ts
@@ -1,42 +1,39 @@
-import { z } from "zod";
+// import { z } from "zod";
-import {
- createTRPCRouter,
- protectedProcedure,
- publicProcedure,
-} from "~/server/api/trpc";
+// import {
+// createTRPCRouter,
+// protectedProcedure,
+// publicProcedure,
+// } from "~/server/api/trpc";
+// import { posts } from "~/server/db/schema";
-export const postRouter = createTRPCRouter({
- hello: publicProcedure
- .input(z.object({ text: z.string() }))
- .query(({ input }) => {
- return {
- greeting: `Hello ${input.text}`,
- };
- }),
+// export const postRouter = createTRPCRouter({
+// hello: publicProcedure
+// .input(z.object({ text: z.string() }))
+// .query(({ input }) => {
+// return {
+// greeting: `Hello ${input.text}`,
+// };
+// }),
- create: protectedProcedure
- .input(z.object({ name: z.string().min(1) }))
- .mutation(async ({ ctx, input }) => {
- // simulate a slow db call
- await new Promise((resolve) => setTimeout(resolve, 1000));
+// create: protectedProcedure
+// .input(z.object({ name: z.string().min(1) }))
+// .mutation(async ({ ctx, input }) => {
+// await ctx.db.insert(posts).values({
+// name: input.name,
+// createdById: ctx.session.user.id,
+// });
+// }),
- return ctx.db.post.create({
- data: {
- name: input.name,
- createdBy: { connect: { id: ctx.session.user.id } },
- },
- });
- }),
+// getLatest: protectedProcedure.query(async ({ ctx }) => {
+// const post = await ctx.db.query.posts.findFirst({
+// orderBy: (posts, { desc }) => [desc(posts.createdAt)],
+// });
- getLatest: protectedProcedure.query(({ ctx }) => {
- return ctx.db.post.findFirst({
- orderBy: { createdAt: "desc" },
- where: { createdBy: { id: ctx.session.user.id } },
- });
- }),
+// return post ?? null;
+// }),
- getSecretMessage: protectedProcedure.query(() => {
- return "you can now see this secret message!";
- }),
-});
+// getSecretMessage: protectedProcedure.query(() => {
+// return "you can now see this secret message!";
+// }),
+// });
diff --git a/src/server/api/trpc.ts b/src/server/api/trpc.ts
index 486a816..00d924f 100644
--- a/src/server/api/trpc.ts
+++ b/src/server/api/trpc.ts
@@ -11,7 +11,7 @@ import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
-import { getServerAuthSession } from "~/server/auth";
+import { auth } from "~/server/auth";
import { db } from "~/server/db";
/**
@@ -27,7 +27,7 @@ import { db } from "~/server/db";
* @see https://trpc.io/docs/server/context
*/
export const createTRPCContext = async (opts: { headers: Headers }) => {
- const session = await getServerAuthSession();
+ const session = await auth();
return {
db,
@@ -78,6 +78,29 @@ export const createCallerFactory = t.createCallerFactory;
*/
export const createTRPCRouter = t.router;
+/**
+ * Middleware for timing procedure execution and adding an artificial delay in development.
+ *
+ * You can remove this if you don't like it, but it can help catch unwanted waterfalls by simulating
+ * network latency that would occur in production but not in local development.
+ */
+const timingMiddleware = t.middleware(async ({ next, path }) => {
+ const start = Date.now();
+
+ if (t._config.isDev) {
+ // artificial delay in dev
+ const waitMs = Math.floor(Math.random() * 400) + 100;
+ await new Promise((resolve) => setTimeout(resolve, waitMs));
+ }
+
+ const result = await next();
+
+ const end = Date.now();
+ console.log(`[TRPC] ${path} took ${end - start}ms to execute`);
+
+ return result;
+});
+
/**
* Public (unauthenticated) procedure
*
@@ -85,7 +108,7 @@ export const createTRPCRouter = t.router;
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
-export const publicProcedure = t.procedure;
+export const publicProcedure = t.procedure.use(timingMiddleware);
/**
* Protected (authenticated) procedure
@@ -95,14 +118,16 @@ export const publicProcedure = t.procedure;
*
* @see https://trpc.io/docs/procedures
*/
-export const protectedProcedure = t.procedure.use(({ ctx, next }) => {
- if (!ctx.session || !ctx.session.user) {
- throw new TRPCError({ code: "UNAUTHORIZED" });
- }
- return next({
- ctx: {
- // infers the `session` as non-nullable
- session: { ...ctx.session, user: ctx.session.user },
- },
+export const protectedProcedure = t.procedure
+ .use(timingMiddleware)
+ .use(({ ctx, next }) => {
+ if (!ctx.session || !ctx.session.user) {
+ throw new TRPCError({ code: "UNAUTHORIZED" });
+ }
+ return next({
+ ctx: {
+ // infers the `session` as non-nullable
+ session: { ...ctx.session, user: ctx.session.user },
+ },
+ });
});
-});
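A tiny hypothetical router, only to show that both procedure builders now run through `timingMiddleware`:

```ts
// Hypothetical router, not part of this diff.
import { z } from "zod";
import { createTRPCRouter, publicProcedure } from "~/server/api/trpc";

export const pingRouter = createTRPCRouter({
  ping: publicProcedure
    .input(z.object({ message: z.string() }))
    .query(({ input }) => ({ echo: input.message })),
});
// Every call is timed, logging something like "[TRPC] ping took 243ms to execute"
// (the exact path depends on where the router is mounted).
```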
diff --git a/src/server/auth.ts b/src/server/auth/config.ts
similarity index 73%
rename from src/server/auth.ts
rename to src/server/auth/config.ts
index 117984c..3ae1675 100644
--- a/src/server/auth.ts
+++ b/src/server/auth/config.ts
@@ -1,14 +1,15 @@
-import { PrismaAdapter } from "@auth/prisma-adapter";
-import {
- getServerSession,
- type DefaultSession,
- type NextAuthOptions,
-} from "next-auth";
-import { type Adapter } from "next-auth/adapters";
+import { DrizzleAdapter } from "@auth/drizzle-adapter";
+import { type DefaultSession, type NextAuthConfig } from "next-auth";
import DiscordProvider from "next-auth/providers/discord";
import { env } from "~/env";
import { db } from "~/server/db";
+import {
+ accounts,
+ sessions,
+ users,
+ verificationTokens,
+} from "~/server/db/schema";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
@@ -36,7 +37,7 @@ declare module "next-auth" {
*
* @see https://next-auth.js.org/configuration/options
*/
-export const authOptions: NextAuthOptions = {
+export const authConfig = {
callbacks: {
session: ({ session, user }) => ({
...session,
@@ -46,7 +47,12 @@ export const authOptions: NextAuthOptions = {
},
}),
},
- adapter: PrismaAdapter(db) as Adapter,
+ adapter: DrizzleAdapter(db, {
+ usersTable: users,
+ accountsTable: accounts,
+ sessionsTable: sessions,
+ verificationTokensTable: verificationTokens,
+ }),
providers: [
DiscordProvider({
clientId: env.DISCORD_CLIENT_ID,
@@ -62,11 +68,4 @@ export const authOptions: NextAuthOptions = {
* @see https://next-auth.js.org/providers/github
*/
],
-};
-
-/**
- * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
- *
- * @see https://next-auth.js.org/configuration/nextjs
- */
-export const getServerAuthSession = () => getServerSession(authOptions);
+} satisfies NextAuthConfig;
diff --git a/src/server/auth/index.ts b/src/server/auth/index.ts
new file mode 100644
index 0000000..76c146d
--- /dev/null
+++ b/src/server/auth/index.ts
@@ -0,0 +1,10 @@
+import NextAuth from "next-auth";
+import { cache } from "react";
+
+import { authConfig } from "./config";
+
+const { auth: uncachedAuth, handlers, signIn, signOut } = NextAuth(authConfig);
+
+const auth = cache(uncachedAuth);
+
+export { auth, handlers, signIn, signOut };
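For reference, the usual NextAuth v5 wiring these exports are meant for; the route file is not part of this diff, so treat it as an assumption about how the exports get consumed:

```ts
// Assumed consumers, not part of this diff:
//
// src/app/api/auth/[...nextauth]/route.ts
//   import { handlers } from "~/server/auth";
//   export const { GET, POST } = handlers;
//
// Server components and actions read the session through the cached wrapper:
import { auth } from "~/server/auth";

export async function getCurrentUserId(): Promise<string | null> {
  const session = await auth();
  return session?.user.id ?? null;
}
```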
diff --git a/src/server/db.ts b/src/server/db.ts
deleted file mode 100644
index 07dc027..0000000
--- a/src/server/db.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-import { PrismaClient } from "@prisma/client";
-
-import { env } from "~/env";
-
-const createPrismaClient = () =>
- new PrismaClient({
- log:
- env.NODE_ENV === "development" ? ["query", "error", "warn"] : ["error"],
- });
-
-const globalForPrisma = globalThis as unknown as {
-  prisma: ReturnType<typeof createPrismaClient> | undefined;
-};
-
-export const db = globalForPrisma.prisma ?? createPrismaClient();
-
-if (env.NODE_ENV !== "production") globalForPrisma.prisma = db;
diff --git a/src/server/db/index.ts b/src/server/db/index.ts
new file mode 100644
index 0000000..a46035f
--- /dev/null
+++ b/src/server/db/index.ts
@@ -0,0 +1,58 @@
+import { createClient, type Client } from "@libsql/client";
+import { drizzle } from "drizzle-orm/libsql";
+
+import { env } from "~/env";
+import * as schema from "./schema";
+import { retrieveDatabase } from "./turso";
+import { previewDbId } from "./utils";
+import { fetchAndParseErrToString } from "~/lib/fetch";
+
+/**
+ * Cache the database connection in development. This avoids creating a new connection on every HMR
+ * update.
+ */
+const globalForDb = globalThis as unknown as {
+ client: Client | undefined;
+};
+
+export async function getDbCredentials(): Promise<{
+ url: string;
+ authToken?: string;
+}> {
+ const {
+ VERCEL_GIT_PULL_REQUEST_ID: prNum,
+ VERCEL_GIT_COMMIT_REF: branchName,
+ NODE_ENV,
+ DATABASE_URL,
+ TURSO_AUTH_TOKEN,
+ } = env;
+
+ if (
+ NODE_ENV !== "production" ||
+ typeof prNum === "undefined" ||
+ !branchName ||
+ branchName === "main"
+ ) {
+ return {
+ url: DATABASE_URL,
+ authToken: TURSO_AUTH_TOKEN,
+ };
+ }
+
+ // Fetch hostname for the preview database from the Turso platform API
+ const dbInfo = await retrieveDatabase(previewDbId(prNum, branchName));
+ if (!dbInfo.success) {
+ throw new Error(fetchAndParseErrToString(dbInfo, (e) => e.error));
+ }
+
+ return {
+ url: `https://${dbInfo.data.Hostname}`,
+ authToken: TURSO_AUTH_TOKEN,
+ };
+}
+
+export const client =
+ globalForDb.client ?? createClient(await getDbCredentials());
+if (env.NODE_ENV !== "production") globalForDb.client = client;
+
+export const db = drizzle(client, { schema });
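Sketch of querying through the new Drizzle client (the email filter is made up); `db.query.users` exists because the whole schema, relations included, is passed to `drizzle()`:

```ts
import { db } from "~/server/db";

export async function findUserWithAccounts(email: string) {
  // Relational query API: `with: { accounts: true }` works because of usersRelations.
  return db.query.users.findFirst({
    where: (users, { eq }) => eq(users.email, email),
    with: { accounts: true },
  });
}
```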
diff --git a/src/server/db/migrations/0000_gigantic_carmella_unuscione.sql b/src/server/db/migrations/0000_gigantic_carmella_unuscione.sql
new file mode 100644
index 0000000..ba4d740
--- /dev/null
+++ b/src/server/db/migrations/0000_gigantic_carmella_unuscione.sql
@@ -0,0 +1,40 @@
+CREATE TABLE `mcc-gadgets_account` (
+ `user_id` text(255) NOT NULL,
+ `type` text(255) NOT NULL,
+ `provider` text(255) NOT NULL,
+ `provider_account_id` text(255) NOT NULL,
+ `refresh_token` text,
+ `access_token` text,
+ `expires_at` integer,
+ `token_type` text(255),
+ `scope` text(255),
+ `id_token` text,
+ `session_state` text(255),
+ PRIMARY KEY(`provider`, `provider_account_id`),
+ FOREIGN KEY (`user_id`) REFERENCES `mcc-gadgets_user`(`id`) ON UPDATE no action ON DELETE cascade
+);
+--> statement-breakpoint
+CREATE INDEX `account_user_id_idx` ON `mcc-gadgets_account` (`user_id`);--> statement-breakpoint
+CREATE TABLE `mcc-gadgets_session` (
+ `session_token` text(255) PRIMARY KEY NOT NULL,
+ `userId` text(255) NOT NULL,
+ `expires` integer NOT NULL,
+ FOREIGN KEY (`userId`) REFERENCES `mcc-gadgets_user`(`id`) ON UPDATE no action ON DELETE cascade
+);
+--> statement-breakpoint
+CREATE INDEX `session_userId_idx` ON `mcc-gadgets_session` (`userId`);--> statement-breakpoint
+CREATE TABLE `mcc-gadgets_user` (
+ `id` text(255) PRIMARY KEY NOT NULL,
+ `name` text(255),
+ `email` text(255),
+ `email_verified` integer,
+ `image` text(255)
+);
+--> statement-breakpoint
+CREATE UNIQUE INDEX `mcc-gadgets_user_email_unique` ON `mcc-gadgets_user` (`email`);--> statement-breakpoint
+CREATE TABLE `mcc-gadgets_verification_token` (
+ `identifier` text(255) NOT NULL,
+ `token` text(255) NOT NULL,
+ `expires` integer NOT NULL,
+ PRIMARY KEY(`identifier`, `token`)
+);
diff --git a/src/server/db/migrations/meta/0000_snapshot.json b/src/server/db/migrations/meta/0000_snapshot.json
new file mode 100644
index 0000000..f0315fd
--- /dev/null
+++ b/src/server/db/migrations/meta/0000_snapshot.json
@@ -0,0 +1,280 @@
+{
+ "version": "6",
+ "dialect": "sqlite",
+ "id": "1682405c-0786-4808-b6b1-d39dfe2f487d",
+ "prevId": "00000000-0000-0000-0000-000000000000",
+ "tables": {
+ "mcc-gadgets_account": {
+ "name": "mcc-gadgets_account",
+ "columns": {
+ "user_id": {
+ "name": "user_id",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "type": {
+ "name": "type",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "provider": {
+ "name": "provider",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "provider_account_id": {
+ "name": "provider_account_id",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "refresh_token": {
+ "name": "refresh_token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "access_token": {
+ "name": "access_token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "expires_at": {
+ "name": "expires_at",
+ "type": "integer",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "token_type": {
+ "name": "token_type",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "scope": {
+ "name": "scope",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "id_token": {
+ "name": "id_token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "session_state": {
+ "name": "session_state",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ }
+ },
+ "indexes": {
+ "account_user_id_idx": {
+ "name": "account_user_id_idx",
+ "columns": [
+ "user_id"
+ ],
+ "isUnique": false
+ }
+ },
+ "foreignKeys": {
+ "mcc-gadgets_account_user_id_mcc-gadgets_user_id_fk": {
+ "name": "mcc-gadgets_account_user_id_mcc-gadgets_user_id_fk",
+ "tableFrom": "mcc-gadgets_account",
+ "tableTo": "mcc-gadgets_user",
+ "columnsFrom": [
+ "user_id"
+ ],
+ "columnsTo": [
+ "id"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ }
+ },
+ "compositePrimaryKeys": {
+ "mcc-gadgets_account_provider_provider_account_id_pk": {
+ "columns": [
+ "provider",
+ "provider_account_id"
+ ],
+ "name": "mcc-gadgets_account_provider_provider_account_id_pk"
+ }
+ },
+ "uniqueConstraints": {},
+ "checkConstraints": {}
+ },
+ "mcc-gadgets_session": {
+ "name": "mcc-gadgets_session",
+ "columns": {
+ "session_token": {
+ "name": "session_token",
+ "type": "text(255)",
+ "primaryKey": true,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "userId": {
+ "name": "userId",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "expires": {
+ "name": "expires",
+ "type": "integer",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ }
+ },
+ "indexes": {
+ "session_userId_idx": {
+ "name": "session_userId_idx",
+ "columns": [
+ "userId"
+ ],
+ "isUnique": false
+ }
+ },
+ "foreignKeys": {
+ "mcc-gadgets_session_userId_mcc-gadgets_user_id_fk": {
+ "name": "mcc-gadgets_session_userId_mcc-gadgets_user_id_fk",
+ "tableFrom": "mcc-gadgets_session",
+ "tableTo": "mcc-gadgets_user",
+ "columnsFrom": [
+ "userId"
+ ],
+ "columnsTo": [
+ "id"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ }
+ },
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {},
+ "checkConstraints": {}
+ },
+ "mcc-gadgets_user": {
+ "name": "mcc-gadgets_user",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "text(255)",
+ "primaryKey": true,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "name": {
+ "name": "name",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "email": {
+ "name": "email",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "email_verified": {
+ "name": "email_verified",
+ "type": "integer",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ },
+ "image": {
+ "name": "image",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": false,
+ "autoincrement": false
+ }
+ },
+ "indexes": {
+ "mcc-gadgets_user_email_unique": {
+ "name": "mcc-gadgets_user_email_unique",
+ "columns": [
+ "email"
+ ],
+ "isUnique": true
+ }
+ },
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {},
+ "checkConstraints": {}
+ },
+ "mcc-gadgets_verification_token": {
+ "name": "mcc-gadgets_verification_token",
+ "columns": {
+ "identifier": {
+ "name": "identifier",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "token": {
+ "name": "token",
+ "type": "text(255)",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ },
+ "expires": {
+ "name": "expires",
+ "type": "integer",
+ "primaryKey": false,
+ "notNull": true,
+ "autoincrement": false
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {},
+ "compositePrimaryKeys": {
+ "mcc-gadgets_verification_token_identifier_token_pk": {
+ "columns": [
+ "identifier",
+ "token"
+ ],
+ "name": "mcc-gadgets_verification_token_identifier_token_pk"
+ }
+ },
+ "uniqueConstraints": {},
+ "checkConstraints": {}
+ }
+ },
+ "views": {},
+ "enums": {},
+ "_meta": {
+ "schemas": {},
+ "tables": {},
+ "columns": {}
+ },
+ "internal": {
+ "indexes": {}
+ }
+}
\ No newline at end of file
diff --git a/src/server/db/migrations/meta/_journal.json b/src/server/db/migrations/meta/_journal.json
new file mode 100644
index 0000000..4a76a0a
--- /dev/null
+++ b/src/server/db/migrations/meta/_journal.json
@@ -0,0 +1,13 @@
+{
+ "version": "7",
+ "dialect": "sqlite",
+ "entries": [
+ {
+ "idx": 0,
+ "version": "6",
+ "when": 1745979267601,
+ "tag": "0000_gigantic_carmella_unuscione",
+ "breakpoints": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/src/server/db/schema.ts b/src/server/db/schema.ts
new file mode 100644
index 0000000..eafc1c3
--- /dev/null
+++ b/src/server/db/schema.ts
@@ -0,0 +1,110 @@
+import { relations } from "drizzle-orm";
+import {
+ index,
+ int,
+ primaryKey,
+ sqliteTableCreator,
+ text,
+} from "drizzle-orm/sqlite-core";
+import { type AdapterAccount } from "next-auth/adapters";
+
+/**
+ * This is an example of how to use the multi-project schema feature of Drizzle ORM. Use the same
+ * database instance for multiple projects.
+ *
+ * @see https://orm.drizzle.team/docs/goodies#multi-project-schema
+ */
+export const createTable = sqliteTableCreator((name) => `mcc-gadgets_${name}`);
+
+// export const posts = createTable(
+// "post",
+// {
+// id: int("id", { mode: "number" }).primaryKey({ autoIncrement: true }),
+// name: text("name", { length: 256 }),
+// createdById: text("created_by", { length: 255 })
+// .notNull()
+// .references(() => users.id),
+// createdAt: int("created_at", { mode: "timestamp" })
+// .default(sql`(unixepoch())`)
+// .notNull(),
+// updatedAt: int("updated_at", { mode: "timestamp" }).$onUpdate(
+// () => new Date(),
+// ),
+// },
+// (example) => [
+// index("created_by_idx").on(example.createdById),
+// index("name_idx").on(example.name),
+// ],
+// );
+
+export const users = createTable("user", {
+ id: text("id", { length: 255 })
+ .notNull()
+ .primaryKey()
+ .$defaultFn(() => crypto.randomUUID()),
+ name: text("name", { length: 255 }),
+ email: text("email", { length: 255 }).unique(),
+ emailVerified: int("email_verified", { mode: "timestamp_ms" }),
+ image: text("image", { length: 255 }),
+});
+
+export const usersRelations = relations(users, ({ many }) => ({
+ accounts: many(accounts),
+}));
+
+export const accounts = createTable(
+ "account",
+ {
+ userId: text("user_id", { length: 255 })
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+ type: text("type", { length: 255 })
+      .$type<AdapterAccount["type"]>()
+ .notNull(),
+ provider: text("provider", { length: 255 }).notNull(),
+ providerAccountId: text("provider_account_id", { length: 255 }).notNull(),
+ refresh_token: text("refresh_token"),
+ access_token: text("access_token"),
+ expires_at: int("expires_at"),
+ token_type: text("token_type", { length: 255 }),
+ scope: text("scope", { length: 255 }),
+ id_token: text("id_token"),
+ session_state: text("session_state", { length: 255 }),
+ },
+ (account) => [
+ primaryKey({
+ columns: [account.provider, account.providerAccountId],
+ }),
+ index("account_user_id_idx").on(account.userId),
+ ],
+);
+
+export const accountsRelations = relations(accounts, ({ one }) => ({
+ user: one(users, { fields: [accounts.userId], references: [users.id] }),
+}));
+
+export const sessions = createTable(
+ "session",
+ {
+ sessionToken: text("session_token", { length: 255 }).notNull().primaryKey(),
+ userId: text("userId", { length: 255 })
+ .notNull()
+ .references(() => users.id, { onDelete: "cascade" }),
+ expires: int("expires", { mode: "timestamp_ms" }).notNull(),
+ },
+ (session) => [index("session_userId_idx").on(session.userId)],
+);
+
+export const sessionsRelations = relations(sessions, ({ one }) => ({
+ user: one(users, { fields: [sessions.userId], references: [users.id] }),
+}));
+
+export const verificationTokens = createTable(
+ "verification_token",
+ {
+ identifier: text("identifier", { length: 255 }).notNull(),
+ token: text("token", { length: 255 }).notNull(),
+ expires: int("expires", { mode: "timestamp_ms" }).notNull(),
+ },
+ (vt) => [primaryKey({ columns: [vt.identifier, vt.token] })],
+);
diff --git a/src/server/db/turso/index.ts b/src/server/db/turso/index.ts
new file mode 100644
index 0000000..7f4c1a9
--- /dev/null
+++ b/src/server/db/turso/index.ts
@@ -0,0 +1,33 @@
+import type { ZodType } from "zod";
+import { env } from "~/env";
+import { fetchAndParse, type FetchAndParseResult } from "~/lib/fetch";
+import { err, type Err } from "~/lib/result";
+import { DATABASE_INFO_SCHEMA, ERROR_RESPONSE_SCHEMA } from "./schemas";
+
+async function tursoFetch<TSchema extends ZodType>(
+ path: string,
+ schema: TSchema,
+): Promise<
+  FetchAndParseResult<TSchema, typeof ERROR_RESPONSE_SCHEMA> | Err<string>
+> {
+ const { TURSO_ORG_SLUG, TURSO_AUTH_TOKEN } = env;
+ if (!TURSO_ORG_SLUG || !TURSO_AUTH_TOKEN) {
+ return err("No Turso info, are you running in development?");
+ }
+
+ return await fetchAndParse(
+ `https://api.turso.tech/v1/organizations/${TURSO_ORG_SLUG}/${path}`,
+ schema,
+ {
+ method: "get",
+ headers: new Headers({
+ Authorization: `Bearer ${TURSO_AUTH_TOKEN}`,
+ }),
+ },
+ ERROR_RESPONSE_SCHEMA,
+ );
+}
+
+export async function retrieveDatabase(dbName: string) {
+ return tursoFetch(`databases/${dbName}`, DATABASE_INFO_SCHEMA);
+}
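How `retrieveDatabase` is consumed in this PR, compressed from `src/server/db/index.ts` (the database name below is a made-up example):

```ts
import { fetchAndParseErrToString } from "~/lib/fetch";
import { retrieveDatabase } from "~/server/db/turso";

export async function previewDbHostname(dbName: string): Promise<string> {
  const dbInfo = await retrieveDatabase(dbName); // e.g. "pr42feataddturso"
  if (!dbInfo.success) {
    // e is the parsed Turso error body: { error: string }
    throw new Error(fetchAndParseErrToString(dbInfo, (e) => e.error));
  }
  return dbInfo.data.Hostname;
}
```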
diff --git a/src/server/db/turso/schemas.ts b/src/server/db/turso/schemas.ts
new file mode 100644
index 0000000..0a8f3e0
--- /dev/null
+++ b/src/server/db/turso/schemas.ts
@@ -0,0 +1,34 @@
+import { z } from "zod";
+
+export const DATABASE_INFO_SCHEMA = z.object({
+ Name: z.string(),
+ DbId: z.string().uuid(),
+  Hostname: z.string(), // hostname doesn't include the URL scheme, so it can't be validated as a URL
+ block_reads: z.boolean(),
+ block_writes: z.boolean(),
+ allow_attach: z.boolean(),
+ regions: z.string().array(),
+ primaryRegion: z.string(),
+ type: z.string().default("logical"),
+ version: z.string(),
+ group: z.string(),
+ is_schema: z.boolean(),
+ schema: z.string(),
+ archived: z.boolean(),
+});
+
+export type DatabaseInfo = z.infer<typeof DATABASE_INFO_SCHEMA>;
+
+export const RETRIEVE_DATABASE_RESPONSE_SCHEMA = z.object({
+ database: DATABASE_INFO_SCHEMA,
+});
+
+export type RetrieveDatabaseResponse = z.infer<
+ typeof RETRIEVE_DATABASE_RESPONSE_SCHEMA
+>;
+
+export const ERROR_RESPONSE_SCHEMA = z.object({
+ error: z.string(),
+});
+
+export type ErrorResponse = z.infer<typeof ERROR_RESPONSE_SCHEMA>;
diff --git a/src/server/db/utils.js b/src/server/db/utils.js
new file mode 100644
index 0000000..6ef9811
--- /dev/null
+++ b/src/server/db/utils.js
@@ -0,0 +1,9 @@
+// This file is intentionally .js instead of .ts, and doesn't import from anywhere else in the project, so it can easily be called from GitHub Actions
+
+/**
+ * @param {number | undefined} prNum
+ * @param {string} branchName
+ */
+export function previewDbId(prNum, branchName) {
+ return `pr-${prNum}-${branchName}`.toLowerCase().replace(/\W/g, "").substring(0, 64);
+}
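Worth noting while reviewing: `\W` strips every non-word character, so the separators and any slashes in the branch name disappear from the generated id (the calls below are just illustrations):

```ts
import { previewDbId } from "~/server/db/utils";

previewDbId(42, "feat/Add-Turso"); // "pr42feataddturso"
previewDbId(7, "fix_env_vars"); // "pr7fix_env_vars" (underscores are word characters, so they stay)
```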
diff --git a/src/server/java/versions.ts b/src/server/java/versions.ts
index 442c9fa..c3b7537 100644
--- a/src/server/java/versions.ts
+++ b/src/server/java/versions.ts
@@ -1,5 +1,7 @@
-import { cache } from "~/lib/fetch";
+import { cache, fetchAndParse, type FetchAndParseResult } from "~/lib/fetch";
import { type ZodError, z } from "zod";
+import { err, ok, type Result } from "~/lib/result";
+import type { FuzzyAutocomplete } from "~/lib/utils";
const VERSION_MANIFEST_ENTRY_SCHEMA = z.object({
title: z.string(),
@@ -44,18 +46,11 @@ export const BASE_ASSET_URL = "https://launchercontent.mojang.com";
const BASE_URL = BASE_ASSET_URL + "/v2/";
export const getVersionManifest = cache(
- async (): Promise<
- | { success: true; data: VersionManifest }
- | {
- success: false;
- error: VersionManifestZodError;
- }
- > => {
- const versionsRes = await fetch(BASE_URL + "javaPatchNotes.json");
- const versions = VERSIONS_MANIFEST_SCHEMA.safeParse(
- await versionsRes.json(),
+  async (): Promise<FetchAndParseResult<typeof VERSIONS_MANIFEST_SCHEMA>> => {
+ const versions = await fetchAndParse(
+ BASE_URL + "javaPatchNotes.json",
+ VERSIONS_MANIFEST_SCHEMA,
);
-
if (versions.success) {
versions.data.entries.sort((a, b) => (a.date < b.date ? 1 : -1));
}
@@ -75,18 +70,11 @@ export type PatchNote = VersionManifestEntry &
  z.infer<typeof PATCH_NOTE_SCHEMA>;
export type PatchNoteZodError = ZodError<z.infer<typeof PATCH_NOTE_SCHEMA>>;
-export enum PatchNotesError {
- VersionNotFound = "Version not found",
-}
-
export type PatchNotesQuery = string | { latest: string[] | string | true };
export async function getPartialVersion(
version: PatchNotesQuery = { latest: true },
-): Promise<
- | { success: true; data: VersionManifestEntry }
- | { success: false; error: VersionManifestZodError | PatchNotesError }
-> {
+): Promise<Result<VersionManifestEntry, VersionManifestZodError | TypeError | string>> {
const versions = await getVersionManifest();
if (!versions.success) return versions;
@@ -104,21 +92,19 @@ export async function getPartialVersion(
partialVersion = versions.data.entries.find((v) => includes(v.type));
}
- if (!partialVersion)
- return { success: false, error: PatchNotesError.VersionNotFound };
+ if (!partialVersion) return err("Version not found");
- return { success: true, data: partialVersion };
+ return ok(partialVersion);
}
export const getPatchNotes = cache(
async (
version: PatchNotesQuery = { latest: true },
): Promise<
- | { success: true; data: PatchNote }
- | {
- success: false;
- error: PatchNotesError | PatchNoteZodError | VersionManifestZodError;
- }
+ Result<
+ PatchNote,
+ PatchNoteZodError | VersionManifestZodError | TypeError | string
+ >
> => {
const maybePartialVersion = await getPartialVersion(version);
if (!maybePartialVersion.success) return maybePartialVersion;
@@ -128,10 +114,7 @@ export const getPatchNotes = cache(
const patch = PATCH_NOTE_SCHEMA.safeParse(await patchRes.json());
if (!patch.success) return patch;
- return {
- success: true,
- data: { ...partialVersion, ...patch.data },
- };
+ return ok({ ...partialVersion, ...patch.data });
},
["java", "patch_note"],
{ revalidate: 2 /* m */ * 60 /* s/m */ },
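A sketch of the query shapes `getPatchNotes` accepts after this change, as I read `PatchNotesQuery` and the error handling in `patch-notes.tsx` (version strings are placeholders):

```ts
import { fromError } from "zod-validation-error";
import { getPatchNotes } from "~/server/java/versions";

export async function logLatestPatchNotes() {
  // Other accepted query shapes: a specific version string, { latest: "snapshot" },
  // or { latest: ["snapshot", "release"] } (my reading of PatchNotesQuery).
  const notes = await getPatchNotes({ latest: true });
  if (!notes.success) {
    const msg =
      typeof notes.error === "string"
        ? notes.error
        : fromError(notes.error).toString();
    console.error(msg);
    return;
  }
  console.log(notes.data.title);
}
```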
diff --git a/start-database.sh b/start-database.sh
deleted file mode 100644
index b9fd0f6..0000000
--- a/start-database.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env bash
-# Use this script to start a docker container for a local development database
-
-# TO RUN ON WINDOWS:
-# 1. Install WSL (Windows Subsystem for Linux) - https://learn.microsoft.com/en-us/windows/wsl/install
-# 2. Install Docker Desktop for Windows - https://docs.docker.com/docker-for-windows/install/
-# 3. Open WSL - `wsl`
-# 4. Run this script - `./start-database.sh`
-
-# On Linux and macOS you can run this script directly - `./start-database.sh`
-
-DB_CONTAINER_NAME="mcc-tools-postgres"
-
-if ! [ -x "$(command -v docker)" ]; then
- echo -e "Docker is not installed. Please install docker and try again.\nDocker install guide: https://docs.docker.com/engine/install/"
- exit 1
-fi
-
-if [ "$(docker ps -q -f name=$DB_CONTAINER_NAME)" ]; then
- echo "Database container '$DB_CONTAINER_NAME' already running"
- exit 0
-fi
-
-if [ "$(docker ps -q -a -f name=$DB_CONTAINER_NAME)" ]; then
- docker start "$DB_CONTAINER_NAME"
- echo "Existing database container '$DB_CONTAINER_NAME' started"
- exit 0
-fi
-
-# import env variables from .env
-set -a
-source .env
-
-DB_PASSWORD=$(echo "$POSTGRES_PRISMA_URL" | awk -F':' '{print $3}' | awk -F'@' '{print $1}')
-
-if [ "$DB_PASSWORD" = "password" ]; then
- echo "You are using the default database password"
- read -p "Should we generate a random password for you? [y/N]: " -r REPLY
- if ! [[ $REPLY =~ ^[Yy]$ ]]; then
- echo "Please set a password in the .env file and try again"
- exit 1
- fi
- # Generate a random URL-safe password
- DB_PASSWORD=$(openssl rand -base64 12 | tr '+/' '-_')
- sed -i -e "s#:password@#:$DB_PASSWORD@#" .env
-fi
-
-docker run -d \
- --name $DB_CONTAINER_NAME \
- -e POSTGRES_PASSWORD="$DB_PASSWORD" \
- -e POSTGRES_DB=mcc-tools \
- -p 5432:5432 \
- docker.io/postgres && echo "Database container '$DB_CONTAINER_NAME' was successfully created"
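Since the Postgres bootstrap script is gone, local development presumably just points the libSQL client at a local file instead of a Docker container; a minimal sketch (the file path is an assumption, not something this diff configures):

```ts
import { createClient } from "@libsql/client";

// Local dev without Docker: a plain SQLite file works with the libSQL client,
// e.g. by setting DATABASE_URL to a file: URL.
const client = createClient({ url: "file:./dev.db" });
```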