diff --git a/bun.lockb b/bun.lockb index d5be3ae..f266a03 100755 Binary files a/bun.lockb and b/bun.lockb differ diff --git a/migrations/0000_large_kylun.sql b/migrations/0000_large_kylun.sql deleted file mode 100644 index 6315866..0000000 --- a/migrations/0000_large_kylun.sql +++ /dev/null @@ -1,22 +0,0 @@ -CREATE TABLE IF NOT EXISTS "posts_table" ( - "id" serial PRIMARY KEY NOT NULL, - "title" text NOT NULL, - "content" text NOT NULL, - "user_id" integer NOT NULL, - "created_at" timestamp DEFAULT now() NOT NULL, - "updated_at" timestamp NOT NULL -); ---> statement-breakpoint -CREATE TABLE IF NOT EXISTS "users_table" ( - "id" serial PRIMARY KEY NOT NULL, - "name" text NOT NULL, - "age" integer NOT NULL, - "email" text NOT NULL, - CONSTRAINT "users_table_email_unique" UNIQUE("email") -); ---> statement-breakpoint -DO $$ BEGIN - ALTER TABLE "posts_table" ADD CONSTRAINT "posts_table_user_id_users_table_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users_table"("id") ON DELETE cascade ON UPDATE no action; -EXCEPTION - WHEN duplicate_object THEN null; -END $$; diff --git a/migrations/meta/0000_snapshot.json b/migrations/meta/0000_snapshot.json deleted file mode 100644 index 1b15d6b..0000000 --- a/migrations/meta/0000_snapshot.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "id": "61e17345-4a42-4d3c-ae27-10232ff75045", - "prevId": "00000000-0000-0000-0000-000000000000", - "version": "7", - "dialect": "postgresql", - "tables": { - "public.posts_table": { - "name": "posts_table", - "schema": "", - "columns": { - "id": { - "name": "id", - "type": "serial", - "primaryKey": true, - "notNull": true - }, - "title": { - "name": "title", - "type": "text", - "primaryKey": false, - "notNull": true - }, - "content": { - "name": "content", - "type": "text", - "primaryKey": false, - "notNull": true - }, - "user_id": { - "name": "user_id", - "type": "integer", - "primaryKey": false, - "notNull": true - }, - "created_at": { - "name": "created_at", - "type": "timestamp", - "primaryKey": false, - "notNull": true, - "default": "now()" - }, - "updated_at": { - "name": "updated_at", - "type": "timestamp", - "primaryKey": false, - "notNull": true - } - }, - "indexes": {}, - "foreignKeys": { - "posts_table_user_id_users_table_id_fk": { - "name": "posts_table_user_id_users_table_id_fk", - "tableFrom": "posts_table", - "tableTo": "users_table", - "columnsFrom": ["user_id"], - "columnsTo": ["id"], - "onDelete": "cascade", - "onUpdate": "no action" - } - }, - "compositePrimaryKeys": {}, - "uniqueConstraints": {} - }, - "public.users_table": { - "name": "users_table", - "schema": "", - "columns": { - "id": { - "name": "id", - "type": "serial", - "primaryKey": true, - "notNull": true - }, - "name": { - "name": "name", - "type": "text", - "primaryKey": false, - "notNull": true - }, - "age": { - "name": "age", - "type": "integer", - "primaryKey": false, - "notNull": true - }, - "email": { - "name": "email", - "type": "text", - "primaryKey": false, - "notNull": true - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": { - "users_table_email_unique": { - "name": "users_table_email_unique", - "nullsNotDistinct": false, - "columns": ["email"] - } - } - } - }, - "enums": {}, - "schemas": {}, - "_meta": { - "columns": {}, - "schemas": {}, - "tables": {} - } -} diff --git a/migrations/meta/_journal.json b/migrations/meta/_journal.json deleted file mode 100644 index 7e41abc..0000000 --- a/migrations/meta/_journal.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": "7", - "dialect": 
"postgresql", - "entries": [ - { - "idx": 0, - "version": "7", - "when": 1718115001880, - "tag": "0000_large_kylun", - "breakpoints": true - } - ] -} diff --git a/package.json b/package.json index 3da276c..80e47cc 100644 --- a/package.json +++ b/package.json @@ -7,12 +7,15 @@ "format": "biome format . --write" }, "dependencies": { + "@ipld/dag-cbor": "^9.2.1", "@noble/curves": "^1.4.0", + "@scure/base": "^1.1.7", "drizzle-graphql": "^0.8.3", "drizzle-orm": "^0.31.2", "graphql": "^16.8.1", "graphql-yoga": "^5.3.1", "hono": "^4.4.5", + "multiformats": "^13.1.1", "pg": "^8.12.0" }, "devDependencies": { diff --git a/src/db.ts b/src/db.ts deleted file mode 100644 index 13e049d..0000000 --- a/src/db.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { drizzle } from 'drizzle-orm/node-postgres' -import { Pool } from 'pg' -import * as dbSchema from './schema.js' - -const pool = new Pool({ - connectionString: process.env.DATABASE_URL!, -}) - -export const db = drizzle(pool, { schema: dbSchema }) diff --git a/src/index.ts b/src/index.ts index ef00481..bd163e1 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,20 +1,41 @@ import { Hono } from 'hono' -import { RiverVmClient } from './rvm/index.js' -import { db } from './db.js' +import type { Message } from './rvm/lib/types.js' +import { River } from './rvm/index.js' +import { isMessage } from './rvm/lib/types.js' const app = new Hono() -const riverVm = new RiverVmClient(db) + +const river = await River.flow() app.post('/message', async (c) => { - // recieve data - const data = await c.req.json() - // verify cryptography of message - const verified = riverVm.verifyMessage(data.message) - if (!verified) return c.json({ message: 'message not verified' }) - // process message - const vmResponse = riverVm.processMessage(data.message) - // return results of message - return c.json({ message: vmResponse }) + try { + // Receive data + const data = await c.req.json() + + if (!isMessage(data.message)) { + return c.json({ error: 'Invalid message format' }, 400) + } + + const message: Message = data.message + + const verified = await river.verifyMessage(message) + + if (!verified) { + return c.json({ error: 'Message not verified' }, 401) + } + const processMessageResponse = await river.processMessage(message) + + return c.json({ result: processMessageResponse }) + } catch (error) { + console.error('Error processing message:', error) + return c.json({ error: 'Internal server error' }, 500) + } +}) + +process.on('SIGINT', async () => { + console.log('Shutting down gracefully') + await river.disconnect() + process.exit(0) }) export default app diff --git a/src/queries.ts b/src/queries.ts deleted file mode 100644 index 9c05cf2..0000000 --- a/src/queries.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { db } from './db.js' -import { type InsertUser, usersTable } from './schema.js' - -export async function createUser(data: InsertUser) { - await db.insert(usersTable).values(data) -} diff --git a/src/rvm/index.ts b/src/rvm/index.ts index a040794..dc1f396 100644 --- a/src/rvm/index.ts +++ b/src/rvm/index.ts @@ -1,78 +1,153 @@ +import { Pool } from "pg"; +import { ed25519ph } from "@noble/curves/ed25519"; +import { base64url } from "@scure/base"; +import { drizzle, type NodePgDatabase } from "drizzle-orm/node-postgres"; +import { eq } from "drizzle-orm"; +import * as dbSchema from "../schema.js"; import { - Message, - ItemAccRejBody, - ItemSubmitBody, isChannelCreateBody, isItemCreateBody, + isItemSubmitBody, + isGenericResponse, MessageTypes, -} from "./types.js"; -import { NodePgDatabase 
} from "drizzle-orm/node-postgres"; -import { ed25519ph } from "@noble/curves/ed25519"; + CAPTION_MAX_LENGTH, + Message, + GenericResponseBody, + ItemSubmitBody, +} from "./lib/types.js"; +import { messageBodyToBase64Url, makeCid } from "./lib/utils.js" -export class RiverVmClient { - /* - VM SETUP - */ +// OFFICIAL RIVER CLASS - private vmStorage: NodePgDatabase; - constructor(db: NodePgDatabase) { - this.vmStorage = db; +export class River { + private db: NodePgDatabase; + private authDb: NodePgDatabase; + private pool: Pool; + private authPool: Pool; + + private constructor( + db: NodePgDatabase, + authDb: NodePgDatabase, + pool: Pool, + authPool: Pool + ) { + this.db = db; + this.authDb = authDb; + this.pool = pool; + this.authPool = authPool; } - /* - PUBLIC FUNCTIONS - */ + static async flow(): Promise { + const connectionString = process.env.DATABASE_URL!; + const authConnectionString = process.env.AUTH_DATABASE_URL!; + + const pool = new Pool({ connectionString }); + const authPool = new Pool({ connectionString: authConnectionString }); + + try { + const client = await pool.connect(); + client.release(); + const authClient = await authPool.connect(); + authClient.release(); + console.log("Database connections successful"); + } catch (err) { + console.error("Failed to connect to the databases", err); + throw err; + } + + const db = drizzle(pool, { schema: dbSchema }); + const authDb = drizzle(authPool, { schema: dbSchema }); + + return new River(db, authDb, pool, authPool); + } + + async disconnect() { + await this.pool.end(); + await this.authPool.end(); + } + + // simple getters should not replace graphql over authdb + // NOTE: i removed all of these because unncessary code at the moment public async verifyMessage(message: Message): Promise { // 1. lookup user id - const userExists = await this.vmStorage.query.usersTable({ - userId: message.messageData.rid, + const userExists = await this.authDb.query.usersTable.findFirst({ + where: (users, { eq }) => + eq(users.id, message.messageData.rid.toString()), }); if (!userExists) return false; + // 2. lookup signing key - const keyExistsForUserAtTimestamp = this.vmStorage.query.keyTable({ - userId: message.messageData.rid, - signer: message.signer, - timestamp: message.messageData.timestamp, - }); + const keyExistsForUserAtTimestamp = + await this.authDb.query.keyTable.findFirst({ + where: (keys, { and, eq }) => + and( + eq(keys.userid, message.messageData.rid.toString()), + eq(keys.publickey, message.signer) + ), + }); + if (!keyExistsForUserAtTimestamp) return false; + // 3. verify hash of message = message.messageHash - const computedHash = _ourHashingFunction(message.messageData); - if (computedHash != message.hash) return false; + // investigate actual hashing function + + const computedHash = await makeCid(message.messageData); + if (computedHash.toString() !== message.hash.toString()) return false; + // 4. verify signature is valid over hash const valid = ed25519ph.verify(message.sig, message.hash, message.signer); if (!valid) return false; + // 5. 
return true if all checks passed
    return true;
  }

  public async processMessage(message: Message): Promise {
-    // return null if invalid or message type
    if (!Object.values(MessageTypes).includes(message.messageData.type))
      return null;
-    // route message to executor function
-    let vmResponse = null;
-    switch (message.messageData.type) {
-      case MessageTypes.CHANNEL_CREATE: {
-        vmResponse = this._msg1_channelCreate(message);
-      }
-      case MessageTypes.ITEM_CREATE: {
-        vmResponse = this._msg5_itemCreate(message);
-      }
-      case MessageTypes.ITEM_SUBMIT: {
-        vmResponse = this._msg8_itemSubmit(message);
-      }
-      case MessageTypes.ITEM_ACC_REJ: {
-        vmResponse = this._msg9_itemAccRej(message);
-      }
-    }
-    return vmResponse;
+
+    const handlers: {
+      [K in MessageTypes]?: (message: Message) => Promise<string | null>;
+    } = {
+      [MessageTypes.CHANNEL_CREATE]: this._msg1_channelCreate,
+      [MessageTypes.ITEM_CREATE]: this._msg6_itemCreate,
+      [MessageTypes.ITEM_SUBMIT]: this._msg9_itemSubmit,
+      [MessageTypes.GENERIC_RESPONSE]: this._msg17_genericResponse,
+    };
+
+    const handler = handlers[message.messageData.type];
+    if (!handler) return null; // this check should be unnecessary because we're checking for valid types at the beginning of the function?
+
+    const result = await handler.call(this, message);
+    if (!result) return null;
+
+    await this._storeValidMessage(result, message); // if the handler returned a cid we know it's valid, so store the message in messageTable
+    return result;
+  }
+
+  private async _storeValidMessage(
+    messageId: string,
+    message: Message
+  ): Promise<void> {
+    await this.db.insert(dbSchema.messageTable).values({
+      id: messageId,
+      rid: message.messageData.rid,
+      timestamp: message.messageData.timestamp,
+      type: message.messageData.type,
+      body: messageBodyToBase64Url(message.messageData.body),
+      signer: message.signer,
+      hashType: message.hashType,
+      hash: base64url.encode(message.hash),
+      sigType: message.sigType,
+      sig: base64url.encode(message.sig),
+    });
  }

  /*
    PRIVATE FUNCTIONS
    only accessible within vm context/runtime
-  */
+  */

  /*
    NAME: CHANNEL_CREATE
@@ -86,113 +161,157 @@ export class RiverVmClient {
    // make sure message data body is correct type
    if (!isChannelCreateBody(message.messageData.body)) return null;
    // generate channel id
-    const channelId = _ourHashingFunction(message.messageData);
+    const channelId = (await makeCid(message.messageData)).toString();
    // update RVM storage
-    await this.vmStorage.update.channelTable(channelId, message.messageData);
-    // return channelId in request
+    await this.db.insert(dbSchema.channelTable).values({
+      id: channelId,
+      content: JSON.stringify(message.messageData.body),
+      timestamp: Number(message.messageData.timestamp),
+      createdById: message.messageData.rid.toString(),
+      uri: message.messageData.body.uri,
+      // destructure cid to extract name and description?
+ name: "", + description: "", + }); return channelId; } /* - NAME: ITEM_CREATE - TYPE: 5 - BODY: { - uri: string - } - */ + NAME: ITEM_CREATE + TYPE: 6 + BODY: { + uri: string + } + */ - private async _msg5_itemCreate(message: Message): Promise { + private async _msg6_itemCreate(message: Message): Promise { // make sure message data body is correct type if (!isItemCreateBody(message.messageData.body)) return null; // generate itemId - const itemId = _ourHashingFunction(message.messageData); + const itemId = (await makeCid(message.messageData)).toString(); // update RVM storage - await this.vmStorage.update.itemTable(itemId, message.messageData); - // return itemId in request + await this.db.insert(dbSchema.ItemTable).values({ + id: itemId, + createdById: message.messageData.rid.toString(), + uri: message.messageData.body.uri, + }); return itemId; } /* - NAME: ITEM_SUBMIT - TYPE: 8 - BODY: { - itemId: string - channelId: string - caption?: string - } - */ + NAME: ITEM_SUBMIT + TYPE: 9 + BODY: { + itemId: string + channelId: string + caption?: string + } + */ - private async _msg8_itemSubmit(message: Message): Promise { - // make sure message data body is correct type + private async _msg9_itemSubmit(message: Message): Promise { if (!isItemSubmitBody(message.messageData.body)) return null; - // destructure body object for vis - const { itemId, channelId, caption } = message.messageData + const { itemId, channelId, text } = message.messageData .body as ItemSubmitBody; - // check if item exists - const itemExists = this.vmStorage.query.itemTable(itemId); + + const itemExists = await this.db.query.ItemTable.findFirst({ + where: (items, { eq }) => eq(items.id, itemId), + }); if (!itemExists) return null; - // check if channel exists - const channelExists = this.vmStorage.query.channelTable(channelId); + + const channelExists = await this.db.query.channelTable.findFirst({ + where: (channels, { eq }) => eq(channels.id, channelId), + }); if (!channelExists) return null; - // check caption for max length - if (caption && caption.length > CAPTION_MAX_LENGTH) return null; - // generate submissionId - const submissionId = _ourHashingFunction(message.messageData); - // check if user is owner of channel - const isOwner = this.vmStorage.query.channelTable( - channelId, - message.messageData.rid - ); - // update RVM storage - if (isOwner) { - await this.vmStorage.update.submissionTable( - submissionId, - message.messageData, - OWNER_FLAG - ); - } else { - await this.vmStorage.update.submissionTable( - submissionId, - message.messageData, - null - ); - } - // return submissionId in request + + if (text && text.length > CAPTION_MAX_LENGTH) return null; + + const submissionId = (await makeCid(message.messageData)).toString(); + + // TODO: make this an isOwnerOrMod lookup + const isOwner = await this.db.query.channelTable.findFirst({ + where: (channels, { and, eq }) => + and( + eq(channels.id, channelId), + eq(channels.createdById, message.messageData.rid.toString()) + ), + }); + + await this.db.insert(dbSchema.submissionsTable).values({ + id: submissionId, + content: JSON.stringify(message.messageData.body), + userId: message.messageData.rid.toString(), + status: isOwner ? 
3 : 0, // channel owners/mods get their submissions automatically set to 3 (0 = pending, 1 = declined, 2 = accepted, 3 = owner/mod)
+    });
+
+    return submissionId;
  }

  /*
-    NAME: ITEM_ACCREJ
-    TYPE: 0
-    BODY: {
-      submissionId: string
-      Response: boolean
-      caption?: string
-    }
-  */
+    NAME: GENERIC_RESPONSE
+    TYPE: 17
+    BODY: {
+      messageId: string
+      response: boolean
+    }
+  */

-  private async _msg9_itemAccRej(message: Message): Promise {
-    // make sure message data body is correct type
-    if (!isItemAccRejBody(message.messageData.body)) return null;
-    // destructure body object for vis
-    const { submissionId, response, caption } = message.messageData
-      .body as ItemAccRejBody;
-    // check if submission exists
-    const submissionExists = this.vmStorage.query.submissionTable(submissionId);
-    if (!submissionExists) return null;
-    // check caption for max length
-    if (caption && caption.length > CAPTION_MAX_LENGTH) return null;
-    // generate accRejId
-    const accRejId = _ourHashingFunction(message.messageData);
-    // check if user is owner/moderator of channel
-    const isOwnerOrModerator = this.vmStorage.query.channelTable(
-      submissionExists.channelId,
-      message.messageData.rid
-    );
-    if (!isOwnerOrModerator) return null;
-    // update RVM storage
-    this.vmStorage.update.accRejTable(accRejId, message.messageData);
-    // return accRejId in request
-    return accRejId;
+  private async _msg17_genericResponse(
+    message: Message
+  ): Promise<string | null> {
+    if (!isGenericResponse(message.messageData.body)) return null;
+    const { messageId, response } = message.messageData
+      .body as GenericResponseBody;
+
+    // NOTE: maybe should update messageId format to prepend with messageId
+    // so that we don't need to keep a global message table and can just
+    // look up specific message from its corresponding table
+    // OR perhaps simpler is just some union Message table that joins
+    // all of the individual message tables for the purpose of this specific query
+    const messageExists = await this.db.query.messageTable.findFirst({
+      where: (messages, { eq }) => eq(messages.id, messageId),
+    });
+
+    if (!messageExists) return null;
+
+    const responseId = (await makeCid(message.messageData)).toString();
+
+    // process things differently depending on type of message
+    // the genericResponse was targeting
+    switch (messageExists.type) {
+      // generic response handler for item submissions
+      case MessageTypes.ITEM_SUBMIT:
+        // lookup submit message
+        const submission = await this.db.query.submissionsTable.findFirst({
+          where: (submissions, { eq }) => eq(submissions.id, messageId),
+        });
+        // return null if couldn't find submission
+        if (!submission) return null;
+        // return null if submission status not equal to pending
+        if (submission.status != 0) return null;
+        // update status field
+        await this.db
+          .update(dbSchema.submissionsTable)
+          .set({ status: response ? 2 : 1 }) // if response == true, set status to accepted (2), if false set to rejected (1)
+          .where(eq(dbSchema.submissionsTable.id, messageId));
+        break;
+      case MessageTypes.CHANNEL_INVITE_MEMBER:
+        // TODO: add logic
+        break;
+      case MessageTypes.USER_INVITE_FRIEND:
+        // TODO: add logic
+        break;
+      // Default case if no matching case is found
+      default:
+        break;
+    }
+
+    // record the response in responsesTable
+    await this.db.insert(dbSchema.responsesTable).values({
+      id: responseId,
+      targetMessageId: messageId,
+      response: response,
+    });
+
+    return responseId;
  }
}
diff --git a/src/rvm/lib/types.ts b/src/rvm/lib/types.ts
new file mode 100644
index 0000000..64843b5
--- /dev/null
+++ b/src/rvm/lib/types.ts
@@ -0,0 +1,277 @@
+/*
+ *
+ * MESSAGE TYPES
+ *
+ */
+
+export enum HashTypes {
+  NONE = 0,
+  BLAKE_3 = 1,
+}
+
+export enum SignatureTypes {
+  NONE = 0,
+  ED25519 = 1,
+  EIP712 = 2,
+}
+
+export enum MessageTypes {
+  NONE = 0,
+  CHANNEL_CREATE = 1,
+  CHANNEL_EDIT = 2,
+  CHANNEL_DELETE = 3,
+  CHANNEL_INVITE_MEMBER = 4,
+  CHANNEL_TRANSFER_OWNER = 5,
+  ITEM_CREATE = 6,
+  ITEM_EDIT = 7,
+  ITEM_DELETE = 8,
+  ITEM_SUBMIT = 9,
+  ITEM_REMOVE = 10,
+  COMMENT_CREATE = 11,
+  COMMENT_EDIT = 12,
+  COMMENT_DELETE = 13,
+  USER_SET_NAME = 14,
+  USER_SET_DATA = 15, // initially we just support setting a bio capped to a specific char count. could eventually support URI schema
+  USER_INVITE_FRIEND = 16,
+  GENERIC_RESPONSE = 17,
+}
+
+export type Message = {
+  signer: string
+  messageData: MessageData
+  hashType: HashTypes
+  hash: Uint8Array
+  sigType: SignatureTypes
+  sig: Uint8Array
+}
+
+// type guard function (hashType/sigType are numeric enum values)
+export function isMessage(data: Message): data is Message {
+  return (
+    typeof data.signer === 'string' &&
+    typeof data.messageData === 'object' &&
+    typeof data.hashType === 'number' &&
+    Object.values(HashTypes).includes(data.hashType) &&
+    data.hash instanceof Uint8Array &&
+    typeof data.sigType === 'number' &&
+    Object.values(SignatureTypes).includes(data.sigType) &&
+    data.sig instanceof Uint8Array
+  )
+}
+
+export type MessageData = {
+  rid: bigint
+  timestamp: bigint
+  type: MessageTypes
+  body: MessageDataBodyTypes
+}
+
+/*
+ *
+ * MESSAGE BODY TYPES
+ *
+ */
+
+/*
+ * 1
+ */
+export type ChannelCreateBody = {
+  uri: string
+}
+
+// type guard function
+export function isChannelCreateBody(obj: unknown): obj is ChannelCreateBody {
+  return (
+    typeof obj === 'object' &&
+    obj !== null &&
+    'uri' in obj &&
+    typeof (obj as { uri: unknown }).uri === 'string'
+  )
+}
+
+/*
+ * 2
+ */
+export type ChannelEditBody = {
+  channelId: string
+  uri: string
+}
+
+/*
+ * 3
+ */
+export type ChannelDeleteBody = {
+  channelId: string
+}
+
+/*
+ * 4
+ */
+export type ChannelInviteMemberBody = {
+  channelId: string
+  memberRid: bigint
+}
+
+
+/*
+ * 5
+ */
+export type ChannelTransferOwnerBody = {
+  channelId: string
+  transferToRid: bigint
+}
+
+/*
+ * 6
+ */
+export type ItemCreateBody = {
+  uri: string
+}
+
+// type guard function
+export function isItemCreateBody(obj: unknown): obj is ItemCreateBody {
+  return (
+    typeof obj === 'object' &&
+    obj !== null &&
+    'uri' in obj &&
+    typeof (obj as { uri: unknown }).uri === 'string'
+  )
+}
+
+/*
+ * 7
+ */
+export type ItemEditBody = {
+  itemId: string
+  uri: string
+}
+
+/*
+ * 8
+ */
+export type ItemDeleteBody = {
+  itemId: string
+}
+
+/*
+ * 9
+ */
+export type ItemSubmitBody = {
+  itemId: string
+  channelId: string
+  text?: string // MAX 300 CHAR LIMIT
+}
+
+export function isItemSubmitBody(obj: unknown): obj is ItemSubmitBody {
+  if (typeof obj !== 'object' || obj === null) {
+    return false
+  }
+
+  const messageBody = obj as Partial<ItemSubmitBody>
+
+  return (
+    typeof messageBody.itemId === 'string' &&
+    typeof messageBody.channelId === 'string' &&
+    (messageBody.text === undefined ||
+      (typeof messageBody.text === 'string' &&
+        messageBody.text.length <= 300))
+  )
+}
+
+/*
+ * 10
+ */
+export type ItemRemoveBody = {
+  submissionId: string
+}
+
+/*
+ * 11
+ */
+export type CommentCreateBody = {
+  targetId: string // Must be SUBMISSION_ID or COMMENT_ID
+  text: string // MAX_CHAR_LIMIT = 300
+}
+
+/*
+ * 12
+ */
+export type CommentEditBody = {
+  commentId: string
+  text: string // MAX_CHAR_LIMIT = 300
+}
+
+/*
+ * 13
+ */
+export type CommentDeleteBody = {
+  commentId: string
+}
+
+/*
+ * 14
+ */
+// TODO: set up the protocol logic for handling this correctly
+// based off how we are currently doing it in username DB
+export type UserSetNameBody = {
+  fromRid?: bigint
+  toRid?: bigint
+  username?: string // MAX_CHAR_LIMIT = 15 + regex
+}
+
+/*
+ * 15
+ */
+export type UserSetDataBody = {
+  rid: bigint
+  data: string // initially just support pure text "bios" of a max length
+}
+
+/*
+ * 16
+ */
+export type UserInviteFriendBody = {
+  rid: bigint
+}
+
+/*
+ * 17
+ */
+export type GenericResponseBody = {
+  messageId: string
+  response: boolean
+}
+
+export function isGenericResponse(obj: unknown): obj is GenericResponseBody {
+  return (
+    typeof obj === 'object' &&
+    obj !== null &&
+    'messageId' in obj &&
+    typeof (obj as { messageId: unknown }).messageId === 'string' &&
+    'response' in obj &&
+    typeof (obj as { response: unknown }).response === 'boolean'
+  )
+}
+
+/*
+ * Message Data Body Union type
+ */
+export type MessageDataBodyTypes =
+  | ChannelCreateBody
+  | ChannelEditBody
+  | ChannelDeleteBody
+  | ChannelInviteMemberBody
+  | ChannelTransferOwnerBody
+  | ItemCreateBody
+  | ItemEditBody
+  | ItemDeleteBody
+  | ItemSubmitBody
+  | ItemRemoveBody
+  | CommentCreateBody
+  | CommentEditBody
+  | CommentDeleteBody
+  | UserSetNameBody
+  | UserSetDataBody
+  | UserInviteFriendBody
+  | GenericResponseBody
+
+export const CAPTION_MAX_LENGTH = 300
+export const BIO_MAX_LENGTH = 50
\ No newline at end of file
diff --git a/src/rvm/lib/utils.ts b/src/rvm/lib/utils.ts
new file mode 100644
index 0000000..b2461ed
--- /dev/null
+++ b/src/rvm/lib/utils.ts
@@ -0,0 +1,66 @@
+import * as dagCbor from '@ipld/dag-cbor'
+import * as Block from 'multiformats/block'
+import { sha256 } from 'multiformats/hashes/sha2'
+import { Message, MessageData, MessageDataBodyTypes, MessageTypes, ItemCreateBody } from './types.js'
+import { base64url } from '@scure/base'
+
+export async function messageToCid(message: Message) {
+  return await Block.encode({ value: message, codec: dagCbor, hasher: sha256 })
+}
+
+export async function makeCid(messageData: MessageData) {
+  return await Block.encode({
+    value: messageData,
+    codec: dagCbor,
+    hasher: sha256,
+  });
+}
+
+export function messageBodyToBase64Url(messageBody: MessageDataBodyTypes): string {
+  const jsonString = JSON.stringify(messageBody, (key, value) =>
+    typeof value === "bigint" ?
value.toString() : value + ); + const encoder = new TextEncoder() + const uint8Array = encoder.encode(jsonString) + return base64url.encode(uint8Array) +} + +export function base64UrlToMessageBody(encodedMessageBody: string): MessageDataBodyTypes { + const uint8Array = base64url.decode(encodedMessageBody); + const decoder = new TextDecoder(); + const json = decoder.decode(uint8Array); + + return JSON.parse(json, (key, value) => { + // Check if the value is a string and can be parsed as a number + if (typeof value === "string" && !isNaN(Number(value))) { + // Convert back to BigInt if it was originally a bigint + if (BigInt(value).toString() === value) { + return BigInt(value); + } + } + return value; + }); +} + +export function formatItemCreateMessage({ + rid, + fileUri, +}: { + rid: bigint; + fileUri: string; +}): Message { + const message: Message = { + signer: "0x", + messageData: { + rid: rid, + timestamp: BigInt(Date.now()), + type: MessageTypes.ITEM_CREATE, + body: { uri: fileUri } as ItemCreateBody, + }, + hashType: 1, + hash: new Uint8Array(0), + sigType: 1, + sig: new Uint8Array(0), + }; + return message; +} \ No newline at end of file diff --git a/src/rvm/sdk.ts b/src/rvm/sdk.ts deleted file mode 100644 index bb546ec..0000000 --- a/src/rvm/sdk.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { ItemCreateBody, MessageTypes } from "./types.js"; -import { MessageData } from "./types.js"; -import { Message } from "./types.js"; - -export function formatItemCreateMessage({rid, fileUri}: {rid: bigint, fileUri: string}): Message { - const message = { - signer: "0x", - messageData: { - rid: rid, - timestamp: BigInt(Date.now()), - type: MessageTypes.ITEM_CREATE, - body: { uri: fileUri } - }, - hashType: 1, - hash: new Uint8Array(0), - sigType: 1, - sig: new Uint8Array(0) - } - return message -} \ No newline at end of file diff --git a/src/rvm/types.ts b/src/rvm/types.ts deleted file mode 100644 index 875a4bc..0000000 --- a/src/rvm/types.ts +++ /dev/null @@ -1,209 +0,0 @@ -/* -* -* MESSAGE TYPES -* -*/ - -export enum MessageTypes { - NONE = 0, - CHANNEL_CREATE = 1, - CHANNEL_EDIT_MEMBERS = 2, - CHANNEL_EDIT_URI = 3, - CHANNEL_TRANSFER_OWNER = 4, - ITEM_CREATE = 5, - ITEM_EDIT = 6, - ITEM_DELETE = 7, - ITEM_SUBMIT = 8, - ITEM_ACC_REJ = 9, - ITEM_REMOVE = 10, - COMMENT_CREATE = 11, - COMMENT_EDIT = 12, - COMMENT_DELETE = 13, - USER_SET_NAME = 14, - USER_SET_URI = 15, - } - - export enum HashTypes { - NONE = 0, - BLAKE_3 = 1, - } - - export enum SignatureTypes { - NONE = 0, - ED25519 = 1, - EIP712 = 2, - } - - export type Message = { - signer: string; - messageData: MessageData; - hashType: HashTypes; - hash: Uint8Array; - sigType: SignatureTypes; - sig: Uint8Array; - }; - - export type MessageData = { - rid: bigint; - timestamp: bigint; - type: MessageTypes; - body: MessageDataBodyTypes; - }; - - - /* - * - * MESSAGE BODY TYPES - * - */ - - /* - * 1 - */ - export type ChannelCreateBody = { - uri: string; - }; - - // type guard function - export function isChannelCreateBody(obj: any): obj is ChannelCreateBody { - return obj && typeof obj === 'object' && typeof obj.uri === 'string'; - } - - /* - * 2 - */ - export type ChannelEditMember = { - channelId: string; - member: { - rid: bigint; - role: 0 | 1 | 2; // 0 = none, 1 = member, 2 = admin - }; - }; - - /* - * 3 - */ - export type ChannelEditUri = { - channelId: string; - uri: string; - }; - - /* - * 4 - */ - export type ChannelTransferOwner = { - channelId: string; - transferToRid: bigint; - }; - - /* - * 5 - */ - export type ItemCreateBody = { - 
uri: string; - }; - - // type guard function - export function isItemCreateBody(obj: any): obj is ChannelCreateBody { - return obj && typeof obj === 'object' && typeof obj.uri === 'string'; - } - - /* - * 6 - */ - export type ItemEditBody = { - itemId: string; - uri: string; - }; - - /* - * 7 - */ - export type ItemDeleteBody = { - itemId: string; - }; - - /* - * 8 - */ - export type ItemSubmitBody = { - itemId: string; - channelId: string; - caption?: string; // MAX 300 CHAR LIMIT - }; - - /* - * 9 - */ - export type ItemAccRejBody = { - submissionId: string; - response: boolean; // FALSE = rejected, TRUE = accepted - caption?: string; // MAX_CHAR_LIMIT = 300 - }; - - /* - * 10 - */ - export type ItemRemoveBody = { - submissionId: string; - }; - - /* - * 11 - */ - export type CommentCreateBody = { - targetId: string; // Must be SUBMISSION_ID or COMMENT_ID - text: string; // MAX_CHAR_LIMIT = 300 - }; - - /* - * 12 - */ - export type CommentEditBody = { - commentId: string; - text: string; // MAX_CHAR_LIMIT = 300 - }; - - /* - * 13 - */ - export type CommentDeleteBody = { - commentId: string; - }; - - /* - * 14 - */ - export type UserSetNameBody = { - fromId: bigint; - toId: bigint; - username: string; // MAX_CHAR_LIMIT = 15 + regex - }; - - /* - * 15 - */ - export type UserSetUriBody = { - rid: bigint; - uri: string; - }; - - /* - * Message Data Body Union type - */ - export type MessageDataBodyTypes = - | ChannelCreateBody - | ChannelEditMember - | ChannelEditUri - | ChannelTransferOwner - | ItemCreateBody - | ItemEditBody - | ItemDeleteBody - | ItemSubmitBody - | ItemAccRejBody - | ItemRemoveBody - | CommentCreateBody - | CommentEditBody - | CommentDeleteBody - | UserSetNameBody - | UserSetUriBody; \ No newline at end of file diff --git a/src/schema.ts b/src/schema.ts index ddeca3e..2ad1804 100644 --- a/src/schema.ts +++ b/src/schema.ts @@ -1,26 +1,140 @@ -import { integer, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core' +import { + integer, + pgTable, + text, + timestamp, + numeric, + bigint, + boolean +} from 'drizzle-orm/pg-core' -export const usersTable = pgTable('users_table', { - id: serial('id').primaryKey(), +export const usersTable = pgTable('users', { + id: numeric('userid').primaryKey(), + to: text('to'), + recovery: text('recovery'), + timestamp: timestamp('timestamp'), + log_addr: text('log_addr'), + block_num: numeric('block_num'), +}) + +export type InsertUser = typeof usersTable.$inferInsert +export type SelectUser = typeof usersTable.$inferSelect + +export const sessionsTable = pgTable('sessions', { + id: text('id').primaryKey(), + userId: numeric('userid') + .notNull() + .references(() => usersTable.id), + deviceId: text('deviceid').notNull(), + created: timestamp('created'), + expiresAt: timestamp('expiresat').notNull(), +}) + +export type InsertSession = typeof sessionsTable.$inferInsert +export type SelectSession = typeof sessionsTable.$inferSelect + +export const keyTable = pgTable( + 'keys', + { + userid: numeric('userid') + .notNull() + .references(() => usersTable.id), + custodyAddress: text('custodyAddress').notNull(), + deviceid: text('deviceid').notNull(), + publickey: text('publickey').notNull(), + encryptedprivatekey: text('encryptedprivatekey').notNull(), + }, + (table) => ({ + primaryKey: [table.userid, table.custodyAddress, table.deviceid], + }), +) + +export type InsertHash = typeof keyTable.$inferInsert +export type SelectHash = typeof keyTable.$inferSelect + +export const messageTable = pgTable('messages', { + id: text('id').primaryKey(), 
+ rid: bigint('rid', { mode: 'bigint' }), + timestamp: bigint('timestamp', { mode: 'bigint' }), + type: integer('type'), + body: text('body'), // this will be base64url encoded body object + signer: text('signer').notNull(), + hashType: integer('hashtype').notNull(), + hash: text('hash').notNull(), + sigType: integer('sigtype').notNull(), + sig: text('sig').notNull(), +}); + +export type InsertPost = typeof messageTable.$inferInsert +export type SelectPost = typeof messageTable.$inferSelect + +export const channelTable = pgTable('channels', { + id: text('messageid') + .notNull() + .references(() => messageTable.id) + .primaryKey(), + content: text('content').notNull(), + timestamp: integer('timestamp').notNull(), + createdById: numeric('createdbyid') + .notNull() + .references(() => usersTable.id), + uri: text('uri').notNull(), name: text('name').notNull(), - age: integer('age').notNull(), - email: text('email').notNull().unique(), + description: text('description').notNull(), +}) + +export type InsertChannel = typeof channelTable.$inferInsert +export type SelectChannel = typeof channelTable.$inferSelect + +export const ItemTable = pgTable('items', { + id: text('itemId') + .notNull() + .references(() => messageTable.id) + .primaryKey(), + createdById: numeric('createdbyid') + .notNull() + .references(() => usersTable.id), + uri: text('uri').notNull(), + createdAt: timestamp('created_at').notNull().defaultNow(), + updatedAt: timestamp('updated_at') + .notNull() + .$onUpdate(() => new Date()), }) -export const postsTable = pgTable('posts_table', { - id: serial('id').primaryKey(), - title: text('title').notNull(), +export type InsertItem = typeof ItemTable.$inferInsert +export type SelectItem = typeof ItemTable.$inferSelect + +export const submissionsTable = pgTable('submissions', { + id: text('submissionId') + .notNull() + .references(() => messageTable.id) + .primaryKey(), content: text('content').notNull(), - userId: integer('user_id') + userId: numeric('userid') .notNull() - .references(() => usersTable.id, { onDelete: 'cascade' }), + .references(() => usersTable.id), createdAt: timestamp('created_at').notNull().defaultNow(), updatedAt: timestamp('updated_at') .notNull() .$onUpdate(() => new Date()), + status: integer('status') }) -export type InsertUser = typeof usersTable.$inferInsert -export type SelectUser = typeof usersTable.$inferSelect -export type InsertPost = typeof postsTable.$inferInsert -export type SelectPost = typeof postsTable.$inferSelect +export type InsertSubmission = typeof submissionsTable.$inferInsert +export type SelectSubmission = typeof submissionsTable.$inferSelect + +export const responsesTable = pgTable('responses', { + id: text('responses') + .notNull() + .references(() => messageTable.id) + .primaryKey(), + createdAt: timestamp('created_at').notNull().defaultNow(), + // would be nice if this could reference the specific message + // makes me wonder if we should have specific message response types per message that requires a respojnse + targetMessageId: text('targetMessageId') + .notNull(), + response: boolean('response').notNull() +}) + +export type InsertResponses = typeof responsesTable.$inferInsert +export type SelectResponses = typeof responsesTable.$inferSelect diff --git a/src/server.ts b/src/server.ts deleted file mode 100644 index e142355..0000000 --- a/src/server.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { buildSchema } from 'drizzle-graphql' -import { createYoga } from 'graphql-yoga' -import { db } from './db.js' - -const { schema } = 
buildSchema(db) - -const yoga = createYoga({ - schema, - cors: { - origin: '*', // Allow all origins - credentials: true, // If your client needs to send credentials - } -}) - -const server = Bun.serve({ - fetch: yoga, -}) - -console.info( - `Server is running on ${new URL( - yoga.graphqlEndpoint, - `http://${server.hostname}:${server.port}`, - )}`, -)
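
Usage note (not part of the diff): a minimal sketch of how the new helpers in src/rvm/lib/utils.ts fit together. makeCid derives the deterministic dag-cbor/sha-256 block whose id the handlers use for channels, items, and submissions, and messageBodyToBase64Url / base64UrlToMessageBody are the encode/decode pair that _storeValidMessage relies on for messageTable.body. The rid, CIDs, caption, and import paths below are hypothetical example values, not taken from the diff.

import { makeCid, messageBodyToBase64Url, base64UrlToMessageBody } from './src/rvm/lib/utils.js'
import { MessageTypes, type ItemSubmitBody, type MessageData } from './src/rvm/lib/types.js'

// hypothetical example values
const body: ItemSubmitBody = {
  itemId: 'bafy-example-item-cid',
  channelId: 'bafy-example-channel-cid',
  text: 'optional caption, must stay under CAPTION_MAX_LENGTH (300)',
}

const messageData: MessageData = {
  rid: 42n,
  timestamp: BigInt(Date.now()),
  type: MessageTypes.ITEM_SUBMIT,
  body,
}

// dag-cbor block for this message; its CID is the deterministic id the VM stores
const block = await makeCid(messageData)
console.log(block.cid.toString())

// round-trip the body the way _storeValidMessage persists it in messageTable.body
const encoded = messageBodyToBase64Url(body)
const decoded = base64UrlToMessageBody(encoded)
console.log(decoded)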
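
Signature note (also not part of the diff): River.verifyMessage calls ed25519ph.verify(message.sig, message.hash, message.signer) after checking that the signer's public key is registered for the rid in keyTable, so the client has to sign exactly the bytes it places in message.hash with the prehashed Ed25519 variant. A rough sketch with a throwaway key; key management, and the assumption that message.hash carries the dag-cbor block bytes from the sketch above, are mine, since the diff does not pin down which bytes go into message.hash.

import { ed25519ph } from '@noble/curves/ed25519'

// throwaway keypair; in the app the public key must match a keyTable row for the rid
const privateKey = ed25519ph.utils.randomPrivateKey()
const publicKey = ed25519ph.getPublicKey(privateKey)

// assumed hash bytes: the dag-cbor block from the previous sketch
const hash = block.bytes
const sig = ed25519ph.sign(hash, privateKey)

// mirrors the check in River.verifyMessage
console.log(ed25519ph.verify(sig, hash, publicKey)) // true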