diff --git a/backend/cmd/seed-daemon/main.go b/backend/cmd/seed-daemon/main.go index 0bd03add8..fb2e8fe9e 100644 --- a/backend/cmd/seed-daemon/main.go +++ b/backend/cmd/seed-daemon/main.go @@ -5,6 +5,7 @@ import ( "errors" "flag" "os" + "path/filepath" "slices" "strings" "time" @@ -89,7 +90,12 @@ func main() { if keyStoreEnvironment == "" { keyStoreEnvironment = "main" } - ks := core.NewOSKeyStore(keyStoreEnvironment) + var ks core.KeyStore + if os.Getenv("SEED_FILE_KEYSTORE") == "1" { + ks = core.NewFileKeyStore(filepath.Join(cfg.Base.DataDir, "keys.json")) + } else { + ks = core.NewOSKeyStore(keyStoreEnvironment) + } dir, err := storage.Open(cfg.Base.DataDir, nil, ks, cfg.LogLevel) if err != nil { diff --git a/backend/core/file_keystore.go b/backend/core/file_keystore.go new file mode 100644 index 000000000..ec593dd58 --- /dev/null +++ b/backend/core/file_keystore.go @@ -0,0 +1,155 @@ +package core + +import ( + "context" + "encoding/json" + "fmt" + "os" + "sync" +) + +type fileKeyStore struct { + path string + mu sync.RWMutex +} + +type fileKeyData struct { + Keys map[string][]byte `json:"keys"` +} + +func NewFileKeyStore(path string) KeyStore { + return &fileKeyStore{path: path} +} + +func (fks *fileKeyStore) load() (*fileKeyData, error) { + data, err := os.ReadFile(fks.path) + if err != nil { + if os.IsNotExist(err) { + return &fileKeyData{Keys: make(map[string][]byte)}, nil + } + return nil, err + } + var fkd fileKeyData + if err := json.Unmarshal(data, &fkd); err != nil { + return nil, err + } + if fkd.Keys == nil { + fkd.Keys = make(map[string][]byte) + } + return &fkd, nil +} + +func (fks *fileKeyStore) save(fkd *fileKeyData) error { + data, err := json.MarshalIndent(fkd, "", " ") + if err != nil { + return err + } + return os.WriteFile(fks.path, data, 0600) +} + +func (fks *fileKeyStore) GetKey(ctx context.Context, name string) (*KeyPair, error) { + fks.mu.RLock() + defer fks.mu.RUnlock() + + fkd, err := fks.load() + if err != nil { + return nil, err + } 
+ + privBytes, ok := fkd.Keys[name] + if !ok { + return nil, fmt.Errorf("%s: %w", name, errKeyNotFound) + } + + kp := new(KeyPair) + return kp, kp.UnmarshalBinary(privBytes) +} + +func (fks *fileKeyStore) StoreKey(ctx context.Context, name string, kp *KeyPair) error { + if !nameFormat.MatchString(name) { + return fmt.Errorf("invalid name format") + } + if kp == nil { + return fmt.Errorf("can't store empty key") + } + + fks.mu.Lock() + defer fks.mu.Unlock() + + fkd, err := fks.load() + if err != nil { + return err + } + + if _, ok := fkd.Keys[name]; ok { + return fmt.Errorf("key %q already exists, delete it first", name) + } + + keyBytes, err := kp.MarshalBinary() + if err != nil { + return err + } + fkd.Keys[name] = keyBytes + return fks.save(fkd) +} + +func (fks *fileKeyStore) ListKeys(ctx context.Context) ([]NamedKey, error) { + fks.mu.RLock() + defer fks.mu.RUnlock() + + fkd, err := fks.load() + if err != nil { + return nil, err + } + + var ret []NamedKey + for name, privBytes := range fkd.Keys { + priv := new(KeyPair) + if err := priv.UnmarshalBinary(privBytes); err != nil { + return nil, err + } + ret = append(ret, NamedKey{Name: name, PublicKey: priv.Principal()}) + } + return ret, nil +} + +func (fks *fileKeyStore) DeleteKey(ctx context.Context, name string) error { + fks.mu.Lock() + defer fks.mu.Unlock() + + fkd, err := fks.load() + if err != nil { + return err + } + + if _, ok := fkd.Keys[name]; !ok { + return errKeyNotFound + } + delete(fkd.Keys, name) + return fks.save(fkd) +} + +func (fks *fileKeyStore) DeleteAllKeys(ctx context.Context) error { + fks.mu.Lock() + defer fks.mu.Unlock() + return fks.save(&fileKeyData{Keys: make(map[string][]byte)}) +} + +func (fks *fileKeyStore) ChangeKeyName(ctx context.Context, currentName, newName string) error { + if !nameFormat.MatchString(newName) { + return fmt.Errorf("invalid name format") + } + + fks.mu.Lock() + defer fks.mu.Unlock() + + fkd, err := fks.load() + if err != nil { + return err + } + + privBytes, ok := fkd.Keys[currentName] + if !ok { + return errKeyNotFound + } + + if _, ok := fkd.Keys[newName]; ok { + return fmt.Errorf("key %q already exists", newName) + } + + delete(fkd.Keys,
currentName) + fkd.Keys[newName] = privBytes + return fks.save(fkd) +} diff --git a/frontend/apps/web/app/entry.server.tsx b/frontend/apps/web/app/entry.server.tsx index 225b8ca89..343cac6f9 100644 --- a/frontend/apps/web/app/entry.server.tsx +++ b/frontend/apps/web/app/entry.server.tsx @@ -29,6 +29,7 @@ import { } from './instrumentation.server' import {resolveResource} from './loaders' import {logDebug} from './logger' +import {documentToMarkdown} from './markdown.server' import {ParsedRequest, parseRequest} from './request' import { applyConfigSubscriptions, @@ -263,6 +264,115 @@ function uriEncodedAuthors(authors: string[]) { return authors.map((author) => encodeURIComponent(`hm://${author}`)).join(',') } +/** + * Handle requests with .md extension - return raw markdown + * This enables bots and agents to easily consume SHM content without + * installing CLI tools or parsing HTML/React. + * + * Usage: GET https://hyper.media/hm/z6Mk.../path.md + * Returns: text/markdown with the document content + */ +async function handleMarkdownRequest( + parsedRequest: ParsedRequest, + hostname: string +): Promise<Response> { + const {url, pathParts} = parsedRequest + + try { + // Strip .md extension from the last path part + const lastPart = pathParts[pathParts.length - 1] + const strippedPath = [...pathParts.slice(0, -1)] + if (lastPart && lastPart.endsWith('.md')) { + strippedPath.push(lastPart.slice(0, -3)) + } + + // Get service config to resolve account + const serviceConfig = await getConfig(hostname) + const originAccountId = serviceConfig?.registeredAccountUid + + // Build the resource ID + let resourceId: ReturnType<typeof hmId> | null = null + const version = url.searchParams.get('v') + const latest = url.searchParams.get('l') === '' + + if (strippedPath.length === 0) { + if (originAccountId) { + resourceId = hmId(originAccountId, {path: [], version, latest}) + } + } else if (strippedPath[0] === 'hm') { + resourceId = hmId(strippedPath[1], { + path: strippedPath.slice(2), + version, +
latest, + }) + } else if (originAccountId) { + resourceId = hmId(originAccountId, {path: strippedPath, version, latest}) + } + + if (!resourceId) { + return new Response('# Not Found\n\nCould not resolve resource ID.', { + status: 404, + headers: {'Content-Type': 'text/markdown; charset=utf-8'}, + }) + } + + // Fetch the resource + const resource = await resolveResource(resourceId) + + if (resource.type === 'document') { + const md = await documentToMarkdown(resource.document, { + includeMetadata: true, + includeFrontmatter: url.searchParams.has('frontmatter'), + }) + + return new Response(md, { + status: 200, + headers: { + 'Content-Type': 'text/markdown; charset=utf-8', + 'X-Hypermedia-Id': encodeURIComponent(resourceId.id), + 'X-Hypermedia-Version': resource.document.version, + 'X-Hypermedia-Type': 'Document', + 'Cache-Control': 'public, max-age=60', + }, + }) + } else if (resource.type === 'comment') { + // For comments, create a simple markdown response + const content = resource.comment.content || [] + const fakeDoc = { + content, + metadata: {}, + version: resource.comment.version, + authors: [resource.comment.author], + } as any + + const md = await documentToMarkdown(fakeDoc, {includeMetadata: false}) + + return new Response(md, { + status: 200, + headers: { + 'Content-Type': 'text/markdown; charset=utf-8', + 'X-Hypermedia-Id': encodeURIComponent(resourceId.id), + 'X-Hypermedia-Type': 'Comment', + }, + }) + } + + return new Response('# Not Found\n\nResource type not supported.', { + status: 404, + headers: {'Content-Type': 'text/markdown; charset=utf-8'}, + }) + } catch (e) { + console.error('Error handling markdown request:', e) + return new Response( + `# Error\n\nFailed to load resource: ${(e as Error).message}`, + { + status: 500, + headers: {'Content-Type': 'text/markdown; charset=utf-8'}, + } + ) + } +} + async function handleOptionsRequest(request: Request) { const parsedRequest = parseRequest(request) const {hostname} = parsedRequest @@ -353,6 
+463,12 @@ export default async function handleRequest( status: 404, }) } + + // Handle .md extension requests - return raw markdown for bots/agents + if (url.pathname.endsWith('.md')) { + return await handleMarkdownRequest(parsedRequest, hostname) + } + if (url.pathname.startsWith('/hm/embed/')) { // allowed to embed anywhere } else { diff --git a/frontend/apps/web/app/markdown.server.ts b/frontend/apps/web/app/markdown.server.ts new file mode 100644 index 000000000..da8cf592c --- /dev/null +++ b/frontend/apps/web/app/markdown.server.ts @@ -0,0 +1,521 @@ +/** + * Server-side markdown converter for Seed Hypermedia documents + * Enables HTTP GET with .md extension to return raw markdown + */ + +import type {BlockNode, Block, Annotation, HMDocument, UnpackedHypermediaId} from '@shm/shared/hm-types' +import {hmId, parseHMUrl, packHmId} from '@shm/shared' +import {grpcClient} from './client.server' +import {resolveResource} from './loaders' +import {serverUniversalClient} from './server-universal-client' + +export type MarkdownOptions = { + includeMetadata?: boolean + includeFrontmatter?: boolean +} + +/** + * Cache for resolved account names to avoid repeated lookups + */ +const accountNameCache = new Map() + +/** + * Cache for resolved embed content to avoid repeated fetches + */ +const embedContentCache = new Map() + +/** + * Resolve account display name from account ID + */ +async function resolveAccountName(accountId: string): Promise<string> { + if (accountNameCache.has(accountId)) { + return accountNameCache.get(accountId)! + } + + try { + const account = await grpcClient.documents.getAccount({ + id: accountId, + }) + + const name = account.metadata?.name || accountId.slice(0, 8) + '...' + accountNameCache.set(accountId, name) + return name + } catch (e) { + console.error('Failed to resolve account name for', accountId, e) + // Fallback to shortened account ID + const fallbackName = accountId.slice(0, 8) + '...'
+ accountNameCache.set(accountId, fallbackName) + return fallbackName + } +} + +/** + * Convert a document to markdown + */ +export async function documentToMarkdown( + doc: HMDocument, + options?: MarkdownOptions +): Promise<string> { + const lines: string[] = [] + + // Optional frontmatter + if (options?.includeFrontmatter && doc.metadata) { + lines.push('---') + if (doc.metadata.name) lines.push(`title: "${escapeYaml(doc.metadata.name)}"`) + if (doc.metadata.summary) lines.push(`summary: "${escapeYaml(doc.metadata.summary)}"`) + if (doc.authors?.length) lines.push(`authors: [${doc.authors.join(', ')}]`) + lines.push(`version: ${doc.version}`) + lines.push('---') + lines.push('') + } + + // Title from metadata + if (options?.includeMetadata && doc.metadata?.name) { + lines.push(`# ${doc.metadata.name}`) + lines.push('') + } + + // Pre-warm caches: collect all embed/query URLs and resolve in parallel + await prewarmEmbedCache(doc.content || []) + + // Content blocks + for (const node of doc.content || []) { + const blockMd = await blockNodeToMarkdown(node, 0) + if (blockMd) { + lines.push(blockMd) + } + } + + return lines.join('\n') +} + +/** + * Pre-warm the embed content cache by resolving all embeds in parallel. + * This avoids sequential fetches during markdown generation.
+ */ +async function prewarmEmbedCache(content: BlockNode[]): Promise { + const embedUrls = new Set() + const accountIds = new Set() + + function collectUrls(nodes: BlockNode[]) { + for (const node of nodes) { + const block = node.block + if (block.type === 'Embed' && block.link && !embedContentCache.has(block.link)) { + embedUrls.add(block.link) + } + // Collect mention account IDs from annotations + if (block.annotations) { + for (const ann of block.annotations) { + if (ann.type === 'Link' && ann.link) { + const parsed = parseHMUrl(ann.link) + if (parsed?.uid && (!parsed.path || parsed.path.length === 0) && !accountNameCache.has(parsed.uid)) { + accountIds.add(parsed.uid) + } + } + } + } + if (node.children) collectUrls(node.children) + } + } + + collectUrls(content) + + // Resolve all embeds and account names in parallel + const embedPromises = [...embedUrls].map(async (url) => { + try { + const parsed = parseHMUrl(url) + if (!parsed) return + const resourceId = hmId(parsed.uid, { + path: parsed.path, + version: parsed.version, + latest: parsed.latest, + blockRef: parsed.blockRef, + }) + const resource = await resolveResource(resourceId) + if (resource.type === 'document' && resource.document) { + let content = '' + if (parsed.blockRef) { + const targetBlock = findBlockById(resource.document.content || [], parsed.blockRef) + if (targetBlock) content = targetBlock.text || '' + } else { + content = resource.document.metadata?.name || resource.document.content?.[0]?.block?.text || '' + } + const result = content + ? 
`> ${content.split('\n').join('\n> ')}` + : `> [Embed: ${url}](${url})` + embedContentCache.set(url, result) + } + } catch (e) { + embedContentCache.set(url, `> [Embed: ${url}](${url})`) + } + }) + + const accountPromises = [...accountIds].map(async (uid) => { + try { + const account = await grpcClient.documents.getAccount({ id: uid }) + accountNameCache.set(uid, account.metadata?.name || uid.slice(0, 8) + '...') + } catch { + accountNameCache.set(uid, uid.slice(0, 8) + '...') + } + }) + + await Promise.all([...embedPromises, ...accountPromises]) +} + +/** + * Convert a block node (with children) to markdown + */ +async function blockNodeToMarkdown( + node: BlockNode, + depth: number +): Promise { + const block = node.block + const children = node.children || [] + + let result = await blockToMarkdown(block, depth) + + // Handle children based on childrenType + const childrenType = block.attributes?.childrenType as string | undefined + + for (const child of children) { + const childMd = await blockNodeToMarkdown(child, depth + 1) + if (childMd) { + if (childrenType === 'Ordered') { + result += '\n' + indent(depth + 1) + '1. 
' + childMd.trim() + } else if (childrenType === 'Unordered') { + result += '\n' + indent(depth + 1) + '- ' + childMd.trim() + } else if (childrenType === 'Blockquote') { + result += '\n' + indent(depth + 1) + '> ' + childMd.trim() + } else { + result += '\n' + childMd + } + } + } + + return result +} + +/** + * Convert a single block to markdown + */ +async function blockToMarkdown( + block: Block, + depth: number +): Promise { + const ind = indent(depth) + + switch (block.type) { + case 'Paragraph': + return ind + await applyAnnotations(block.text || '', block.annotations) + + case 'Heading': + // Use depth to determine heading level (max h6) + const level = Math.min(depth + 1, 6) + const hashes = '#'.repeat(level) + return `${hashes} ${await applyAnnotations(block.text || '', block.annotations)}` + + case 'Code': + const lang = (block.attributes?.language as string) || '' + return ind + '```' + lang + '\n' + ind + (block.text || '') + '\n' + ind + '```' + + case 'Math': + return ind + '$$\n' + ind + (block.text || '') + '\n' + ind + '$$' + + case 'Image': + const altText = block.text || 'image' + const imgUrl = formatMediaUrl(block.link || '') + return ind + `![${altText}](${imgUrl})` + + case 'Video': + const videoUrl = formatMediaUrl(block.link || '') + return ind + `[Video](${videoUrl})` + + case 'File': + const fileName = (block.attributes?.name as string) || 'file' + const fileUrl = formatMediaUrl(block.link || '') + return ind + `[${fileName}](${fileUrl})` + + case 'Embed': + return await resolveEmbedBlock(block, ind) + + case 'WebEmbed': + return ind + `[Web Embed](${block.link})` + + case 'Button': + const buttonText = block.text || 'Button' + return ind + `[${buttonText}](${block.link})` + + case 'Query': + return await resolveQueryBlock(block, ind) + + case 'Nostr': + return ind + `[Nostr: ${block.link}](${block.link})` + + default: + if (block.text) { + return ind + block.text + } + return '' + } +} + +/** + * Resolve an embed block by loading the 
target document and inlining content + */ +async function resolveEmbedBlock(block: Block, indent: string): Promise { + if (!block.link) { + return indent + `> [Embed: No URL]` + } + + // Check cache first for performance + if (embedContentCache.has(block.link)) { + return indent + embedContentCache.get(block.link)! + } + + try { + // Parse the embed URL to get the resource ID + const parsed = parseHMUrl(block.link) + if (!parsed) { + const result = `> [Embed: ${block.link}](${block.link})` + embedContentCache.set(block.link, result) + return indent + result + } + + // Use the existing resolveResource function for consistency + const resourceId = hmId(parsed.uid, { + path: parsed.path, + version: parsed.version, + latest: parsed.latest, + blockRef: parsed.blockRef, + }) + + const resource = await resolveResource(resourceId) + + if (resource.type !== 'document' || !resource.document) { + const result = `> [Embed: ${block.link}](${block.link})` + embedContentCache.set(block.link, result) + return indent + result + } + + // Extract relevant content based on blockRef if present + let content = '' + if (parsed.blockRef) { + // Find the specific block referenced + const targetBlock = findBlockById(resource.document.content || [], parsed.blockRef) + if (targetBlock) { + content = targetBlock.text || '' + } + } else { + // Use the document title or first block + const title = resource.document.metadata?.name + if (title) { + content = title + } else if (resource.document.content?.[0]?.block?.text) { + content = resource.document.content[0].block.text + } + } + + let result: string + if (content) { + // Format as blockquote with proper indentation + result = `> ${content.split('\n').join('\n> ')}` + } else { + result = `> [Embed: ${block.link}](${block.link})` + } + + embedContentCache.set(block.link, result) + return indent + result + + } catch (e) { + console.error('Failed to resolve embed:', block.link, e) + const result = `> [Embed: ${block.link}](${block.link})` + 
embedContentCache.set(block.link, result) + return indent + result + } +} + +/** + * Resolve a query block by executing the query and generating a list of links + */ +async function resolveQueryBlock(block: Block, ind: string): Promise { + try { + const queryData = (block.attributes as any)?.query + if (!queryData?.includes?.length) { + return ind + `` + } + + // Execute the query using the server universal client + const result = await serverUniversalClient.request('Query', { + includes: queryData.includes, + sort: queryData.sort, + limit: queryData.limit, + }) + + if (!result || !('results' in result) || !result.results?.length) { + return ind + `` + } + + // Format results as a markdown list of links + const lines: string[] = [] + for (const doc of result.results) { + const title = doc.metadata?.name || doc.path?.[doc.path.length - 1] || 'Untitled' + const hmUrl = doc.id ? packHmId(doc.id) : '' + if (hmUrl) { + lines.push(`${ind}- [${title}](${hmUrl})`) + } else { + lines.push(`${ind}- ${title}`) + } + } + return lines.join('\n') + } catch (e) { + console.error('Failed to resolve query:', block, e) + return ind + `` + } +} + +/** + * Helper function to find a block by ID in a document's content + */ +function findBlockById(content: any[], blockId: string): any | null { + for (const node of content) { + if (node.block?.id === blockId) { + return node.block + } + // Recursively search children + if (node.children) { + const found = findBlockById(node.children, blockId) + if (found) return found + } + } + return null +} + +/** + * Apply text annotations (bold, italic, links, etc.) 
+ */ +async function applyAnnotations( + text: string, + annotations: Annotation[] | undefined +): Promise { + if (!annotations || annotations.length === 0) { + return text + } + + // Build a list of markers with positions + type Marker = {pos: number; type: 'open' | 'close'; annotation: Annotation} + const markers: Marker[] = [] + + for (const ann of annotations) { + const starts = ann.starts || [] + const ends = ann.ends || [] + + for (let i = 0; i < starts.length; i++) { + markers.push({pos: starts[i], type: 'open', annotation: ann}) + if (ends[i] !== undefined) { + markers.push({pos: ends[i], type: 'close', annotation: ann}) + } + } + } + + // Sort by position (opens before closes at same position) + markers.sort((a, b) => { + if (a.pos !== b.pos) return a.pos - b.pos + return a.type === 'open' ? -1 : 1 + }) + + // Build result string + let result = '' + let lastPos = 0 + + for (const marker of markers) { + result += text.slice(lastPos, marker.pos) + lastPos = marker.pos + result += await getAnnotationMarker(marker.annotation, marker.type) + } + + result += text.slice(lastPos) + + // Remove object replacement characters (used for inline embeds) + result = result.replace(/\uFFFC/g, '') + + return result +} + +/** + * Get markdown marker for annotation + */ +async function getAnnotationMarker( + ann: Annotation, + type: 'open' | 'close' +): Promise { + switch (ann.type) { + case 'Bold': + return '**' + case 'Italic': + return '_' + case 'Strike': + return '~~' + case 'Code': + return '`' + case 'Underline': + return type === 'open' ? 
'' : '' + case 'Link': + if (type === 'open') { + // Check if this is a mention (hm:// link to an account with no path) + if (ann.link) { + const parsed = parseHMUrl(ann.link) + if (parsed?.uid && (!parsed.path || parsed.path.length === 0)) { + try { + const name = await resolveAccountName(parsed.uid) + return `[@${name}](${ann.link})` + } catch (e) { + // fallback to link syntax + } + } + } + return '[' + } else { + // If we already emitted the full mention markdown in 'open', skip close + if (ann.link) { + const parsed = parseHMUrl(ann.link) + if (parsed?.uid && (!parsed.path || parsed.path.length === 0)) { + return '' + } + } + return `](${ann.link || ''})` + } + case 'Embed': + if (type === 'open') { + return '[' + } else { + return `](${ann.link || ''})` + } + default: + return '' + } +} + +/** + * Format media URL (handle ipfs:// URLs) + */ +function formatMediaUrl(url: string): string { + if (url.startsWith('ipfs://')) { + const cid = url.slice(7) + return `https://ipfs.io/ipfs/${cid}` + } + return url +} + +/** + * Create indentation string + */ +function indent(depth: number): string { + return ' '.repeat(depth) +} + +/** + * Escape string for YAML frontmatter + */ +function escapeYaml(str: string): string { + return str.replace(/"/g, '\\"').replace(/\n/g, '\\n') +} diff --git a/seed-daemon b/seed-daemon new file mode 100755 index 000000000..0024ca070 Binary files /dev/null and b/seed-daemon differ