diff --git a/package-lock.json b/package-lock.json
index fb6aa0f..de362d3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -4825,16 +4825,6 @@
"@types/node": "*"
}
},
- "node_modules/@types/jszip": {
- "version": "3.4.0",
- "resolved": "https://registry.npmjs.org/@types/jszip/-/jszip-3.4.0.tgz",
- "integrity": "sha512-GFHqtQQP3R4NNuvZH3hNCYD0NbyBZ42bkN7kO3NDrU/SnvIZWMS8Bp38XCsRKBT5BXvgm0y1zqpZWp/ZkRzBzg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "jszip": "*"
- }
- },
"node_modules/@types/node": {
"version": "22.18.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.1.tgz",
@@ -14089,7 +14079,6 @@
"devDependencies": {
"@types/fs-extra": "^11.0.4",
"@types/inquirer": "^9.0.7",
- "@types/jszip": "^3.4.0",
"@types/node": "^22.13.0",
"@types/qrcode-terminal": "^0.12.2",
"tsx": "^4.19.2",
@@ -14098,6 +14087,9 @@
},
"engines": {
"node": ">=18.0.0"
+ },
+ "optionalDependencies": {
+ "@zvec/zvec": "^0.3.0"
}
},
"packages/atxp/node_modules/chalk": {
diff --git a/packages/atxp/package.json b/packages/atxp/package.json
index 9cb9659..8d67d95 100644
--- a/packages/atxp/package.json
+++ b/packages/atxp/package.json
@@ -43,6 +43,9 @@
"ora": "^7.0.1",
"qrcode-terminal": "^0.12.0"
},
+ "optionalDependencies": {
+ "@zvec/zvec": "^0.3.0"
+ },
"devDependencies": {
"@types/fs-extra": "^11.0.4",
"@types/inquirer": "^9.0.7",
diff --git a/packages/atxp/src/commands/backup.ts b/packages/atxp/src/commands/backup.ts
deleted file mode 100644
index b4e78e2..0000000
--- a/packages/atxp/src/commands/backup.ts
+++ /dev/null
@@ -1,269 +0,0 @@
-import chalk from 'chalk';
-import fs from 'fs';
-import JSZip from 'jszip';
-import path from 'path';
-import { getConnection } from '../config.js';
-
-export interface BackupOptions {
- path?: string;
-}
-
-interface BackupFile {
- path: string;
- content: string;
-}
-
-function getAccountsAuth(): { baseUrl: string; token: string } {
- const connection = getConnection();
- if (!connection) {
- console.error(chalk.red('Not logged in.'));
- console.error(`Run: ${chalk.cyan('npx atxp login')}`);
- process.exit(1);
- }
- const url = new URL(connection);
- const token = url.searchParams.get('connection_token');
- if (!token) {
- console.error(chalk.red('Invalid connection string: missing connection_token'));
- process.exit(1);
- }
- return { baseUrl: `${url.protocol}//${url.host}`, token };
-}
-
-function showBackupHelp(): void {
- console.log(chalk.bold('Backup Commands:'));
- console.log();
- console.log(' ' + chalk.cyan('npx atxp backup push --path <dir>') + ' ' + 'Push .md files to server');
- console.log(' ' + chalk.cyan('npx atxp backup pull --path <dir>') + ' ' + 'Pull .md files from server');
- console.log(' ' + chalk.cyan('npx atxp backup status') + ' ' + 'Show backup info');
- console.log();
- console.log(chalk.bold('Details:'));
- console.log(' Backs up all .md files (recursively) from the given directory.');
- console.log(' Files are compressed into a zip archive before upload.');
- console.log(' Each push replaces the previous server snapshot entirely.');
- console.log(' Pull writes server files to the local directory (non-destructive).');
- console.log();
- console.log(chalk.bold('Options:'));
- console.log(' ' + chalk.yellow('--path') + ' ' + 'Directory to push from or pull to (required for push/pull)');
- console.log();
- console.log(chalk.bold('Examples:'));
- console.log(' npx atxp backup push --path ~/.openclaw/workspace-abc/');
- console.log(' npx atxp backup pull --path ~/.openclaw/workspace-abc/');
- console.log(' npx atxp backup status');
-}
-
-export function collectMdFiles(dir: string): BackupFile[] {
- const entries = fs.readdirSync(dir, { recursive: true, withFileTypes: true });
- const files: BackupFile[] = [];
-
- for (const entry of entries) {
- if (entry.isSymbolicLink()) continue;
- if (!entry.isFile()) continue;
- if (!entry.name.endsWith('.md')) continue;
-
- const parentPath = entry.parentPath ?? entry.path;
- const fullPath = path.join(parentPath, entry.name);
- const relativePath = path.relative(dir, fullPath);
- const content = fs.readFileSync(fullPath, 'utf-8');
-
- files.push({ path: relativePath, content });
- }
-
- return files;
-}
-
-async function pushBackup(pathArg: string): Promise<void> {
- if (!pathArg) {
- console.error(chalk.red('Error: --path is required for push'));
- console.error(`Usage: ${chalk.cyan('npx atxp backup push --path <dir>')}`);
- process.exit(1);
- }
-
- const resolvedPath = path.resolve(pathArg);
-
- if (!fs.existsSync(resolvedPath)) {
- console.error(chalk.red(`Error: Directory does not exist: ${resolvedPath}`));
- process.exit(1);
- }
-
- if (!fs.statSync(resolvedPath).isDirectory()) {
- console.error(chalk.red(`Error: Path is not a directory: ${resolvedPath}`));
- process.exit(1);
- }
-
- const { baseUrl, token } = getAccountsAuth();
-
- console.log(chalk.gray(`Collecting .md files from ${resolvedPath}...`));
-
- const files = collectMdFiles(resolvedPath);
-
- if (files.length === 0) {
- console.log(chalk.yellow('No .md files found in the specified directory.'));
- return;
- }
-
- for (const file of files) {
- console.log(chalk.gray(` ${file.path}`));
- }
-
- const totalBytes = files.reduce((sum, f) => sum + Buffer.byteLength(f.content, 'utf-8'), 0);
- console.log(chalk.gray(`\nCompressing ${files.length} file(s) (${formatBytes(totalBytes)})...`));
-
- const zip = new JSZip();
- for (const file of files) {
- zip.file(file.path, file.content);
- }
- const zipBuffer = await zip.generateAsync({ type: 'nodebuffer', compression: 'DEFLATE', compressionOptions: { level: 9 } });
-
- console.log(chalk.gray(`Pushing zip archive (${formatBytes(zipBuffer.length)})...`));
-
- const res = await fetch(`${baseUrl}/backup/files`, {
- method: 'PUT',
- headers: {
- 'Authorization': `Bearer ${token}`,
- 'Content-Type': 'application/zip',
- },
- body: new Uint8Array(zipBuffer),
- });
-
- if (!res.ok) {
- const body = await res.json().catch(() => ({}));
- console.error(chalk.red(`Error: ${(body as Record<string, unknown>).error || res.statusText}`));
- process.exit(1);
- }
-
- const data = await res.json() as { fileCount: number; syncedAt: string };
-
- console.log();
- console.log(chalk.green.bold('Backup pushed successfully!'));
- console.log(' ' + chalk.bold('Files:') + ' ' + data.fileCount);
- console.log(' ' + chalk.bold('Synced at:') + ' ' + new Date(data.syncedAt).toLocaleString());
-}
-
-async function pullBackup(pathArg: string): Promise<void> {
- if (!pathArg) {
- console.error(chalk.red('Error: --path is required for pull'));
- console.error(`Usage: ${chalk.cyan('npx atxp backup pull --path <dir>')}`);
- process.exit(1);
- }
-
- const resolvedPath = path.resolve(pathArg);
- const { baseUrl, token } = getAccountsAuth();
-
- console.log(chalk.gray('Pulling backup from server...'));
-
- const res = await fetch(`${baseUrl}/backup/files`, {
- headers: {
- 'Authorization': `Bearer ${token}`,
- 'Accept': 'application/zip',
- },
- });
-
- if (!res.ok) {
- const body = await res.json().catch(() => ({}));
- console.error(chalk.red(`Error: ${(body as Record<string, unknown>).error || res.statusText}`));
- process.exit(1);
- }
-
- const zipBuffer = Buffer.from(await res.arrayBuffer());
-
- if (zipBuffer.length === 0) {
- console.log(chalk.yellow('No backup found on server. Push one first with:'));
- console.log(chalk.cyan(' npx atxp backup push --path <dir>'));
- return;
- }
-
- console.log(chalk.gray(`Extracting zip archive (${formatBytes(zipBuffer.length)})...`));
-
- const zip = await JSZip.loadAsync(zipBuffer);
- const fileNames = Object.keys(zip.files).filter(name => !zip.files[name].dir);
-
- if (fileNames.length === 0) {
- console.log(chalk.yellow('No backup found on server. Push one first with:'));
- console.log(chalk.cyan(' npx atxp backup push --path <dir>'));
- return;
- }
-
- // Create target directory if needed
- fs.mkdirSync(resolvedPath, { recursive: true });
-
- for (const name of fileNames) {
- const content = await zip.files[name].async('string');
- const filePath = path.join(resolvedPath, name);
- const fileDir = path.dirname(filePath);
-
- fs.mkdirSync(fileDir, { recursive: true });
- fs.writeFileSync(filePath, content, 'utf-8');
-
- console.log(chalk.gray(` ${name}`));
- }
-
- console.log();
- console.log(chalk.green.bold('Backup pulled successfully!'));
- console.log(' ' + chalk.bold('Files written:') + ' ' + fileNames.length);
- console.log(' ' + chalk.bold('Directory:') + ' ' + resolvedPath);
-}
-
-async function backupStatus(): Promise<void> {
- const { baseUrl, token } = getAccountsAuth();
-
- const res = await fetch(`${baseUrl}/backup/status`, {
- headers: {
- 'Authorization': `Bearer ${token}`,
- },
- });
-
- if (!res.ok) {
- const body = await res.json().catch(() => ({}));
- console.error(chalk.red(`Error: ${(body as Record<string, unknown>).error || res.statusText}`));
- process.exit(1);
- }
-
- const data = await res.json() as { fileCount: number; syncedAt: string; totalBytes: number };
-
- console.log(chalk.bold('Backup Status'));
- console.log();
-
- if (data.fileCount === 0) {
- console.log(chalk.gray('No backup found.'));
- console.log();
- console.log('Create one with: ' + chalk.cyan('npx atxp backup push --path <dir>'));
- return;
- }
-
- console.log(' ' + chalk.bold('Files:') + ' ' + data.fileCount);
- console.log(' ' + chalk.bold('Total size:') + ' ' + formatBytes(data.totalBytes));
- console.log(' ' + chalk.bold('Last sync:') + ' ' + new Date(data.syncedAt).toLocaleString());
-}
-
-function formatBytes(bytes: number): string {
- if (bytes < 1024) return `${bytes} B`;
- if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
- return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
-}
-
-export async function backupCommand(subCommand: string, options: BackupOptions): Promise<void> {
- if (!subCommand || subCommand === 'help' || subCommand === '--help' || subCommand === '-h') {
- showBackupHelp();
- return;
- }
-
- switch (subCommand) {
- case 'push':
- await pushBackup(options.path || '');
- break;
-
- case 'pull':
- await pullBackup(options.path || '');
- break;
-
- case 'status':
- await backupStatus();
- break;
-
- default:
- console.error(chalk.red(`Unknown backup command: ${subCommand}`));
- console.log();
- showBackupHelp();
- process.exit(1);
- }
-}
diff --git a/packages/atxp/src/commands/commands.test.ts b/packages/atxp/src/commands/commands.test.ts
index a742d5f..ae7d0db 100644
--- a/packages/atxp/src/commands/commands.test.ts
+++ b/packages/atxp/src/commands/commands.test.ts
@@ -3,7 +3,7 @@ import fs from 'fs';
import JSZip from 'jszip';
import os from 'os';
import path from 'path';
-import { collectMdFiles } from './backup.js';
+import { collectMdFiles, chunkMarkdown, textToVector } from './memory.js';
describe('Tool Commands', () => {
describe('search command', () => {
@@ -171,11 +171,11 @@ describe('Tool Commands', () => {
});
});
- describe('backup command', () => {
+ describe('memory command', () => {
let tmpDir: string;
beforeEach(() => {
- tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'atxp-backup-test-'));
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'atxp-memory-test-'));
});
afterEach(() => {
@@ -313,6 +313,92 @@ describe('Tool Commands', () => {
});
});
+ describe('chunkMarkdown', () => {
+ it('should split markdown by headings', () => {
+ const content = '# Title\nSome intro text.\n\n## Section A\nContent A.\n\n## Section B\nContent B.';
+ const chunks = chunkMarkdown('test.md', content);
+
+ expect(chunks).toHaveLength(3);
+ expect(chunks[0].heading).toBe('Title');
+ expect(chunks[0].text).toContain('Some intro text.');
+ expect(chunks[1].heading).toBe('Section A');
+ expect(chunks[1].text).toContain('Content A.');
+ expect(chunks[2].heading).toBe('Section B');
+ expect(chunks[2].text).toContain('Content B.');
+ });
+
+ it('should use file path as heading for content without headings', () => {
+ const content = 'Just some plain text\nwith multiple lines.';
+ const chunks = chunkMarkdown('notes.md', content);
+
+ expect(chunks).toHaveLength(1);
+ expect(chunks[0].heading).toBe('notes.md');
+ expect(chunks[0].filePath).toBe('notes.md');
+ expect(chunks[0].text).toBe('Just some plain text\nwith multiple lines.');
+ });
+
+ it('should return empty array for empty content', () => {
+ const chunks = chunkMarkdown('empty.md', '');
+ expect(chunks).toHaveLength(0);
+ });
+
+ it('should handle h1, h2, and h3 headings', () => {
+ const content = '# H1\nOne\n## H2\nTwo\n### H3\nThree';
+ const chunks = chunkMarkdown('test.md', content);
+
+ expect(chunks).toHaveLength(3);
+ expect(chunks[0].heading).toBe('H1');
+ expect(chunks[1].heading).toBe('H2');
+ expect(chunks[2].heading).toBe('H3');
+ });
+
+ it('should track start line numbers', () => {
+ const content = '# Title\nLine 2\n\n## Section\nLine 5';
+ const chunks = chunkMarkdown('test.md', content);
+
+ expect(chunks[0].startLine).toBe(1);
+ expect(chunks[1].startLine).toBe(4);
+ });
+ });
+
+ describe('textToVector', () => {
+ it('should return a vector of length 256', () => {
+ const vec = textToVector('hello world');
+ expect(vec).toHaveLength(256);
+ });
+
+ it('should return a normalized vector', () => {
+ const vec = textToVector('the quick brown fox jumps over the lazy dog');
+ const norm = Math.sqrt(vec.reduce((sum, v) => sum + v * v, 0));
+ expect(norm).toBeCloseTo(1.0, 4);
+ });
+
+ it('should return zero vector for empty input', () => {
+ const vec = textToVector('');
+ const allZero = vec.every((v) => v === 0);
+ expect(allZero).toBe(true);
+ });
+
+ it('should produce similar vectors for similar text', () => {
+ const vec1 = textToVector('authentication login flow');
+ const vec2 = textToVector('authentication login process');
+ const vec3 = textToVector('ocean waves sunset beach');
+
+ // Cosine similarity (vectors are already normalized)
+ const sim12 = vec1.reduce((sum, v, i) => sum + v * vec2[i], 0);
+ const sim13 = vec1.reduce((sum, v, i) => sum + v * vec3[i], 0);
+
+ // Similar texts should have higher similarity than dissimilar ones
+ expect(sim12).toBeGreaterThan(sim13);
+ });
+
+ it('should be deterministic', () => {
+ const vec1 = textToVector('test input');
+ const vec2 = textToVector('test input');
+ expect(vec1).toEqual(vec2);
+ });
+ });
+
describe('common command behavior', () => {
it('should construct tool arguments correctly', () => {
const buildArgs = (key: string, value: string) => {
diff --git a/packages/atxp/src/commands/memory.ts b/packages/atxp/src/commands/memory.ts
new file mode 100644
index 0000000..7639547
--- /dev/null
+++ b/packages/atxp/src/commands/memory.ts
@@ -0,0 +1,673 @@
+import chalk from 'chalk';
+import fs from 'fs';
+import JSZip from 'jszip';
+import path from 'path';
+import { getConnection } from '../config.js';
+
+export interface MemoryOptions {
+ path?: string;
+ topk?: number;
+}
+
+interface MemoryFile {
+ path: string;
+ content: string;
+}
+
+// --- Shared auth helper ---
+
+function getAccountsAuth(): { baseUrl: string; token: string } {
+ const connection = getConnection();
+ if (!connection) {
+ console.error(chalk.red('Not logged in.'));
+ console.error(`Run: ${chalk.cyan('npx atxp login')}`);
+ process.exit(1);
+ }
+ const url = new URL(connection);
+ const token = url.searchParams.get('connection_token');
+ if (!token) {
+ console.error(chalk.red('Invalid connection string: missing connection_token'));
+ process.exit(1);
+ }
+ return { baseUrl: `${url.protocol}//${url.host}`, token };
+}
+
+// --- File collection ---
+
+export function collectMdFiles(dir: string): MemoryFile[] {
+ const entries = fs.readdirSync(dir, { recursive: true, withFileTypes: true });
+ const files: MemoryFile[] = [];
+
+ for (const entry of entries) {
+ if (entry.isSymbolicLink()) continue;
+ if (!entry.isFile()) continue;
+ if (!entry.name.endsWith('.md')) continue;
+
+ const parentPath = entry.parentPath ?? entry.path;
+ const fullPath = path.join(parentPath, entry.name);
+ const relativePath = path.relative(dir, fullPath);
+ const content = fs.readFileSync(fullPath, 'utf-8');
+
+ files.push({ path: relativePath, content });
+ }
+
+ return files;
+}
+
+// --- Text chunking ---
+
+interface TextChunk {
+ filePath: string;
+ heading: string;
+ text: string;
+ startLine: number;
+}
+
+export function chunkMarkdown(filePath: string, content: string): TextChunk[] {
+ const lines = content.split('\n');
+ const chunks: TextChunk[] = [];
+ let currentHeading = filePath;
+ let currentLines: string[] = [];
+ let chunkStartLine = 1;
+
+ const flushChunk = () => {
+ const text = currentLines.join('\n').trim();
+ if (text.length > 0) {
+ chunks.push({
+ filePath,
+ heading: currentHeading,
+ text,
+ startLine: chunkStartLine,
+ });
+ }
+ currentLines = [];
+ };
+
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+ const headingMatch = line.match(/^(#{1,3})\s+(.+)/);
+
+ if (headingMatch) {
+ flushChunk();
+ currentHeading = headingMatch[2].trim();
+ chunkStartLine = i + 1;
+ currentLines.push(line);
+ } else {
+ if (currentLines.length === 0) {
+ chunkStartLine = i + 1;
+ }
+ currentLines.push(line);
+ }
+ }
+
+ flushChunk();
+ return chunks;
+}
+
+// --- Feature hashing for local embeddings ---
+
+const VECTOR_DIM = 256;
+
+function hashToken(token: string): number {
+ let h = 0x811c9dc5;
+ for (let i = 0; i < token.length; i++) {
+ h ^= token.charCodeAt(i);
+ h = Math.imul(h, 0x01000193);
+ }
+ return h >>> 0;
+}
+
+function tokenize(text: string): string[] {
+ return text
+ .toLowerCase()
+ .replace(/[^a-z0-9\s]/g, ' ')
+ .split(/\s+/)
+ .filter((t) => t.length > 1);
+}
+
+export function textToVector(text: string): number[] {
+ const vec = new Float64Array(VECTOR_DIM);
+ const tokens = tokenize(text);
+
+ // Unigrams
+ for (const token of tokens) {
+ const h = hashToken(token);
+ const idx = h % VECTOR_DIM;
+ const sign = (h & 0x80000000) ? -1 : 1;
+ vec[idx] += sign;
+ }
+
+ // Bigrams for better context
+ for (let i = 0; i < tokens.length - 1; i++) {
+ const bigram = tokens[i] + '_' + tokens[i + 1];
+ const h = hashToken(bigram);
+ const idx = h % VECTOR_DIM;
+ const sign = (h & 0x80000000) ? -1 : 1;
+ vec[idx] += sign * 0.5;
+ }
+
+ // L2 normalize
+ let norm = 0;
+ for (let i = 0; i < VECTOR_DIM; i++) {
+ norm += vec[i] * vec[i];
+ }
+ norm = Math.sqrt(norm);
+ if (norm > 0) {
+ for (let i = 0; i < VECTOR_DIM; i++) {
+ vec[i] /= norm;
+ }
+ }
+
+ return Array.from(vec);
+}
+
+// --- zvec index management ---
+
+const INDEX_DIR_NAME = '.atxp-memory-index';
+
+function getIndexDir(basePath: string): string {
+ return path.join(basePath, INDEX_DIR_NAME);
+}
+
+async function loadZvec() {
+ try {
+ return await import('@zvec/zvec');
+ } catch {
+ console.error(chalk.red('Error: @zvec/zvec is not installed.'));
+ console.error(`Install it with: ${chalk.cyan('npm install @zvec/zvec')}`);
+ process.exit(1);
+ }
+}
+
+async function indexMemory(pathArg: string): Promise<void> {
+ if (!pathArg) {
+ console.error(chalk.red('Error: --path is required for index'));
+ console.error(`Usage: ${chalk.cyan('npx atxp memory index --path <dir>')}`);
+ process.exit(1);
+ }
+
+ const resolvedPath = path.resolve(pathArg);
+
+ if (!fs.existsSync(resolvedPath)) {
+ console.error(chalk.red(`Error: Directory does not exist: ${resolvedPath}`));
+ process.exit(1);
+ }
+
+ if (!fs.statSync(resolvedPath).isDirectory()) {
+ console.error(chalk.red(`Error: Path is not a directory: ${resolvedPath}`));
+ process.exit(1);
+ }
+
+ const zvec = await loadZvec();
+
+ console.log(chalk.gray(`Collecting .md files from ${resolvedPath}...`));
+ const files = collectMdFiles(resolvedPath);
+
+ if (files.length === 0) {
+ console.log(chalk.yellow('No .md files found in the specified directory.'));
+ return;
+ }
+
+ console.log(chalk.gray(`Found ${files.length} .md file(s). Chunking and indexing...`));
+
+ // Chunk all files
+ const allChunks: TextChunk[] = [];
+ for (const file of files) {
+ const chunks = chunkMarkdown(file.path, file.content);
+ allChunks.push(...chunks);
+ }
+
+ if (allChunks.length === 0) {
+ console.log(chalk.yellow('No content to index.'));
+ return;
+ }
+
+ console.log(chalk.gray(` ${allChunks.length} chunk(s) from ${files.length} file(s)`));
+
+ // Create zvec collection
+ const indexDir = getIndexDir(resolvedPath);
+
+ // Remove old index if it exists
+ if (fs.existsSync(indexDir)) {
+ fs.rmSync(indexDir, { recursive: true, force: true });
+ }
+ fs.mkdirSync(indexDir, { recursive: true });
+
+ const collectionPath = path.join(indexDir, 'memories');
+
+ const schema = new zvec.ZVecCollectionSchema({
+ name: 'memories',
+ fields: [
+ { name: 'file_path', dataType: zvec.ZVecDataType.STRING },
+ { name: 'heading', dataType: zvec.ZVecDataType.STRING },
+ { name: 'text', dataType: zvec.ZVecDataType.STRING },
+ { name: 'start_line', dataType: zvec.ZVecDataType.INT32 },
+ ],
+ vectors: [
+ {
+ name: 'embedding',
+ dataType: zvec.ZVecDataType.VECTOR_FP32,
+ dimension: VECTOR_DIM,
+ indexParams: { type: zvec.ZVecIndexType.HNSW },
+ },
+ ],
+ });
+
+ const collection = zvec.ZVecCreateAndOpen(collectionPath, schema);
+
+ try {
+ // Insert chunks in batches
+ const BATCH_SIZE = 100;
+ for (let i = 0; i < allChunks.length; i += BATCH_SIZE) {
+ const batch = allChunks.slice(i, i + BATCH_SIZE);
+ const docs = batch.map((chunk, j) => ({
+ id: `chunk_${i + j}`,
+ fields: {
+ file_path: chunk.filePath,
+ heading: chunk.heading,
+ text: chunk.text,
+ start_line: chunk.startLine,
+ },
+ vectors: {
+ embedding: textToVector(chunk.text),
+ },
+ }));
+
+ collection.insertSync(docs);
+ }
+
+ // Optimize for search performance
+ collection.optimizeSync();
+
+ // Write metadata
+ const meta = {
+ fileCount: files.length,
+ chunkCount: allChunks.length,
+ indexedAt: new Date().toISOString(),
+ vectorDim: VECTOR_DIM,
+ };
+ fs.writeFileSync(path.join(indexDir, 'meta.json'), JSON.stringify(meta, null, 2));
+
+ console.log();
+ console.log(chalk.green.bold('Memory indexed successfully!'));
+ console.log(' ' + chalk.bold('Files:') + ' ' + files.length);
+ console.log(' ' + chalk.bold('Chunks:') + ' ' + allChunks.length);
+ console.log(' ' + chalk.bold('Index:') + ' ' + indexDir);
+ } finally {
+ collection.closeSync();
+ }
+}
+
+async function searchMemory(query: string, pathArg: string, topk: number): Promise<void> {
+ if (!query) {
+ console.error(chalk.red('Error: search query is required'));
+ console.error(`Usage: ${chalk.cyan('npx atxp memory search <query> --path <dir>')}`);
+ process.exit(1);
+ }
+
+ if (!pathArg) {
+ console.error(chalk.red('Error: --path is required for search'));
+ console.error(`Usage: ${chalk.cyan('npx atxp memory search <query> --path <dir>')}`);
+ process.exit(1);
+ }
+
+ const resolvedPath = path.resolve(pathArg);
+ const indexDir = getIndexDir(resolvedPath);
+ const collectionPath = path.join(indexDir, 'memories');
+
+ if (!fs.existsSync(indexDir) || !fs.existsSync(collectionPath)) {
+ console.error(chalk.red('No memory index found. Build one first:'));
+ console.error(chalk.cyan(` npx atxp memory index --path ${pathArg}`));
+ process.exit(1);
+ }
+
+ const zvec = await loadZvec();
+
+ const collection = zvec.ZVecOpen(collectionPath);
+
+ try {
+ const queryVec = textToVector(query);
+
+ const results = collection.querySync({
+ fieldName: 'embedding',
+ vector: queryVec,
+ topk,
+ outputFields: ['file_path', 'heading', 'text', 'start_line'],
+ });
+
+ if (results.length === 0) {
+ console.log(chalk.yellow('No matching memories found.'));
+ return;
+ }
+
+ console.log(chalk.bold(`Found ${results.length} result(s) for "${query}":`));
+ console.log();
+
+ for (let i = 0; i < results.length; i++) {
+ const doc = results[i];
+ const filePath = doc.fields.file_path as string;
+ const heading = doc.fields.heading as string;
+ const text = doc.fields.text as string;
+ const startLine = doc.fields.start_line as number;
+ const score = doc.score;
+
+ console.log(chalk.cyan.bold(` ${i + 1}. ${filePath}:${startLine}`) + chalk.gray(` (score: ${score.toFixed(4)})`));
+ if (heading !== filePath) {
+ console.log(chalk.bold(` ${heading}`));
+ }
+ // Show a preview (first 200 chars)
+ const preview = text.length > 200 ? text.slice(0, 200) + '...' : text;
+ const indented = preview.split('\n').map((l) => ' ' + l).join('\n');
+ console.log(chalk.gray(indented));
+ console.log();
+ }
+ } finally {
+ collection.closeSync();
+ }
+}
+
+// --- Cloud backup operations (preserved from backup.ts) ---
+
+async function pushMemory(pathArg: string): Promise<void> {
+ if (!pathArg) {
+ console.error(chalk.red('Error: --path is required for push'));
+ console.error(`Usage: ${chalk.cyan('npx atxp memory push --path <dir>')}`);
+ process.exit(1);
+ }
+
+ const resolvedPath = path.resolve(pathArg);
+
+ if (!fs.existsSync(resolvedPath)) {
+ console.error(chalk.red(`Error: Directory does not exist: ${resolvedPath}`));
+ process.exit(1);
+ }
+
+ if (!fs.statSync(resolvedPath).isDirectory()) {
+ console.error(chalk.red(`Error: Path is not a directory: ${resolvedPath}`));
+ process.exit(1);
+ }
+
+ const { baseUrl, token } = getAccountsAuth();
+
+ console.log(chalk.gray(`Collecting .md files from ${resolvedPath}...`));
+
+ const files = collectMdFiles(resolvedPath);
+
+ if (files.length === 0) {
+ console.log(chalk.yellow('No .md files found in the specified directory.'));
+ return;
+ }
+
+ for (const file of files) {
+ console.log(chalk.gray(` ${file.path}`));
+ }
+
+ const totalBytes = files.reduce((sum, f) => sum + Buffer.byteLength(f.content, 'utf-8'), 0);
+ console.log(chalk.gray(`\nCompressing ${files.length} file(s) (${formatBytes(totalBytes)})...`));
+
+ const zip = new JSZip();
+ for (const file of files) {
+ zip.file(file.path, file.content);
+ }
+ const zipBuffer = await zip.generateAsync({ type: 'nodebuffer', compression: 'DEFLATE', compressionOptions: { level: 9 } });
+
+ console.log(chalk.gray(`Pushing zip archive (${formatBytes(zipBuffer.length)})...`));
+
+ const res = await fetch(`${baseUrl}/backup/files`, {
+ method: 'PUT',
+ headers: {
+ 'Authorization': `Bearer ${token}`,
+ 'Content-Type': 'application/zip',
+ },
+ body: new Uint8Array(zipBuffer),
+ });
+
+ if (!res.ok) {
+ const body = await res.json().catch(() => ({}));
+ console.error(chalk.red(`Error: ${(body as Record<string, unknown>).error || res.statusText}`));
+ process.exit(1);
+ }
+
+ const data = await res.json() as { fileCount: number; syncedAt: string };
+
+ console.log();
+ console.log(chalk.green.bold('Memory pushed successfully!'));
+ console.log(' ' + chalk.bold('Files:') + ' ' + data.fileCount);
+ console.log(' ' + chalk.bold('Synced at:') + ' ' + new Date(data.syncedAt).toLocaleString());
+}
+
+async function pullMemory(pathArg: string): Promise<void> {
+ if (!pathArg) {
+ console.error(chalk.red('Error: --path is required for pull'));
+ console.error(`Usage: ${chalk.cyan('npx atxp memory pull --path <dir>')}`);
+ process.exit(1);
+ }
+
+ const resolvedPath = path.resolve(pathArg);
+ const { baseUrl, token } = getAccountsAuth();
+
+ console.log(chalk.gray('Pulling memory from server...'));
+
+ const res = await fetch(`${baseUrl}/backup/files`, {
+ headers: {
+ 'Authorization': `Bearer ${token}`,
+ 'Accept': 'application/zip',
+ },
+ });
+
+ if (!res.ok) {
+ const body = await res.json().catch(() => ({}));
+ console.error(chalk.red(`Error: ${(body as Record<string, unknown>).error || res.statusText}`));
+ process.exit(1);
+ }
+
+ const zipBuffer = Buffer.from(await res.arrayBuffer());
+
+ if (zipBuffer.length === 0) {
+ console.log(chalk.yellow('No memory backup found on server. Push one first with:'));
+ console.log(chalk.cyan(' npx atxp memory push --path <dir>'));
+ return;
+ }
+
+ console.log(chalk.gray(`Extracting zip archive (${formatBytes(zipBuffer.length)})...`));
+
+ const zip = await JSZip.loadAsync(zipBuffer);
+ const fileNames = Object.keys(zip.files).filter(name => !zip.files[name].dir);
+
+ if (fileNames.length === 0) {
+ console.log(chalk.yellow('No memory backup found on server. Push one first with:'));
+ console.log(chalk.cyan(' npx atxp memory push --path <dir>'));
+ return;
+ }
+
+ // Create target directory if needed
+ fs.mkdirSync(resolvedPath, { recursive: true });
+
+ for (const name of fileNames) {
+ const content = await zip.files[name].async('string');
+ const filePath = path.join(resolvedPath, name);
+ const fileDir = path.dirname(filePath);
+
+ fs.mkdirSync(fileDir, { recursive: true });
+ fs.writeFileSync(filePath, content, 'utf-8');
+
+ console.log(chalk.gray(` ${name}`));
+ }
+
+ console.log();
+ console.log(chalk.green.bold('Memory pulled successfully!'));
+ console.log(' ' + chalk.bold('Files written:') + ' ' + fileNames.length);
+ console.log(' ' + chalk.bold('Directory:') + ' ' + resolvedPath);
+}
+
+async function memoryStatus(): Promise<void> {
+ const { baseUrl, token } = getAccountsAuth();
+
+ const res = await fetch(`${baseUrl}/backup/status`, {
+ headers: {
+ 'Authorization': `Bearer ${token}`,
+ },
+ });
+
+ if (!res.ok) {
+ const body = await res.json().catch(() => ({}));
+ console.error(chalk.red(`Error: ${(body as Record<string, unknown>).error || res.statusText}`));
+ process.exit(1);
+ }
+
+ const data = await res.json() as { fileCount: number; syncedAt: string; totalBytes: number };
+
+ console.log(chalk.bold('Memory Status'));
+ console.log();
+
+ // Cloud backup status
+ console.log(chalk.bold.underline('Cloud Backup'));
+ if (data.fileCount === 0) {
+ console.log(chalk.gray(' No backup found.'));
+ console.log(' Create one with: ' + chalk.cyan('npx atxp memory push --path <dir>'));
+ } else {
+ console.log(' ' + chalk.bold('Files:') + ' ' + data.fileCount);
+ console.log(' ' + chalk.bold('Total size:') + ' ' + formatBytes(data.totalBytes));
+ console.log(' ' + chalk.bold('Last sync:') + ' ' + new Date(data.syncedAt).toLocaleString());
+ }
+
+ // Check for local index
+ console.log();
+ console.log(chalk.bold.underline('Local Search Index'));
+ // We don't know the path here, so provide guidance
+ console.log(chalk.gray(' Use --path to check a specific directory\'s index:'));
+ console.log(chalk.cyan(' npx atxp memory status --path <dir>'));
+}
+
+async function memoryStatusWithPath(pathArg: string): Promise<void> {
+ const resolvedPath = path.resolve(pathArg);
+ const indexDir = getIndexDir(resolvedPath);
+ const metaPath = path.join(indexDir, 'meta.json');
+
+ console.log(chalk.bold('Memory Status'));
+ console.log();
+
+ // Cloud backup status
+ try {
+ const { baseUrl, token } = getAccountsAuth();
+ const res = await fetch(`${baseUrl}/backup/status`, {
+ headers: { 'Authorization': `Bearer ${token}` },
+ });
+
+ if (res.ok) {
+ const data = await res.json() as { fileCount: number; syncedAt: string; totalBytes: number };
+ console.log(chalk.bold.underline('Cloud Backup'));
+ if (data.fileCount === 0) {
+ console.log(chalk.gray(' No backup found.'));
+ } else {
+ console.log(' ' + chalk.bold('Files:') + ' ' + data.fileCount);
+ console.log(' ' + chalk.bold('Total size:') + ' ' + formatBytes(data.totalBytes));
+ console.log(' ' + chalk.bold('Last sync:') + ' ' + new Date(data.syncedAt).toLocaleString());
+ }
+ console.log();
+ }
+ } catch {
+ // No auth or network error — skip cloud status silently
+ }
+
+ // Local index status
+ console.log(chalk.bold.underline('Local Search Index'));
+ if (fs.existsSync(metaPath)) {
+ try {
+ const meta = JSON.parse(fs.readFileSync(metaPath, 'utf-8'));
+ console.log(' ' + chalk.bold('Files:') + ' ' + meta.fileCount);
+ console.log(' ' + chalk.bold('Chunks:') + ' ' + meta.chunkCount);
+ console.log(' ' + chalk.bold('Indexed at:') + ' ' + new Date(meta.indexedAt).toLocaleString());
+ console.log(' ' + chalk.bold('Index path:') + ' ' + indexDir);
+ } catch {
+ console.log(chalk.yellow(' Index metadata is corrupted. Re-index with:'));
+ console.log(chalk.cyan(` npx atxp memory index --path ${pathArg}`));
+ }
+ } else {
+ console.log(chalk.gray(' No local index found.'));
+ console.log(' Create one with: ' + chalk.cyan(`npx atxp memory index --path ${pathArg}`));
+ }
+}
+
+// --- Helpers ---
+
+function formatBytes(bytes: number): string {
+ if (bytes < 1024) return `${bytes} B`;
+ if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
+ return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
+}
+
+function showMemoryHelp(): void {
+ console.log(chalk.bold('Memory Commands:'));
+ console.log();
+ console.log(chalk.bold.underline('Cloud Backup'));
+ console.log(' ' + chalk.cyan('npx atxp memory push --path <dir>') + ' ' + 'Push .md files to server');
+ console.log(' ' + chalk.cyan('npx atxp memory pull --path <dir>') + ' ' + 'Pull .md files from server');
+ console.log(' ' + chalk.cyan('npx atxp memory status') + ' ' + 'Show backup & index info');
+ console.log();
+ console.log(chalk.bold.underline('Local Search'));
+ console.log(' ' + chalk.cyan('npx atxp memory index --path <dir>') + ' ' + 'Index .md files for search');
+ console.log(' ' + chalk.cyan('npx atxp memory search <query> --path <dir>') + ' Search memories');
+ console.log();
+ console.log(chalk.bold('Details:'));
+ console.log(' push/pull back up all .md files (recursively) to/from ATXP servers.');
+ console.log(' Files are compressed into a zip archive before upload.');
+ console.log(' Each push replaces the previous server snapshot entirely.');
+ console.log(' Pull writes server files to the local directory (non-destructive).');
+ console.log();
+ console.log(' index scans .md files, chunks them by heading, and builds a local');
+ console.log(' vector search index using zvec. search finds relevant memory chunks');
+ console.log(' by similarity. No network access needed for index/search.');
+ console.log();
+ console.log(chalk.bold('Options:'));
+ console.log(' ' + chalk.yellow('--path') + ' ' + 'Directory to operate on (required for push/pull/index/search)');
+ console.log(' ' + chalk.yellow('--topk') + ' ' + 'Number of results to return (default: 10, search only)');
+ console.log();
+ console.log(chalk.bold('Examples:'));
+ console.log(' npx atxp memory push --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory pull --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory index --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory search "authentication flow" --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory status --path ~/.openclaw/workspace-abc/');
+}
+
+// --- Main command router ---
+
+export async function memoryCommand(subCommand: string, options: MemoryOptions, query?: string): Promise<void> {
+ if (!subCommand || subCommand === 'help' || subCommand === '--help' || subCommand === '-h') {
+ showMemoryHelp();
+ return;
+ }
+
+ switch (subCommand) {
+ case 'push':
+ await pushMemory(options.path || '');
+ break;
+
+ case 'pull':
+ await pullMemory(options.path || '');
+ break;
+
+ case 'status':
+ if (options.path) {
+ await memoryStatusWithPath(options.path);
+ } else {
+ await memoryStatus();
+ }
+ break;
+
+ case 'index':
+ await indexMemory(options.path || '');
+ break;
+
+ case 'search':
+ await searchMemory(query || '', options.path || '', options.topk || 10);
+ break;
+
+ default:
+ console.error(chalk.red(`Unknown memory command: ${subCommand}`));
+ console.log();
+ showMemoryHelp();
+ process.exit(1);
+ }
+}
diff --git a/packages/atxp/src/help.ts b/packages/atxp/src/help.ts
index c23edb7..2194f89 100644
--- a/packages/atxp/src/help.ts
+++ b/packages/atxp/src/help.ts
@@ -29,7 +29,7 @@ export function showHelp(): void {
console.log(' ' + chalk.cyan('whoami') + ' ' + 'Show your account info (ID, email, wallet)');
console.log(' ' + chalk.cyan('agent') + ' ' + chalk.yellow('<subcommand>') + ' ' + 'Create and manage agent accounts');
console.log(' ' + chalk.cyan('topup') + ' ' + chalk.yellow('[options]') + ' ' + 'Create a payment link to fund your agent');
- console.log(' ' + chalk.cyan('backup') + ' ' + chalk.yellow('<subcommand>') + ' ' + 'Back up and restore agent .md files');
+ console.log(' ' + chalk.cyan('memory') + ' ' + chalk.yellow('<subcommand>') + ' ' + 'Manage, search, and back up agent memory files');
console.log();
console.log(chalk.bold('PAAS (Platform as a Service):'));
@@ -105,10 +105,12 @@ export function showHelp(): void {
console.log(' npx atxp topup --amount 25 --open # Create link and open in browser');
console.log();
- console.log(chalk.bold('Backup Examples:'));
- console.log(' npx atxp backup push --path ~/.openclaw/workspace-abc/');
- console.log(' npx atxp backup pull --path ~/.openclaw/workspace-abc/');
- console.log(' npx atxp backup status');
+ console.log(chalk.bold('Memory Examples:'));
+ console.log(' npx atxp memory push --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory pull --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory index --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory search "auth flow" --path ~/.openclaw/workspace-abc/');
+ console.log(' npx atxp memory status --path ~/.openclaw/workspace-abc/');
console.log();
console.log(chalk.bold('PAAS Examples:'));
diff --git a/packages/atxp/src/index.ts b/packages/atxp/src/index.ts
index 75b4035..bfb017c 100644
--- a/packages/atxp/src/index.ts
+++ b/packages/atxp/src/index.ts
@@ -19,7 +19,7 @@ import { paasCommand } from './commands/paas/index.js';
import { agentCommand } from './commands/agent.js';
import { whoamiCommand } from './commands/whoami.js';
import { topupCommand } from './commands/topup.js';
-import { backupCommand, type BackupOptions } from './commands/backup.js';
+import { memoryCommand, type MemoryOptions } from './commands/memory.js';
interface DemoOptions {
port: number;
@@ -87,14 +87,14 @@ function parseArgs(): {
paasOptions: PaasOptions;
paasArgs: string[];
toolArgs: string;
- backupOptions: BackupOptions;
+ memoryOptions: MemoryOptions;
} {
const command = process.argv[2];
const subCommand = process.argv[3];
// Check for help flags early - but NOT for paas or email commands (they handle --help internally)
const helpFlag = process.argv.includes('--help') || process.argv.includes('-h');
- if (helpFlag && command !== 'paas' && command !== 'email' && command !== 'agent' && command !== 'topup' && command !== 'backup') {
+ if (helpFlag && command !== 'paas' && command !== 'email' && command !== 'agent' && command !== 'topup' && command !== 'memory' && command !== 'backup') {
return {
command: 'help',
demoOptions: { port: 8017, dir: '', verbose: false, refresh: false },
@@ -104,7 +104,7 @@ function parseArgs(): {
paasOptions: {},
paasArgs: [],
toolArgs: '',
- backupOptions: {},
+ memoryOptions: {},
};
}
@@ -224,9 +224,10 @@ function parseArgs(): {
body: getArgValue('--body', ''),
};
- // Parse backup options
- const backupOptions: BackupOptions = {
+ // Parse memory options
+ const memoryOptions: MemoryOptions = {
path: getArgValue('--path', ''),
+ topk: getArgValue('--topk', '') ? parseInt(getArgValue('--topk', '')!, 10) : undefined,
};
return {
@@ -239,11 +240,11 @@ function parseArgs(): {
paasOptions,
paasArgs,
toolArgs,
- backupOptions,
+ memoryOptions,
};
}
-const { command, subCommand, demoOptions, createOptions, loginOptions, emailOptions, paasOptions, paasArgs, toolArgs, backupOptions } = parseArgs();
+const { command, subCommand, demoOptions, createOptions, loginOptions, emailOptions, paasOptions, paasArgs, toolArgs, memoryOptions } = parseArgs();
// Detect if we're in create mode (npm create atxp or npx atxp create)
const isCreateMode =
@@ -345,8 +346,13 @@ async function main() {
await topupCommand();
break;
+ case 'memory':
+ await memoryCommand(subCommand || '', memoryOptions, process.argv.slice(4).filter((arg) => !arg.startsWith('-')).join(' '));
+ break;
+
case 'backup':
- await backupCommand(subCommand || '', backupOptions);
+ // Backward compatibility: 'backup' is an alias for 'memory'
+ await memoryCommand(subCommand || '', memoryOptions, process.argv.slice(4).filter((arg) => !arg.startsWith('-')).join(' '));
break;
case 'dev':
diff --git a/packages/atxp/src/types/zvec.d.ts b/packages/atxp/src/types/zvec.d.ts
new file mode 100644
index 0000000..12761d2
--- /dev/null
+++ b/packages/atxp/src/types/zvec.d.ts
@@ -0,0 +1,123 @@
+declare module '@zvec/zvec' {
+ export const ZVecDataType: {
+ STRING: number;
+ BOOL: number;
+ INT32: number;
+ INT64: number;
+ UINT32: number;
+ UINT64: number;
+ FLOAT: number;
+ DOUBLE: number;
+ VECTOR_FP16: number;
+ VECTOR_FP32: number;
+ VECTOR_FP64: number;
+ VECTOR_INT8: number;
+ SPARSE_VECTOR_FP16: number;
+ SPARSE_VECTOR_FP32: number;
+ ARRAY_STRING: number;
+ ARRAY_BOOL: number;
+ ARRAY_INT32: number;
+ ARRAY_INT64: number;
+ ARRAY_UINT32: number;
+ ARRAY_UINT64: number;
+ ARRAY_FLOAT: number;
+ ARRAY_DOUBLE: number;
+ };
+
+ export const ZVecIndexType: {
+ FLAT: number;
+ HNSW: number;
+ IVF: number;
+ };
+
+ export interface ZVecFieldSchema {
+ readonly name: string;
+ readonly dataType: number;
+ readonly nullable?: boolean;
+ readonly indexParams?: Record<string, unknown>;
+ }
+
+ export interface ZVecVectorSchema {
+ readonly name: string;
+ readonly dataType: number;
+ readonly dimension?: number;
+ readonly indexParams?: { type: number; [key: string]: unknown };
+ }
+
+ export class ZVecCollectionSchema {
+ constructor(params: {
+ name: string;
+ vectors: ZVecVectorSchema | ZVecVectorSchema[];
+ fields?: ZVecFieldSchema | ZVecFieldSchema[];
+ });
+ readonly name: string;
+ field(fieldName: string): ZVecFieldSchema;
+ vector(vectorName: string): ZVecVectorSchema;
+ fields(): ZVecFieldSchema[];
+ vectors(): ZVecVectorSchema[];
+ }
+
+ export interface ZVecDocInput {
+ id: string;
+ vectors?: Record<string, number[] | Float32Array>;
+ fields?: Record<string, unknown>;
+ }
+
+ export interface ZVecDoc {
+ readonly id: string;
+ readonly vectors: Record<string, number[] | Float32Array>;
+ readonly fields: Record<string, unknown>;
+ readonly score: number;
+ }
+
+ export interface ZVecQuery {
+ fieldName?: string;
+ topk?: number;
+ vector?: number[] | Float32Array;
+ filter?: string;
+ includeVector?: boolean;
+ outputFields?: string[];
+ params?: Record<string, unknown>;
+ }
+
+ export interface ZVecStatus {
+ id: string;
+ success: boolean;
+ message?: string;
+ }
+
+ export interface ZVecCollectionOptions {
+ [key: string]: unknown;
+ }
+
+ export interface ZVecOptimizeOptions {
+ [key: string]: unknown;
+ }
+
+ export class ZVecCollection {
+ readonly path: string;
+ readonly schema: ZVecCollectionSchema;
+ readonly stats: { docCount: number; indexCompleteness: Record<string, number> };
+ insertSync(docs: ZVecDocInput | ZVecDocInput[]): ZVecStatus | ZVecStatus[];
+ upsertSync(docs: ZVecDocInput | ZVecDocInput[]): ZVecStatus | ZVecStatus[];
+ updateSync(docs: ZVecDocInput | ZVecDocInput[]): ZVecStatus | ZVecStatus[];
+ deleteSync(ids: string | string[]): ZVecStatus | ZVecStatus[];
+ deleteByFilterSync(filter: string): ZVecStatus;
+ querySync(params: ZVecQuery): ZVecDoc[];
+ fetchSync(ids: string | string[]): Record<string, ZVecDoc>;
+ optimizeSync(options?: ZVecOptimizeOptions): void;
+ closeSync(): void;
+ destroySync(): void;
+ }
+
+ export function ZVecCreateAndOpen(
+ path: string,
+ schema: ZVecCollectionSchema,
+ options?: ZVecCollectionOptions
+ ): ZVecCollection;
+
+ export function ZVecOpen(
+ path: string,
+ options?: ZVecCollectionOptions
+ ): ZVecCollection;
+}
diff --git a/skills/atxp-backup/SKILL.md b/skills/atxp-backup/SKILL.md
deleted file mode 100644
index e31075a..0000000
--- a/skills/atxp-backup/SKILL.md
+++ /dev/null
@@ -1,72 +0,0 @@
----
-name: atxp-backup
-description: Free cloud backup for agent identity and memory files (.md only)
-compatibility: Requires Node.js >=18 and npx
-tags: [backup, identity, memory, agent-files, sync]
-metadata:
- homepage: https://docs.atxp.ai
- source: https://github.com/atxp-dev/cli
- npm: https://www.npmjs.com/package/atxp
- requires:
- binaries: [node, npx]
- node: ">=18"
- env:
- - name: ATXP_CONNECTION
- description: Auth token (created by npx atxp@latest login or agent register)
- required: true
----
-
-# ATXP Backup — Agent Identity & Memory Backup
-
-Back up and restore your agent's `.md` files (SOUL.md, MEMORY.md, memory/*.md, AGENTS.md, USER.md, etc.) to ATXP servers. If your workspace is lost, pull your backup to a fresh machine and resume where you left off.
-
-## Security Model
-
-- **Only `.md` files** are collected and transmitted. No credentials, JSON configs, binaries, or other file types are ever sent.
-- Files are sent to ATXP servers over **HTTPS**, associated with the authenticated agent's identity.
-- `push` **replaces** the server snapshot entirely (latest snapshot only, no history).
-- `pull` is **non-destructive** — it writes server files to the local directory but does not delete local files absent from the server.
-- **Filesystem access**: reads from `--path` directory (push), writes to `--path` directory (pull). No other directories are touched.
-- **No modification** of OpenClaw config or auth files.
-
-## When to Use
-
-| Situation | Command |
-|-----------|---------|
-| After meaningful changes to SOUL.md, MEMORY.md, or at end of session | `push` |
-| Bootstrapping a fresh workspace or recovering from environment loss | `pull` |
-| Verify backup exists before risky operations | `status` |
-
-## Commands Reference
-
-| Command | Description |
-|---------|-------------|
-| `npx atxp@latest backup push --path <dir>` | Recursively collect all `*.md` files from `<dir>` and upload to server |
-| `npx atxp@latest backup pull --path <dir>` | Download backup from server and write files to `<dir>` |
-| `npx atxp@latest backup status` | Show file count, total size, and last sync time |
-
-### Options
-
-| Option | Required | Description |
-|--------|----------|-------------|
-| `--path <dir>` | Yes (push/pull) | Directory to push from or pull to |
-
-## Path Conventions
-
-Typical OpenClaw workspace paths:
-
-```
-~/.openclaw/workspace-<id>/
-~/.openclaw/workspace-<id>/SOUL.md
-~/.openclaw/workspace-<id>/MEMORY.md
-~/.openclaw/workspace-<id>/memory/
-~/.openclaw/workspace-<id>/AGENTS.md
-~/.openclaw/workspace-<id>/USER.md
-```
-
-## Limitations
-
-- **`.md` files only** — all other file types are ignored during push and not present in pull.
-- **Latest snapshot only** — each push overwrites the previous backup. There is no version history.
-- **Requires ATXP auth** — run `npx atxp@latest login` or `npx atxp@latest agent register` first.
-- **`--path` is required** — there is no auto-detection of workspace location.
diff --git a/skills/atxp-memory/SKILL.md b/skills/atxp-memory/SKILL.md
new file mode 100644
index 0000000..f68eb7a
--- /dev/null
+++ b/skills/atxp-memory/SKILL.md
@@ -0,0 +1,117 @@
+---
+name: atxp-memory
+description: Agent memory management — cloud backup, restore, and local vector search of .md memory files
+compatibility: Requires Node.js >=18 and npx
+tags: [memory, search, backup, identity, agent-files, sync, vector-search, zvec]
+metadata:
+ homepage: https://docs.atxp.ai
+ source: https://github.com/atxp-dev/cli
+ npm: https://www.npmjs.com/package/atxp
+ requires:
+ binaries: [node, npx]
+ node: ">=18"
+ env:
+ - name: ATXP_CONNECTION
+ description: Auth token (created by npx atxp@latest login or agent register). Required for push/pull/status cloud operations. Not required for local index/search.
+ required: false
+ optionalDependencies:
+ - name: "@zvec/zvec"
+ description: Embedded vector database for local memory search (required for index/search subcommands)
+---
+
+# ATXP Memory — Agent Memory Management
+
+Manage your agent's `.md` memory files: back up and restore to/from ATXP cloud servers, and **search your local memories** using zvec vector similarity search.
+
+## Capabilities
+
+| Capability | Description |
+|------------|-------------|
+| **Cloud Backup** | Push/pull `.md` files to ATXP servers for disaster recovery |
+| **Local Search** | Index `.md` files into a local zvec vector database, then search by natural language query |
+| **Status** | View cloud backup info and local index statistics |
+
+## Security Model
+
+- **Only `.md` files** are collected and transmitted (push/pull). No credentials, JSON configs, binaries, or other file types are ever sent.
+- Files are sent to ATXP servers over **HTTPS**, associated with the authenticated agent's identity.
+- `push` **replaces** the server snapshot entirely (latest snapshot only, no history).
+- `pull` is **non-destructive** — it writes server files to the local directory but does not delete local files absent from the server.
+- **Local search index** is stored in a `.atxp-memory-index/` subdirectory inside `--path`. It never leaves the local machine.
+- **index** and **search** do not require authentication or network access.
+- **Filesystem access**: reads from `--path` directory (push/index), writes to `--path` directory (pull) and `--path/.atxp-memory-index/` (index). No other directories are touched.
+- **No modification** of OpenClaw config or auth files.
+
+## When to Use
+
+| Situation | Command |
+|-----------|---------|
+| After meaningful changes to SOUL.md, MEMORY.md, or at end of session | `push` |
+| Bootstrapping a fresh workspace or recovering from environment loss | `pull` |
+| After updating memory files and before starting a task that requires recall | `index` |
+| Looking for relevant context in past memories | `search` |
+| Verify backup exists before risky operations | `status` |
+
+## Commands Reference
+
+| Command | Description |
+|---------|-------------|
+| `npx atxp@latest memory push --path <dir>` | Recursively collect all `*.md` files from `<dir>` and upload to server |
+| `npx atxp@latest memory pull --path <dir>` | Download backup from server and write files to `<dir>` |
+| `npx atxp@latest memory index --path <dir>` | Chunk `.md` files by heading and build a local zvec search index |
+| `npx atxp@latest memory search <query> --path <dir>` | Search indexed memories by similarity |
+| `npx atxp@latest memory status [--path <dir>]` | Show cloud backup info and/or local index stats |
+
+### Options
+
+| Option | Required | Description |
+|--------|----------|-------------|
+| `--path <dir>` | Yes (push/pull/index/search) | Directory to operate on |
+| `--topk <n>` | No (search only) | Number of results to return (default: 10) |
+
+## How Local Search Works
+
+1. **Indexing** (`memory index`):
+ - Scans all `.md` files recursively from `--path`
+ - Splits each file into chunks at heading boundaries (h1/h2/h3)
+ - Converts each chunk into a 256-dimensional feature vector using locality-sensitive hashing (unigrams + bigrams)
+ - Stores vectors and metadata in a local zvec database (HNSW index) at `<path>/.atxp-memory-index/`
+
+2. **Searching** (`memory search`):
+ - Converts the query text into the same vector representation
+ - Performs approximate nearest neighbor search via zvec's HNSW index
+ - Returns the top-k most similar chunks with file paths, headings, line numbers, and similarity scores
+
+The search is purely local — no network requests, no API keys, no cost. Re-index after modifying memory files.
+
+## Path Conventions
+
+Typical OpenClaw workspace paths:
+
+```
+~/.openclaw/workspace-<id>/
+~/.openclaw/workspace-<id>/SOUL.md
+~/.openclaw/workspace-<id>/MEMORY.md
+~/.openclaw/workspace-<id>/memory/
+~/.openclaw/workspace-<id>/AGENTS.md
+~/.openclaw/workspace-<id>/USER.md
+```
+
+## Backward Compatibility
+
+The `backup` command is still accepted as an alias for `memory`:
+
+```bash
+npx atxp@latest backup push --path <dir> # works, same as memory push
+npx atxp@latest backup pull --path <dir> # works, same as memory pull
+npx atxp@latest backup status # works, same as memory status
+```
+
+## Limitations
+
+- **`.md` files only** — all other file types are ignored during push/index and not present in pull.
+- **Latest snapshot only** — each push overwrites the previous backup. There is no version history.
+- **Requires ATXP auth for cloud operations** — run `npx atxp@latest login` or `npx atxp@latest agent register` first.
+- **`--path` is required** — there is no auto-detection of workspace location.
+- **Local search requires @zvec/zvec** — install with `npm install @zvec/zvec` before using index/search.
+- **Feature-hash embeddings** — local search uses statistical text hashing, not neural embeddings. It works well for keyword and phrase matching but is not a full semantic search. For best results, use specific terms from your memory files.
diff --git a/skills/atxp/SKILL.md b/skills/atxp/SKILL.md
index 280e080..ef9d167 100644
--- a/skills/atxp/SKILL.md
+++ b/skills/atxp/SKILL.md
@@ -331,12 +331,12 @@ For agents that are able to configure their own LLM access, the ATXP LLM Gateway
## Related Skills
-### ATXP Backup
+### ATXP Memory
-The **atxp-backup** skill provides free cloud backup for agent identity and memory files (`.md` only). It is packaged as a separate skill because it has a **different security boundary**: it reads and writes files in a user-specified directory (`--path`), whereas this skill only touches `~/.atxp/config`. If your agent needs to persist SOUL.md, MEMORY.md, or other workspace files across environments, install it separately:
+The **atxp-memory** skill provides agent memory management — cloud backup/restore and local vector search of `.md` memory files. It is packaged as a separate skill because it has a **different security boundary**: it reads and writes files in a user-specified directory (`--path`), whereas this skill only touches `~/.atxp/config`. If your agent needs to persist, search, or recover SOUL.md, MEMORY.md, or other workspace files, install it separately:
```bash
-npx skills add atxp-dev/cli --skill atxp-backup
+npx skills add atxp-dev/cli --skill atxp-memory
```
## Support