Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
218 changes: 132 additions & 86 deletions src/lib/init/local-ops.ts
Original file line number Diff line number Diff line change
Expand Up @@ -284,8 +284,10 @@ function safePath(cwd: string, relative: string): string {
* Pre-compute directory listing before the first API call.
* Uses the same parameters the server's discover-context step would request.
*/
export function precomputeDirListing(directory: string): DirEntry[] {
const result = listDir({
export async function precomputeDirListing(
directory: string
): Promise<DirEntry[]> {
const result = await listDir({
type: "local-op",
operation: "list-dir",
cwd: directory,
Expand Down Expand Up @@ -344,7 +346,7 @@ export async function handleLocalOp(
}
}

function listDir(payload: ListDirPayload): LocalOpResult {
async function listDir(payload: ListDirPayload): Promise<LocalOpResult> {
const { cwd, params } = payload;
const targetPath = safePath(cwd, params.path);
const maxDepth = params.maxDepth ?? 3;
Expand All @@ -353,15 +355,15 @@ function listDir(payload: ListDirPayload): LocalOpResult {

const entries: DirEntry[] = [];

// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: walking the directory tree is a complex operation
function walk(dir: string, depth: number): void {
// biome-ignore lint/complexity/noExcessiveCognitiveComplexity: recursive directory walk is inherently complex but straightforward
async function walk(dir: string, depth: number): Promise<void> {
if (entries.length >= maxEntries || depth > maxDepth) {
return;
}

let dirEntries: fs.Dirent[];
try {
dirEntries = fs.readdirSync(dir, { withFileTypes: true });
dirEntries = await fs.promises.readdir(dir, { withFileTypes: true });
} catch {
return;
}
Expand All @@ -372,77 +374,111 @@ function listDir(payload: ListDirPayload): LocalOpResult {
}

const relPath = path.relative(cwd, path.join(dir, entry.name));

// Skip symlinks that escape the project directory
if (entry.isSymbolicLink()) {
try {
safePath(cwd, relPath);
} catch {
continue;
}
}

const type = entry.isDirectory() ? "directory" : "file";
entries.push({ name: entry.name, path: relPath, type });

if (
recursive &&
entry.isDirectory() &&
!entry.isSymbolicLink() &&
!entry.name.startsWith(".") &&
entry.name !== "node_modules"
) {
walk(path.join(dir, entry.name), depth + 1);
await walk(path.join(dir, entry.name), depth + 1);
}
}
}

walk(targetPath, 0);
await walk(targetPath, 0);
return { ok: true, data: { entries } };
}

function readFiles(payload: ReadFilesPayload): LocalOpResult {
const { cwd, params } = payload;
const maxBytes = params.maxBytes ?? MAX_FILE_BYTES;
const files: Record<string, string | null> = {};

for (const filePath of params.paths) {
try {
const absPath = safePath(cwd, filePath);
const stat = fs.statSync(absPath);
let content: string;
if (stat.size > maxBytes) {
// Read only up to maxBytes
async function readSingleFile(
cwd: string,
filePath: string,
maxBytes: number
): Promise<string | null> {
try {
const absPath = safePath(cwd, filePath);
const stat = await fs.promises.stat(absPath);
let content: string;
if (stat.size > maxBytes) {
const fh = await fs.promises.open(absPath, "r");
try {
const buffer = Buffer.alloc(maxBytes);
const fd = fs.openSync(absPath, "r");
try {
fs.readSync(fd, buffer, 0, maxBytes, 0);
} finally {
fs.closeSync(fd);
}
await fh.read(buffer, 0, maxBytes, 0);
content = buffer.toString("utf-8");
} else {
content = fs.readFileSync(absPath, "utf-8");
} finally {
await fh.close();
}
} else {
content = await fs.promises.readFile(absPath, "utf-8");
}

// Minify JSON files by stripping whitespace/formatting
if (filePath.endsWith(".json")) {
try {
content = JSON.stringify(JSON.parse(content));
} catch {
// Not valid JSON (truncated, JSONC, etc.) — send as-is
}
// Minify JSON files by stripping whitespace/formatting
if (filePath.endsWith(".json")) {
try {
content = JSON.stringify(JSON.parse(content));
} catch {
// Not valid JSON (truncated, JSONC, etc.) — send as-is
}

files[filePath] = content;
} catch {
files[filePath] = null;
}

return content;
} catch {
return null;
}
}

/**
 * Read every requested file concurrently and return a map from the
 * requested path to its contents, with `null` for paths that could not
 * be read.
 */
async function readFiles(payload: ReadFilesPayload): Promise<LocalOpResult> {
  const { cwd, params } = payload;
  const byteCap = params.maxBytes ?? MAX_FILE_BYTES;

  // One [path, contents] pair per requested path, fetched in parallel.
  const pairs = await Promise.all(
    params.paths.map(
      async (filePath) =>
        [filePath, await readSingleFile(cwd, filePath, byteCap)] as const
    )
  );

  const files: Record<string, string | null> = Object.fromEntries(pairs);

  return { ok: true, data: { files } };
}

function fileExistsBatch(payload: FileExistsBatchPayload): LocalOpResult {
async function fileExistsBatch(
payload: FileExistsBatchPayload
): Promise<LocalOpResult> {
const { cwd, params } = payload;
const exists: Record<string, boolean> = {};

for (const filePath of params.paths) {
try {
const absPath = safePath(cwd, filePath);
exists[filePath] = fs.existsSync(absPath);
} catch {
exists[filePath] = false;
}
const results = await Promise.all(
params.paths.map(async (filePath) => {
try {
const absPath = safePath(cwd, filePath);
await fs.promises.access(absPath);
return [filePath, true] as const;
} catch {
return [filePath, false] as const;
}
})
);

const exists: Record<string, boolean> = {};
for (const [filePath, found] of results) {
exists[filePath] = found;
}

return { ok: true, data: { exists } };
Expand Down Expand Up @@ -580,24 +616,56 @@ function applyPatchsetDryRun(payload: ApplyPatchsetPayload): LocalOpResult {
* indentation style is detected and preserved. For `create` actions, a default
* of 2-space indentation is used.
*/
function resolvePatchContent(
async function resolvePatchContent(
absPath: string,
patch: ApplyPatchsetPayload["params"]["patches"][number]
): string {
): Promise<string> {
if (!patch.path.endsWith(".json")) {
return patch.patch;
}
if (patch.action === "modify") {
const existing = fs.readFileSync(absPath, "utf-8");
const existing = await fs.promises.readFile(absPath, "utf-8");
return prettyPrintJson(patch.patch, detectJsonIndent(existing));
}
return prettyPrintJson(patch.patch, DEFAULT_JSON_INDENT);
}

function applyPatchset(
type Patch = ApplyPatchsetPayload["params"]["patches"][number];

const VALID_PATCH_ACTIONS = new Set(["create", "modify", "delete"]);

/**
 * Apply one validated patch to the filesystem.
 *
 * `create` makes parent directories as needed; `delete` treats an
 * already-missing file as success. Unknown actions are rejected
 * upstream before this function runs, so they are a no-op here.
 */
async function applySinglePatch(absPath: string, patch: Patch): Promise<void> {
  if (patch.action === "create") {
    // Ensure the parent directory chain exists before writing.
    await fs.promises.mkdir(path.dirname(absPath), { recursive: true });
    const resolved = await resolvePatchContent(absPath, patch);
    await fs.promises.writeFile(absPath, resolved, "utf-8");
    return;
  }

  if (patch.action === "modify") {
    const resolved = await resolvePatchContent(absPath, patch);
    await fs.promises.writeFile(absPath, resolved, "utf-8");
    return;
  }

  if (patch.action === "delete") {
    try {
      await fs.promises.unlink(absPath);
    } catch (err) {
      // A file that is already gone is an acceptable delete outcome.
      if ((err as NodeJS.ErrnoException).code !== "ENOENT") {
        throw err;
      }
    }
  }
}

async function applyPatchset(
payload: ApplyPatchsetPayload,
dryRun?: boolean
): LocalOpResult {
): Promise<LocalOpResult> {
if (dryRun) {
return applyPatchsetDryRun(payload);
}
Expand All @@ -607,56 +675,34 @@ function applyPatchset(
// Phase 1: Validate all paths and actions before writing anything
for (const patch of params.patches) {
safePath(cwd, patch.path);
if (!["create", "modify", "delete"].includes(patch.action)) {
if (!VALID_PATCH_ACTIONS.has(patch.action)) {
return {
ok: false,
error: `Unknown patch action: "${patch.action}" for path "${patch.path}"`,
};
}
}

// Phase 2: Apply patches
// Phase 2: Apply patches (sequential — later patches may depend on earlier creates)
const applied: Array<{ path: string; action: string }> = [];

for (const patch of params.patches) {
const absPath = safePath(cwd, patch.path);

switch (patch.action) {
case "create": {
const dir = path.dirname(absPath);
fs.mkdirSync(dir, { recursive: true });
const content = resolvePatchContent(absPath, patch);
fs.writeFileSync(absPath, content, "utf-8");
applied.push({ path: patch.path, action: "create" });
break;
}
case "modify": {
if (!fs.existsSync(absPath)) {
return {
ok: false,
error: `Cannot modify "${patch.path}": file does not exist`,
data: { applied },
};
}
const content = resolvePatchContent(absPath, patch);
fs.writeFileSync(absPath, content, "utf-8");
applied.push({ path: patch.path, action: "modify" });
break;
}
case "delete": {
if (fs.existsSync(absPath)) {
fs.unlinkSync(absPath);
}
applied.push({ path: patch.path, action: "delete" });
break;
}
default:
if (patch.action === "modify") {
try {
await fs.promises.access(absPath);
} catch {
return {
ok: false,
error: `Unknown patch action: "${patch.action}" for path "${patch.path}"`,
error: `Cannot modify "${patch.path}": file does not exist`,
data: { applied },
};
}
}

await applySinglePatch(absPath, patch);
applied.push({ path: patch.path, action: patch.action });
}

return { ok: true, data: { applied } };
Expand Down
2 changes: 1 addition & 1 deletion src/lib/init/wizard-runner.ts
Original file line number Diff line number Diff line change
Expand Up @@ -623,7 +623,7 @@ export async function runWizard(initialOptions: WizardOptions): Promise<void> {
let run: Awaited<ReturnType<typeof workflow.createRun>>;
let result: WorkflowRunResult;
try {
const dirListing = precomputeDirListing(directory);
const dirListing = await precomputeDirListing(directory);
spin.message("Connecting to wizard...");
run = await workflow.createRun();
result = assertWorkflowResult(
Expand Down
Loading
Loading