diff --git a/.yarn/cache/@babel-generator-npm-7.28.5-fd8f3ae6b1-ae618f0a17.zip b/.yarn/cache/@babel-generator-npm-7.28.5-fd8f3ae6b1-ae618f0a17.zip
new file mode 100644
index 000000000..3b704f2c2
Binary files /dev/null and b/.yarn/cache/@babel-generator-npm-7.28.5-fd8f3ae6b1-ae618f0a17.zip differ
diff --git a/.yarn/cache/@babel-helper-string-parser-npm-7.27.1-d1471e0598-0ae29cc200.zip b/.yarn/cache/@babel-helper-string-parser-npm-7.27.1-d1471e0598-0ae29cc200.zip
new file mode 100644
index 000000000..db113bb54
Binary files /dev/null and b/.yarn/cache/@babel-helper-string-parser-npm-7.27.1-d1471e0598-0ae29cc200.zip differ
diff --git a/.yarn/cache/@babel-helper-validator-identifier-npm-7.28.5-1953d49d2b-8e5d9b0133.zip b/.yarn/cache/@babel-helper-validator-identifier-npm-7.28.5-1953d49d2b-8e5d9b0133.zip
new file mode 100644
index 000000000..c67a0ac56
Binary files /dev/null and b/.yarn/cache/@babel-helper-validator-identifier-npm-7.28.5-1953d49d2b-8e5d9b0133.zip differ
diff --git a/.yarn/cache/@babel-parser-npm-7.28.5-f2345a6b62-8d9bfb437a.zip b/.yarn/cache/@babel-parser-npm-7.28.5-f2345a6b62-8d9bfb437a.zip
new file mode 100644
index 000000000..31eab620f
Binary files /dev/null and b/.yarn/cache/@babel-parser-npm-7.28.5-f2345a6b62-8d9bfb437a.zip differ
diff --git a/.yarn/cache/@babel-types-npm-7.28.5-582d7cca8a-4256bb9fb2.zip b/.yarn/cache/@babel-types-npm-7.28.5-582d7cca8a-4256bb9fb2.zip
new file mode 100644
index 000000000..6938c8cff
Binary files /dev/null and b/.yarn/cache/@babel-types-npm-7.28.5-582d7cca8a-4256bb9fb2.zip differ
diff --git a/.yarn/cache/@emnapi-core-npm-1.7.1-3848c2e48c-260841f6dd.zip b/.yarn/cache/@emnapi-core-npm-1.7.1-3848c2e48c-260841f6dd.zip
new file mode 100644
index 000000000..a6afb0263
Binary files /dev/null and b/.yarn/cache/@emnapi-core-npm-1.7.1-3848c2e48c-260841f6dd.zip differ
diff --git a/.yarn/cache/@emnapi-runtime-npm-1.7.1-42976fbe7a-6fc83f938e.zip b/.yarn/cache/@emnapi-runtime-npm-1.7.1-42976fbe7a-6fc83f938e.zip
new file mode 100644
index 000000000..2b23ed50f
Binary files /dev/null and b/.yarn/cache/@emnapi-runtime-npm-1.7.1-42976fbe7a-6fc83f938e.zip differ
diff --git a/.yarn/cache/@esbuild-darwin-arm64-npm-0.27.2-d675c4a521-10.zip b/.yarn/cache/@esbuild-darwin-arm64-npm-0.27.2-d675c4a521-10.zip
new file mode 100644
index 000000000..df4de2225
Binary files /dev/null and b/.yarn/cache/@esbuild-darwin-arm64-npm-0.27.2-d675c4a521-10.zip differ
diff --git a/.yarn/cache/@esbuild-darwin-x64-npm-0.27.2-ae63bf405f-10.zip b/.yarn/cache/@esbuild-darwin-x64-npm-0.27.2-ae63bf405f-10.zip
new file mode 100644
index 000000000..987fc4966
Binary files /dev/null and b/.yarn/cache/@esbuild-darwin-x64-npm-0.27.2-ae63bf405f-10.zip differ
diff --git a/.yarn/cache/@esbuild-linux-arm64-npm-0.27.2-bf1b0979ac-10.zip b/.yarn/cache/@esbuild-linux-arm64-npm-0.27.2-bf1b0979ac-10.zip
new file mode 100644
index 000000000..32c8cc871
Binary files /dev/null and b/.yarn/cache/@esbuild-linux-arm64-npm-0.27.2-bf1b0979ac-10.zip differ
diff --git a/.yarn/cache/@esbuild-linux-x64-npm-0.27.2-11f1a3d9db-10.zip b/.yarn/cache/@esbuild-linux-x64-npm-0.27.2-11f1a3d9db-10.zip
new file mode 100644
index 000000000..eea4066e3
Binary files /dev/null and b/.yarn/cache/@esbuild-linux-x64-npm-0.27.2-11f1a3d9db-10.zip differ
diff --git a/.yarn/cache/@esbuild-win32-arm64-npm-0.27.2-78a0e828ec-10.zip b/.yarn/cache/@esbuild-win32-arm64-npm-0.27.2-78a0e828ec-10.zip
new file mode 100644
index 000000000..e3bec141d
Binary files /dev/null and b/.yarn/cache/@esbuild-win32-arm64-npm-0.27.2-78a0e828ec-10.zip differ
diff --git a/.yarn/cache/@esbuild-win32-x64-npm-0.27.2-fb03408001-10.zip b/.yarn/cache/@esbuild-win32-x64-npm-0.27.2-fb03408001-10.zip
new file mode 100644
index 000000000..549aacedc
Binary files /dev/null and b/.yarn/cache/@esbuild-win32-x64-npm-0.27.2-fb03408001-10.zip differ
diff --git a/.yarn/cache/@jridgewell-gen-mapping-npm-0.3.13-9bd96ac800-902f8261dc.zip b/.yarn/cache/@jridgewell-gen-mapping-npm-0.3.13-9bd96ac800-902f8261dc.zip
new file mode 100644
index 000000000..e130971fd
Binary files /dev/null and b/.yarn/cache/@jridgewell-gen-mapping-npm-0.3.13-9bd96ac800-902f8261dc.zip differ
diff --git a/.yarn/cache/@jridgewell-trace-mapping-npm-0.3.31-1ae81d75ac-da0283270e.zip b/.yarn/cache/@jridgewell-trace-mapping-npm-0.3.31-1ae81d75ac-da0283270e.zip
new file mode 100644
index 000000000..d61ababcd
Binary files /dev/null and b/.yarn/cache/@jridgewell-trace-mapping-npm-0.3.31-1ae81d75ac-da0283270e.zip differ
diff --git a/.yarn/cache/@napi-rs-wasm-runtime-npm-1.1.0-0e9acce7b0-87c7ab4685.zip b/.yarn/cache/@napi-rs-wasm-runtime-npm-1.1.0-0e9acce7b0-87c7ab4685.zip
new file mode 100644
index 000000000..f18898529
Binary files /dev/null and b/.yarn/cache/@napi-rs-wasm-runtime-npm-1.1.0-0e9acce7b0-87c7ab4685.zip differ
diff --git a/.yarn/cache/@oxc-project-types-npm-0.101.0-8e969d93d4-43a29933af.zip b/.yarn/cache/@oxc-project-types-npm-0.101.0-8e969d93d4-43a29933af.zip
new file mode 100644
index 000000000..aeb9b821b
Binary files /dev/null and b/.yarn/cache/@oxc-project-types-npm-0.101.0-8e969d93d4-43a29933af.zip differ
diff --git a/.yarn/cache/@oxc-project-types-npm-0.103.0-fbdf59d6b5-3c9a1368fb.zip b/.yarn/cache/@oxc-project-types-npm-0.103.0-fbdf59d6b5-3c9a1368fb.zip
new file mode 100644
index 000000000..ef5e540af
Binary files /dev/null and b/.yarn/cache/@oxc-project-types-npm-0.103.0-fbdf59d6b5-3c9a1368fb.zip differ
diff --git a/.yarn/cache/@quansync-fs-npm-1.0.0-869f097647-8a27892b13.zip b/.yarn/cache/@quansync-fs-npm-1.0.0-869f097647-8a27892b13.zip
new file mode 100644
index 000000000..7281201bb
Binary files /dev/null and b/.yarn/cache/@quansync-fs-npm-1.0.0-869f097647-8a27892b13.zip differ
diff --git a/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-beta.53-81c8545753-10.zip b/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-beta.53-81c8545753-10.zip
new file mode 100644
index 000000000..840837c02
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-beta.53-81c8545753-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-beta.55-0d736b0ec0-10.zip b/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-beta.55-0d736b0ec0-10.zip
new file mode 100644
index 000000000..5fc750814
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-darwin-arm64-npm-1.0.0-beta.55-0d736b0ec0-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-beta.53-afe78a815c-10.zip b/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-beta.53-afe78a815c-10.zip
new file mode 100644
index 000000000..cfd2f6b0d
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-beta.53-afe78a815c-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-beta.55-d380c34427-10.zip b/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-beta.55-d380c34427-10.zip
new file mode 100644
index 000000000..5a1672bf8
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-darwin-x64-npm-1.0.0-beta.55-d380c34427-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-beta.53-5250bb6f3b-10.zip b/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-beta.53-5250bb6f3b-10.zip
new file mode 100644
index 000000000..42ef9f681
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-beta.53-5250bb6f3b-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-beta.55-9d8b2a6c10-10.zip b/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-beta.55-9d8b2a6c10-10.zip
new file mode 100644
index 000000000..8740050bc
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-arm64-gnu-npm-1.0.0-beta.55-9d8b2a6c10-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-beta.53-1bf300c71a-10.zip b/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-beta.53-1bf300c71a-10.zip
new file mode 100644
index 000000000..b6ffae765
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-beta.53-1bf300c71a-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-beta.55-32e5dc4380-10.zip b/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-beta.55-32e5dc4380-10.zip
new file mode 100644
index 000000000..4bf443059
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-arm64-musl-npm-1.0.0-beta.55-32e5dc4380-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-beta.53-b563c726f2-10.zip b/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-beta.53-b563c726f2-10.zip
new file mode 100644
index 000000000..79c0447db
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-beta.53-b563c726f2-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-beta.55-894a162cbe-10.zip b/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-beta.55-894a162cbe-10.zip
new file mode 100644
index 000000000..a78908a9b
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-x64-gnu-npm-1.0.0-beta.55-894a162cbe-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-beta.53-768a82203f-10.zip b/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-beta.53-768a82203f-10.zip
new file mode 100644
index 000000000..cefd35fdc
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-beta.53-768a82203f-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-beta.55-307f2bf6b9-10.zip b/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-beta.55-307f2bf6b9-10.zip
new file mode 100644
index 000000000..8271549cd
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-linux-x64-musl-npm-1.0.0-beta.55-307f2bf6b9-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-beta.53-3931203b8e-10.zip b/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-beta.53-3931203b8e-10.zip
new file mode 100644
index 000000000..f21e9dc0c
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-beta.53-3931203b8e-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-beta.55-ff4f2c49fa-10.zip b/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-beta.55-ff4f2c49fa-10.zip
new file mode 100644
index 000000000..dd25d6fa3
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-win32-arm64-msvc-npm-1.0.0-beta.55-ff4f2c49fa-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-beta.53-217f392378-10.zip b/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-beta.53-217f392378-10.zip
new file mode 100644
index 000000000..fa7ecd435
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-beta.53-217f392378-10.zip differ
diff --git a/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-beta.55-060da45ebd-10.zip b/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-beta.55-060da45ebd-10.zip
new file mode 100644
index 000000000..0dcb053a0
Binary files /dev/null and b/.yarn/cache/@rolldown-binding-win32-x64-msvc-npm-1.0.0-beta.55-060da45ebd-10.zip differ
diff --git a/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-beta.53-0e2b6fa8ac-09dab7cbff.zip b/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-beta.53-0e2b6fa8ac-09dab7cbff.zip
new file mode 100644
index 000000000..be8dcec4e
Binary files /dev/null and b/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-beta.53-0e2b6fa8ac-09dab7cbff.zip differ
diff --git a/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-beta.55-71cd79c55b-46ad40e754.zip b/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-beta.55-71cd79c55b-46ad40e754.zip
new file mode 100644
index 000000000..08828924a
Binary files /dev/null and b/.yarn/cache/@rolldown-pluginutils-npm-1.0.0-beta.55-71cd79c55b-46ad40e754.zip differ
diff --git a/.yarn/cache/@types-node-npm-22.19.3-0fc033f9b1-ffee06ce6d.zip b/.yarn/cache/@types-node-npm-22.19.3-0fc033f9b1-ffee06ce6d.zip
new file mode 100644
index 000000000..737822d85
Binary files /dev/null and b/.yarn/cache/@types-node-npm-22.19.3-0fc033f9b1-ffee06ce6d.zip differ
diff --git a/.yarn/cache/@types-ws-npm-8.18.1-61dc106ff0-1ce05e3174.zip b/.yarn/cache/@types-ws-npm-8.18.1-61dc106ff0-1ce05e3174.zip
new file mode 100644
index 000000000..5c915f403
Binary files /dev/null and b/.yarn/cache/@types-ws-npm-8.18.1-61dc106ff0-1ce05e3174.zip differ
diff --git a/.yarn/cache/@typescript-ata-npm-0.9.8-0663e9063b-c0f9daf781.zip b/.yarn/cache/@typescript-ata-npm-0.9.8-0663e9063b-c0f9daf781.zip
new file mode 100644
index 000000000..500bfe18e
Binary files /dev/null and b/.yarn/cache/@typescript-ata-npm-0.9.8-0663e9063b-c0f9daf781.zip differ
diff --git a/.yarn/cache/@vitest-expect-npm-4.0.16-5603b6a8cc-1da98c86d3.zip b/.yarn/cache/@vitest-expect-npm-4.0.16-5603b6a8cc-1da98c86d3.zip
new file mode 100644
index 000000000..f08d79ef7
Binary files /dev/null and b/.yarn/cache/@vitest-expect-npm-4.0.16-5603b6a8cc-1da98c86d3.zip differ
diff --git a/.yarn/cache/@vitest-mocker-npm-4.0.16-3484557c09-3a34c6571e.zip b/.yarn/cache/@vitest-mocker-npm-4.0.16-3484557c09-3a34c6571e.zip
new file mode 100644
index 000000000..50adfeb70
Binary files /dev/null and b/.yarn/cache/@vitest-mocker-npm-4.0.16-3484557c09-3a34c6571e.zip differ
diff --git a/.yarn/cache/@vitest-pretty-format-npm-4.0.16-c951e2304a-914d5d35fb.zip b/.yarn/cache/@vitest-pretty-format-npm-4.0.16-c951e2304a-914d5d35fb.zip
new file mode 100644
index 000000000..29ef40b28
Binary files /dev/null and b/.yarn/cache/@vitest-pretty-format-npm-4.0.16-c951e2304a-914d5d35fb.zip differ
diff --git a/.yarn/cache/@vitest-runner-npm-4.0.16-6e56de81f5-2aed39bb46.zip b/.yarn/cache/@vitest-runner-npm-4.0.16-6e56de81f5-2aed39bb46.zip
new file mode 100644
index 000000000..e77415f55
Binary files /dev/null and b/.yarn/cache/@vitest-runner-npm-4.0.16-6e56de81f5-2aed39bb46.zip differ
diff --git a/.yarn/cache/@vitest-snapshot-npm-4.0.16-3c523b95f7-30f2977c96.zip b/.yarn/cache/@vitest-snapshot-npm-4.0.16-3c523b95f7-30f2977c96.zip
new file mode 100644
index 000000000..322442d20
Binary files /dev/null and b/.yarn/cache/@vitest-snapshot-npm-4.0.16-3c523b95f7-30f2977c96.zip differ
diff --git a/.yarn/cache/@vitest-spy-npm-4.0.16-50c20d921d-76cbabfdd7.zip b/.yarn/cache/@vitest-spy-npm-4.0.16-50c20d921d-76cbabfdd7.zip
new file mode 100644
index 000000000..9e52a423e
Binary files /dev/null and b/.yarn/cache/@vitest-spy-npm-4.0.16-50c20d921d-76cbabfdd7.zip differ
diff --git a/.yarn/cache/@vitest-utils-npm-4.0.16-d08786c148-07fb3c9686.zip b/.yarn/cache/@vitest-utils-npm-4.0.16-d08786c148-07fb3c9686.zip
new file mode 100644
index 000000000..707fbde3c
Binary files /dev/null and b/.yarn/cache/@vitest-utils-npm-4.0.16-d08786c148-07fb3c9686.zip differ
diff --git a/.yarn/cache/ansis-npm-4.2.0-35ae97bdc2-493e15fad2.zip b/.yarn/cache/ansis-npm-4.2.0-35ae97bdc2-493e15fad2.zip
new file mode 100644
index 000000000..0719e6447
Binary files /dev/null and b/.yarn/cache/ansis-npm-4.2.0-35ae97bdc2-493e15fad2.zip differ
diff --git a/.yarn/cache/ast-kit-npm-2.2.0-8d8a4e9bb7-82cf2a8c2d.zip b/.yarn/cache/ast-kit-npm-2.2.0-8d8a4e9bb7-82cf2a8c2d.zip
new file mode 100644
index 000000000..cdccb81cc
Binary files /dev/null and b/.yarn/cache/ast-kit-npm-2.2.0-8d8a4e9bb7-82cf2a8c2d.zip differ
diff --git a/.yarn/cache/birpc-npm-4.0.0-2cc419e494-f4418e2a04.zip b/.yarn/cache/birpc-npm-4.0.0-2cc419e494-f4418e2a04.zip
new file mode 100644
index 000000000..982376952
Binary files /dev/null and b/.yarn/cache/birpc-npm-4.0.0-2cc419e494-f4418e2a04.zip differ
diff --git a/.yarn/cache/chai-npm-6.2.1-df1838f7a6-f7917749e2.zip b/.yarn/cache/chai-npm-6.2.1-df1838f7a6-f7917749e2.zip
new file mode 100644
index 000000000..46760a100
Binary files /dev/null and b/.yarn/cache/chai-npm-6.2.1-df1838f7a6-f7917749e2.zip differ
diff --git a/.yarn/cache/chokidar-npm-5.0.0-2f70d31c86-a1c2a4ee6e.zip b/.yarn/cache/chokidar-npm-5.0.0-2f70d31c86-a1c2a4ee6e.zip
new file mode 100644
index 000000000..de48a7b82
Binary files /dev/null and b/.yarn/cache/chokidar-npm-5.0.0-2f70d31c86-a1c2a4ee6e.zip differ
diff --git a/.yarn/cache/commander-npm-14.0.2-538b84c387-2d202db5e5.zip b/.yarn/cache/commander-npm-14.0.2-538b84c387-2d202db5e5.zip
new file mode 100644
index 000000000..df1ef15be
Binary files /dev/null and b/.yarn/cache/commander-npm-14.0.2-538b84c387-2d202db5e5.zip differ
diff --git a/.yarn/cache/defu-npm-6.1.4-c791c7f2cc-aeffdb4730.zip b/.yarn/cache/defu-npm-6.1.4-c791c7f2cc-aeffdb4730.zip
new file mode 100644
index 000000000..df708b6ab
Binary files /dev/null and b/.yarn/cache/defu-npm-6.1.4-c791c7f2cc-aeffdb4730.zip differ
diff --git a/.yarn/cache/dts-resolver-npm-2.1.3-5deb33a062-9dfa79be6f.zip b/.yarn/cache/dts-resolver-npm-2.1.3-5deb33a062-9dfa79be6f.zip
new file mode 100644
index 000000000..a095b39b8
Binary files /dev/null and b/.yarn/cache/dts-resolver-npm-2.1.3-5deb33a062-9dfa79be6f.zip differ
diff --git a/.yarn/cache/empathic-npm-2.0.0-440d97be6e-90f47d93f8.zip b/.yarn/cache/empathic-npm-2.0.0-440d97be6e-90f47d93f8.zip
new file mode 100644
index 000000000..daca95502
Binary files /dev/null and b/.yarn/cache/empathic-npm-2.0.0-440d97be6e-90f47d93f8.zip differ
diff --git a/.yarn/cache/esbuild-npm-0.27.2-7789e62c6d-7f1229328b.zip b/.yarn/cache/esbuild-npm-0.27.2-7789e62c6d-7f1229328b.zip
new file mode 100644
index 000000000..18b308c08
Binary files /dev/null and b/.yarn/cache/esbuild-npm-0.27.2-7789e62c6d-7f1229328b.zip differ
diff --git a/.yarn/cache/expect-type-npm-1.3.0-95a4384745-a5fada3d0c.zip b/.yarn/cache/expect-type-npm-1.3.0-95a4384745-a5fada3d0c.zip
new file mode 100644
index 000000000..20a597e23
Binary files /dev/null and b/.yarn/cache/expect-type-npm-1.3.0-95a4384745-a5fada3d0c.zip differ
diff --git a/.yarn/cache/framer-plugin-npm-3.9.0-beta.0-571984760c-bb41b0770a.zip b/.yarn/cache/framer-plugin-npm-3.9.0-beta.0-571984760c-bb41b0770a.zip
new file mode 100644
index 000000000..53ad56ccf
Binary files /dev/null and b/.yarn/cache/framer-plugin-npm-3.9.0-beta.0-571984760c-bb41b0770a.zip differ
diff --git a/.yarn/cache/get-tsconfig-npm-4.13.0-009b232bdd-3603c6da30.zip b/.yarn/cache/get-tsconfig-npm-4.13.0-009b232bdd-3603c6da30.zip
new file mode 100644
index 000000000..521c2617f
Binary files /dev/null and b/.yarn/cache/get-tsconfig-npm-4.13.0-009b232bdd-3603c6da30.zip differ
diff --git a/.yarn/cache/hookable-npm-5.5.3-82b0342097-c6cec06f69.zip b/.yarn/cache/hookable-npm-5.5.3-82b0342097-c6cec06f69.zip
new file mode 100644
index 000000000..161d861d3
Binary files /dev/null and b/.yarn/cache/hookable-npm-5.5.3-82b0342097-c6cec06f69.zip differ
diff --git a/.yarn/cache/import-without-cache-npm-0.2.4-2e319e6024-ac263dab13.zip b/.yarn/cache/import-without-cache-npm-0.2.4-2e319e6024-ac263dab13.zip
new file mode 100644
index 000000000..ba70023e7
Binary files /dev/null and b/.yarn/cache/import-without-cache-npm-0.2.4-2e319e6024-ac263dab13.zip differ
diff --git a/.yarn/cache/jsesc-npm-3.1.0-2f4f998cd7-20bd37a142.zip b/.yarn/cache/jsesc-npm-3.1.0-2f4f998cd7-20bd37a142.zip
new file mode 100644
index 000000000..0701df326
Binary files /dev/null and b/.yarn/cache/jsesc-npm-3.1.0-2f4f998cd7-20bd37a142.zip differ
diff --git a/.yarn/cache/magic-string-npm-0.30.21-9a226cb21e-57d5691f41.zip b/.yarn/cache/magic-string-npm-0.30.21-9a226cb21e-57d5691f41.zip
new file mode 100644
index 000000000..53485dc72
Binary files /dev/null and b/.yarn/cache/magic-string-npm-0.30.21-9a226cb21e-57d5691f41.zip differ
diff --git a/.yarn/cache/obug-npm-2.1.1-029730d296-bdcf921336.zip b/.yarn/cache/obug-npm-2.1.1-029730d296-bdcf921336.zip
new file mode 100644
index 000000000..ca87e6388
Binary files /dev/null and b/.yarn/cache/obug-npm-2.1.1-029730d296-bdcf921336.zip differ
diff --git a/.yarn/cache/prettier-npm-3.7.4-78f94d4194-b4d00ea13b.zip b/.yarn/cache/prettier-npm-3.7.4-78f94d4194-b4d00ea13b.zip
new file mode 100644
index 000000000..fde1fe689
Binary files /dev/null and b/.yarn/cache/prettier-npm-3.7.4-78f94d4194-b4d00ea13b.zip differ
diff --git a/.yarn/cache/quansync-npm-1.0.0-0707dd9045-fba7a8e87a.zip b/.yarn/cache/quansync-npm-1.0.0-0707dd9045-fba7a8e87a.zip
new file mode 100644
index 000000000..c024033bb
Binary files /dev/null and b/.yarn/cache/quansync-npm-1.0.0-0707dd9045-fba7a8e87a.zip differ
diff --git a/.yarn/cache/readdirp-npm-5.0.0-82b01a282e-a17a591b51.zip b/.yarn/cache/readdirp-npm-5.0.0-82b01a282e-a17a591b51.zip
new file mode 100644
index 000000000..f62693664
Binary files /dev/null and b/.yarn/cache/readdirp-npm-5.0.0-82b01a282e-a17a591b51.zip differ
diff --git a/.yarn/cache/resolve-pkg-maps-npm-1.0.0-135b70c854-0763150adf.zip b/.yarn/cache/resolve-pkg-maps-npm-1.0.0-135b70c854-0763150adf.zip
new file mode 100644
index 000000000..8e3561c41
Binary files /dev/null and b/.yarn/cache/resolve-pkg-maps-npm-1.0.0-135b70c854-0763150adf.zip differ
diff --git a/.yarn/cache/rolldown-npm-1.0.0-beta.53-db59d0aaea-40713f7a30.zip b/.yarn/cache/rolldown-npm-1.0.0-beta.53-db59d0aaea-40713f7a30.zip
new file mode 100644
index 000000000..04429b8c8
Binary files /dev/null and b/.yarn/cache/rolldown-npm-1.0.0-beta.53-db59d0aaea-40713f7a30.zip differ
diff --git a/.yarn/cache/rolldown-npm-1.0.0-beta.55-f6b6b87530-74e194192b.zip b/.yarn/cache/rolldown-npm-1.0.0-beta.55-f6b6b87530-74e194192b.zip
new file mode 100644
index 000000000..24a5985eb
Binary files /dev/null and b/.yarn/cache/rolldown-npm-1.0.0-beta.55-f6b6b87530-74e194192b.zip differ
diff --git a/.yarn/cache/rolldown-plugin-dts-npm-0.18.4-b1bc8cfadd-d6157bdfa7.zip b/.yarn/cache/rolldown-plugin-dts-npm-0.18.4-b1bc8cfadd-d6157bdfa7.zip
new file mode 100644
index 000000000..247e1a297
Binary files /dev/null and b/.yarn/cache/rolldown-plugin-dts-npm-0.18.4-b1bc8cfadd-d6157bdfa7.zip differ
diff --git a/.yarn/cache/semver-npm-7.7.3-9cf7b3b46c-8dbc3168e0.zip b/.yarn/cache/semver-npm-7.7.3-9cf7b3b46c-8dbc3168e0.zip
new file mode 100644
index 000000000..c94393ee4
Binary files /dev/null and b/.yarn/cache/semver-npm-7.7.3-9cf7b3b46c-8dbc3168e0.zip differ
diff --git a/.yarn/cache/std-env-npm-3.10.0-30d3e2646f-19c9cda4f3.zip b/.yarn/cache/std-env-npm-3.10.0-30d3e2646f-19c9cda4f3.zip
new file mode 100644
index 000000000..8803cc08f
Binary files /dev/null and b/.yarn/cache/std-env-npm-3.10.0-30d3e2646f-19c9cda4f3.zip differ
diff --git a/.yarn/cache/tinyexec-npm-1.0.2-321b713e56-cb709ed424.zip b/.yarn/cache/tinyexec-npm-1.0.2-321b713e56-cb709ed424.zip
new file mode 100644
index 000000000..4e06ed7d2
Binary files /dev/null and b/.yarn/cache/tinyexec-npm-1.0.2-321b713e56-cb709ed424.zip differ
diff --git a/.yarn/cache/tinyrainbow-npm-3.0.3-06ed35d14d-169cc63c15.zip b/.yarn/cache/tinyrainbow-npm-3.0.3-06ed35d14d-169cc63c15.zip
new file mode 100644
index 000000000..55660b3fd
Binary files /dev/null and b/.yarn/cache/tinyrainbow-npm-3.0.3-06ed35d14d-169cc63c15.zip differ
diff --git a/.yarn/cache/tree-kill-npm-1.2.2-3da0e5a759-49117f5f41.zip b/.yarn/cache/tree-kill-npm-1.2.2-3da0e5a759-49117f5f41.zip
new file mode 100644
index 000000000..c9ef40137
Binary files /dev/null and b/.yarn/cache/tree-kill-npm-1.2.2-3da0e5a759-49117f5f41.zip differ
diff --git a/.yarn/cache/tsdown-npm-0.17.4-ea0f38adf5-1fe104c1e0.zip b/.yarn/cache/tsdown-npm-0.17.4-ea0f38adf5-1fe104c1e0.zip
new file mode 100644
index 000000000..a8d97eea9
Binary files /dev/null and b/.yarn/cache/tsdown-npm-0.17.4-ea0f38adf5-1fe104c1e0.zip differ
diff --git a/.yarn/cache/tsx-npm-4.21.0-3bc9626d81-7afedeff85.zip b/.yarn/cache/tsx-npm-4.21.0-3bc9626d81-7afedeff85.zip
new file mode 100644
index 000000000..27615df9a
Binary files /dev/null and b/.yarn/cache/tsx-npm-4.21.0-3bc9626d81-7afedeff85.zip differ
diff --git a/.yarn/cache/typescript-npm-5.9.3-48715be868-c089d9d3da.zip b/.yarn/cache/typescript-npm-5.9.3-48715be868-c089d9d3da.zip
new file mode 100644
index 000000000..0eabff58d
Binary files /dev/null and b/.yarn/cache/typescript-npm-5.9.3-48715be868-c089d9d3da.zip differ
diff --git a/.yarn/cache/typescript-patch-6fda4d02cf-696e1b017b.zip b/.yarn/cache/typescript-patch-6fda4d02cf-696e1b017b.zip
new file mode 100644
index 000000000..6cd392703
Binary files /dev/null and b/.yarn/cache/typescript-patch-6fda4d02cf-696e1b017b.zip differ
diff --git a/.yarn/cache/unconfig-core-npm-7.4.2-b40a0ca292-837d196508.zip b/.yarn/cache/unconfig-core-npm-7.4.2-b40a0ca292-837d196508.zip
new file mode 100644
index 000000000..3f7ff8f9e
Binary files /dev/null and b/.yarn/cache/unconfig-core-npm-7.4.2-b40a0ca292-837d196508.zip differ
diff --git a/.yarn/cache/unrun-npm-0.2.20-33a1198309-a0d33e12f8.zip b/.yarn/cache/unrun-npm-0.2.20-33a1198309-a0d33e12f8.zip
new file mode 100644
index 000000000..1a660837c
Binary files /dev/null and b/.yarn/cache/unrun-npm-0.2.20-33a1198309-a0d33e12f8.zip differ
diff --git a/.yarn/cache/vite-npm-7.3.0-70284f6792-044490133a.zip b/.yarn/cache/vite-npm-7.3.0-70284f6792-044490133a.zip
new file mode 100644
index 000000000..34732b2f6
Binary files /dev/null and b/.yarn/cache/vite-npm-7.3.0-70284f6792-044490133a.zip differ
diff --git a/.yarn/cache/vitest-npm-4.0.16-09dd6df1e3-22b3806988.zip b/.yarn/cache/vitest-npm-4.0.16-09dd6df1e3-22b3806988.zip
new file mode 100644
index 000000000..f5fb56a29
Binary files /dev/null and b/.yarn/cache/vitest-npm-4.0.16-09dd6df1e3-22b3806988.zip differ
diff --git a/.yarn/cache/ws-npm-8.18.3-665d39209d-725964438d.zip b/.yarn/cache/ws-npm-8.18.3-665d39209d-725964438d.zip
new file mode 100644
index 000000000..f50dd5806
Binary files /dev/null and b/.yarn/cache/ws-npm-8.18.3-665d39209d-725964438d.zip differ
diff --git a/packages/code-link-cli/README.md b/packages/code-link-cli/README.md
new file mode 100644
index 000000000..66b29ab73
--- /dev/null
+++ b/packages/code-link-cli/README.md
@@ -0,0 +1,3 @@
+# Framer Code Link CLI
+
+Two-way syncing of code components between Framer and your computer.
diff --git a/packages/code-link-cli/package.json b/packages/code-link-cli/package.json
new file mode 100644
index 000000000..2cab23509
--- /dev/null
+++ b/packages/code-link-cli/package.json
@@ -0,0 +1,40 @@
+{
+ "name": "framer-code-link",
+ "version": "0.4.3",
+ "description": "CLI tool for syncing Framer code components - controller-centric architecture",
+ "main": "dist/index.mjs",
+ "type": "module",
+ "bin": "./dist/index.mjs",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "dev": "NODE_ENV=development tsx src/index.ts",
+ "build": "tsdown",
+ "start": "node dist/index.mjs",
+ "test": "vitest run"
+ },
+ "keywords": [
+ "framer",
+ "sync",
+ "code-components"
+ ],
+ "author": "",
+ "license": "MIT",
+ "dependencies": {
+ "@typescript/ata": "^0.9.8",
+ "chokidar": "^5.0.0",
+ "commander": "^14.0.2",
+ "prettier": "^3.7.4",
+ "typescript": "^5.9.3",
+ "ws": "^8.18.3"
+ },
+ "devDependencies": {
+ "@code-link/shared": "workspace:*",
+ "@types/node": "^22.19.2",
+ "@types/ws": "^8.18.1",
+ "tsdown": "^0.17.4",
+ "tsx": "^4.21.0",
+ "vitest": "^4.0.15"
+ }
+}
diff --git a/packages/code-link-cli/src/controller.test.ts b/packages/code-link-cli/src/controller.test.ts
new file mode 100644
index 000000000..65782d8a8
--- /dev/null
+++ b/packages/code-link-cli/src/controller.test.ts
@@ -0,0 +1,904 @@
+import { describe, it, expect } from "vitest"
+import { transition } from "./controller.js"
+import { createHashTracker } from "./utils/hash-tracker.js"
+
+import type { WebSocket } from "ws"
+import { filterEchoedFiles } from "./helpers/files.js"
+
+describe("Sync State Machine", () => {
+ // Connection Lifecycle Tests
+ describe("Connection Lifecycle", () => {
+ it("transitions from disconnected to handshaking on HANDSHAKE", () => {
+ const initialState = {
+ mode: "disconnected" as const,
+ socket: null,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const mockSocket = {} as WebSocket
+ const result = transition(initialState, {
+ type: "HANDSHAKE",
+ socket: mockSocket,
+ projectInfo: { projectId: "test-id", projectName: "Test Project" },
+ })
+
+ expect(result.state.mode).toBe("handshaking")
+ expect(result.state.socket).toBe(mockSocket)
+ expect(result.effects).toHaveLength(3)
+ expect(result.effects[0]).toMatchObject({ type: "INIT_WORKSPACE" })
+ expect(result.effects[1]).toMatchObject({ type: "LOAD_PERSISTED_STATE" })
+ expect(result.effects[2]).toMatchObject({
+ type: "SEND_MESSAGE",
+ payload: { type: "request-files" },
+ })
+ })
+
+ it("ignores handshake when not in disconnected mode", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "HANDSHAKE",
+ socket: {} as WebSocket,
+ projectInfo: { projectId: "test-id", projectName: "Test Project" },
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects).toHaveLength(1)
+ expect(result.effects[0]).toMatchObject({
+ type: "LOG",
+ level: "warn",
+ })
+ })
+
+ it("transitions to disconnected and persists state on DISCONNECT", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map([
+ [
+ "Test.tsx",
+ {
+ localHash: "abc123",
+ lastSyncedHash: "abc123",
+ lastRemoteTimestamp: Date.now(),
+ },
+ ],
+ ]),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, { type: "DISCONNECT" })
+
+ expect(result.state.mode).toBe("disconnected")
+ expect(result.state.socket).toBe(null)
+ expect(result.effects).toHaveLength(2)
+ expect(result.effects[0]).toMatchObject({ type: "PERSIST_STATE" })
+ expect(result.effects[1]).toMatchObject({
+ type: "LOG",
+ level: "debug",
+ })
+ })
+ })
+
+ // File Synchronization Tests
+ describe("File Synchronization", () => {
+ it("transitions to snapshot_processing on FILE_LIST and emits DETECT_CONFLICTS", () => {
+ const initialState = {
+ mode: "handshaking" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const remoteFiles = [
+ { name: "Test.tsx", content: "remote content", modifiedAt: Date.now() },
+ ]
+
+ const result = transition(initialState, {
+ type: "FILE_LIST",
+ files: remoteFiles,
+ })
+
+ expect(result.state.mode).toBe("snapshot_processing")
+ expect(result.state.pendingRemoteChanges).toEqual(remoteFiles)
+ expect(result.effects).toHaveLength(2)
+ expect(result.effects[0]).toMatchObject({
+ type: "LOG",
+ level: "debug",
+ })
+ expect(result.effects[1]).toMatchObject({
+ type: "DETECT_CONFLICTS",
+ remoteFiles,
+ })
+ })
+
+ it("ignores FILE_LIST when not in handshaking mode", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "FILE_LIST",
+ files: [],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects).toHaveLength(1)
+ expect(result.effects[0]).toMatchObject({
+ type: "LOG",
+ level: "warn",
+ })
+ })
+
+ it("applies remote FILE_CHANGE immediately in watching mode", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const file = {
+ name: "Test.tsx",
+ content: "new content",
+ modifiedAt: Date.now(),
+ }
+
+ const result = transition(initialState, {
+ type: "FILE_CHANGE",
+ file,
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some((e) => e.type === "WRITE_FILES")).toBe(true)
+ })
+
+ it("queues remote FILE_CHANGE during snapshot processing", () => {
+ const initialState = {
+ mode: "snapshot_processing" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const file = {
+ name: "Test.tsx",
+ content: "new content",
+ modifiedAt: Date.now(),
+ }
+
+ const result = transition(initialState, {
+ type: "FILE_CHANGE",
+ file,
+ })
+
+ expect(result.state.mode).toBe("snapshot_processing")
+ expect(result.state.pendingRemoteChanges).toHaveLength(1)
+ expect(result.state.pendingRemoteChanges).toContainEqual(file)
+ expect(result.effects.some((e) => e.type === "WRITE_FILES")).toBe(false)
+ })
+
+ it("emits SEND_LOCAL_CHANGE for local file add/change in watching mode", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "change",
+ relativePath: "Test.tsx",
+ content: "export const Test = () =>
Test
",
+ },
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some((e) => e.type === "SEND_LOCAL_CHANGE")).toBe(
+ true
+ )
+ const sendEffect = result.effects.find(
+ (e) => e.type === "SEND_LOCAL_CHANGE"
+ )
+ expect(sendEffect).toMatchObject({
+ type: "SEND_LOCAL_CHANGE",
+ fileName: "Test.tsx",
+ content: "export const Test = () => Test
",
+ })
+ })
+
+ it("ignores local WATCHER_EVENT when not in watching mode", () => {
+ const initialState = {
+ mode: "handshaking" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "change",
+ relativePath: "Test.tsx",
+ content: "content",
+ },
+ })
+
+ expect(result.effects.some((e) => e.type === "SEND_LOCAL_CHANGE")).toBe(
+ false
+ )
+ })
+
+ it("ignores local WATCHER_EVENT when disconnected", () => {
+ const initialState = {
+ mode: "disconnected" as const,
+ socket: null,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "change",
+ relativePath: "Test.tsx",
+ content: "content",
+ },
+ })
+
+ expect(result.effects.some((e) => e.type === "SEND_LOCAL_CHANGE")).toBe(
+ false
+ )
+ })
+
+ it("emits LIST_LOCAL_FILES on REQUEST_FILES when in watching mode", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "REQUEST_FILES",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(result.effects.some((e) => e.type === "LIST_LOCAL_FILES")).toBe(
+ true
+ )
+ })
+
+ it("rejects REQUEST_FILES when disconnected", () => {
+ const initialState = {
+ mode: "disconnected" as const,
+ socket: null,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "REQUEST_FILES",
+ })
+
+ expect(result.state.mode).toBe("disconnected")
+ expect(result.effects.some((e) => e.type === "LIST_LOCAL_FILES")).toBe(
+ false
+ )
+ expect(
+ result.effects.some((e) => e.type === "LOG" && e.level === "warn")
+ ).toBe(true)
+ })
+
+ it("updates file metadata on FILE_SYNCED_CONFIRMATION", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map([
+ [
+ "Test.tsx",
+ {
+ baseRemoteHash: "abc123",
+ lastRemoteTimestamp: 1000,
+ },
+ ],
+ ]),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "FILE_SYNCED_CONFIRMATION",
+ fileName: "Test.tsx",
+ remoteModifiedAt: 2000,
+ })
+
+ expect(
+ result.effects.some((e) => e.type === "UPDATE_FILE_METADATA")
+ ).toBe(true)
+ })
+
+ it("creates metadata entry on FILE_SYNCED_CONFIRMATION if file not tracked", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "FILE_SYNCED_CONFIRMATION",
+ fileName: "NewFile.tsx",
+ remoteModifiedAt: 3000,
+ })
+
+ // Should not throw - creates new entry
+ expect(result.state.mode).toBe("watching")
+ })
+ })
+
+ // Deletion Safety Tests
+ // Remote → Local: Auto-applies (Framer is source of truth)
+ // Local → Remote: Requires confirmation (protects source of truth)
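+ // Rough sketch of the event → effect flow exercised by the tests below
+ // (an illustrative summary, not an exhaustive contract):
+ //   REMOTE_FILE_DELETE      -> DELETE_LOCAL_FILES + PERSIST_STATE
+ //   WATCHER_EVENT (delete)  -> REQUEST_LOCAL_DELETE_DECISION
+ //   LOCAL_DELETE_APPROVED   -> DELETE_LOCAL_FILES + PERSIST_STATE
+ //   LOCAL_DELETE_REJECTED   -> WRITE_FILES (restores the file's content)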
+ describe("Deletion Safety", () => {
+ it("auto-applies remote deletions to local filesystem", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map([
+ [
+ "Test.tsx",
+ {
+ localHash: "abc123",
+ lastSyncedHash: "abc123",
+ lastRemoteTimestamp: Date.now(),
+ },
+ ],
+ ]),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "REMOTE_FILE_DELETE",
+ fileName: "Test.tsx",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ // CRITICAL: Remote deletions immediately emit DELETE_LOCAL_FILES
+ expect(result.effects.some((e) => e.type === "DELETE_LOCAL_FILES")).toBe(
+ true
+ )
+ const deleteEffect = result.effects.find(
+ (e) => e.type === "DELETE_LOCAL_FILES"
+ )
+ expect(deleteEffect).toMatchObject({
+ type: "DELETE_LOCAL_FILES",
+ names: ["Test.tsx"],
+ })
+ expect(result.effects.some((e) => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("auto-applies remote deletions during snapshot processing", () => {
+ const initialState = {
+ mode: "snapshot_processing" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "REMOTE_FILE_DELETE",
+ fileName: "Test.tsx",
+ })
+
+ expect(result.state.mode).toBe("snapshot_processing")
+ expect(result.effects.some((e) => e.type === "DELETE_LOCAL_FILES")).toBe(
+ true
+ )
+ })
+
+ it("rejects remote deletions while disconnected", () => {
+ const initialState = {
+ mode: "disconnected" as const,
+ socket: null,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "REMOTE_FILE_DELETE",
+ fileName: "Test.tsx",
+ })
+
+ expect(result.state.mode).toBe("disconnected")
+ expect(result.effects.some((e) => e.type === "DELETE_LOCAL_FILES")).toBe(
+ false
+ )
+ expect(
+ result.effects.some((e) => e.type === "LOG" && e.level === "warn")
+ ).toBe(true)
+ })
+
+ it("prompts user before propagating local delete to Framer", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "WATCHER_EVENT",
+ event: {
+ kind: "delete",
+ relativePath: "Test.tsx",
+ },
+ })
+
+ // CRITICAL: Local deletes do NOT immediately send to Framer
+ // They emit REQUEST_LOCAL_DELETE_DECISION to ask user first
+ expect(
+ result.effects.some((e) => e.type === "REQUEST_LOCAL_DELETE_DECISION")
+ ).toBe(true)
+ // Should NOT have SEND_MESSAGE with file-delete
+ expect(
+ result.effects.some(
+ (e) =>
+ e.type === "SEND_MESSAGE" &&
+ "payload" in e &&
+ e.payload?.type === "file-delete"
+ )
+ ).toBe(false)
+ })
+
+ it("sends delete to Framer only after user approval", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map([
+ [
+ "Test.tsx",
+ {
+ localHash: "abc123",
+ lastSyncedHash: "abc123",
+ lastRemoteTimestamp: Date.now(),
+ },
+ ],
+ ]),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "LOCAL_DELETE_APPROVED",
+ fileName: "Test.tsx",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ // After approval, the delete is applied locally
+ expect(result.effects.some((e) => e.type === "DELETE_LOCAL_FILES")).toBe(
+ true
+ )
+ expect(result.effects.some((e) => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("does NOT send delete to Framer when user rejects - restores file instead", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "LOCAL_DELETE_REJECTED",
+ fileName: "Test.tsx",
+ content: "restored content",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ // File is restored locally
+ expect(result.effects.some((e) => e.type === "WRITE_FILES")).toBe(true)
+ const writeEffect = result.effects.find((e) => e.type === "WRITE_FILES")
+ expect(writeEffect).toMatchObject({
+ type: "WRITE_FILES",
+ files: [
+ {
+ name: "Test.tsx",
+ content: "restored content",
+ },
+ ],
+ })
+ // Should NOT send delete to Framer
+ expect(
+ result.effects.some(
+ (e) =>
+ e.type === "SEND_MESSAGE" &&
+ "payload" in e &&
+ e.payload?.type === "file-delete"
+ )
+ ).toBe(false)
+ })
+ })
+
+ // Conflict Resolution Tests
+ describe("Conflict Resolution", () => {
+ it("applies safe writes and transitions to watching when no conflicts", () => {
+ const initialState = {
+ mode: "snapshot_processing" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [],
+ safeWrites: [
+ {
+ name: "Test.tsx",
+ content: "new content",
+ modifiedAt: Date.now(),
+ },
+ ],
+ localOnly: [],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect("pendingConflicts" in result.state).toBe(false)
+ expect(result.effects.length).toBeGreaterThan(2)
+ expect(result.effects.some((e) => e.type === "WRITE_FILES")).toBe(true)
+ expect(result.effects.some((e) => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("transitions to conflict_resolution when manual conflicts exist", () => {
+ const initialState = {
+ mode: "snapshot_processing" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "local content",
+ remoteContent: "remote content",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now() + 1000,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICTS_DETECTED",
+ conflicts: [conflict],
+ safeWrites: [],
+ localOnly: [],
+ })
+
+ expect(result.state.mode).toBe("conflict_resolution")
+ if (result.state.mode === "conflict_resolution") {
+ expect(result.state.pendingConflicts).toHaveLength(1)
+ }
+ expect(
+ result.effects.some((e) => e.type === "REQUEST_CONFLICT_VERSIONS")
+ ).toBe(true)
+ })
+
+ it("applies all remote versions when user picks remote", () => {
+ const conflict1 = {
+ fileName: "Test1.tsx",
+ localContent: "local 1",
+ remoteContent: "remote 1",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now() + 1000,
+ }
+ const conflict2 = {
+ fileName: "Test2.tsx",
+ localContent: "local 2",
+ remoteContent: "remote 2",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now() + 1000,
+ }
+
+ const initialState = {
+ mode: "conflict_resolution" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingConflicts: [conflict1, conflict2],
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICTS_RESOLVED",
+ resolution: "remote",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect("pendingConflicts" in result.state).toBe(false)
+
+ const writeEffects = result.effects.filter(
+ (e) => e.type === "WRITE_FILES"
+ )
+ expect(writeEffects).toHaveLength(2)
+ expect(writeEffects[0]).toMatchObject({
+ type: "WRITE_FILES",
+ files: [{ name: "Test1.tsx", content: "remote 1" }],
+ })
+ expect(writeEffects[1]).toMatchObject({
+ type: "WRITE_FILES",
+ files: [{ name: "Test2.tsx", content: "remote 2" }],
+ })
+ expect(result.effects.some((e) => e.type === "PERSIST_STATE")).toBe(true)
+ })
+
+ it("sends all local versions when user picks local", () => {
+ const conflict1 = {
+ fileName: "Test1.tsx",
+ localContent: "local 1",
+ remoteContent: "remote 1",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now() + 1000,
+ }
+ const conflict2 = {
+ fileName: "Test2.tsx",
+ localContent: "local 2",
+ remoteContent: "remote 2",
+ localModifiedAt: Date.now(),
+ remoteModifiedAt: Date.now() + 1000,
+ }
+
+ const initialState = {
+ mode: "conflict_resolution" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingConflicts: [conflict1, conflict2],
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICTS_RESOLVED",
+ resolution: "local",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect("pendingConflicts" in result.state).toBe(false)
+
+ const sendEffects = result.effects.filter(
+ (e) => e.type === "SEND_MESSAGE"
+ )
+ expect(sendEffects).toHaveLength(2)
+ expect(sendEffects[0]).toMatchObject({
+ payload: {
+ type: "file-change",
+ fileName: "Test1.tsx",
+ content: "local 1",
+ },
+ })
+ expect(sendEffects[1]).toMatchObject({
+ payload: {
+ type: "file-change",
+ fileName: "Test2.tsx",
+ content: "local 2",
+ },
+ })
+ })
+
+ it("ignores resolution when not in conflict_resolution mode", () => {
+ const initialState = {
+ mode: "watching" as const,
+ socket: {} as WebSocket,
+ files: new Map(),
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICTS_RESOLVED",
+ resolution: "remote",
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(
+ result.effects.some((e) => e.type === "LOG" && e.level === "warn")
+ ).toBe(true)
+ })
+
+ it("auto-applies local changes when remote is unchanged", () => {
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "local content",
+ remoteContent: "remote content",
+ localModifiedAt: 1000,
+ remoteModifiedAt: 2000,
+ lastSyncedAt: 5_000,
+ localClean: false,
+ }
+
+ const initialState = {
+ mode: "conflict_resolution" as const,
+ socket: {} as WebSocket,
+ pendingConflicts: [conflict],
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: [{ fileName: "Test.tsx", latestRemoteVersionMs: 5_000 }],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(
+ result.effects.some((effect) => effect.type === "SEND_LOCAL_CHANGE")
+ ).toBe(true)
+ expect(
+ result.effects.some((effect) => effect.type === "PERSIST_STATE")
+ ).toBe(true)
+ })
+
+ it("auto-applies remote changes when local is clean", () => {
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "local content",
+ remoteContent: "remote content",
+ localModifiedAt: 1000,
+ remoteModifiedAt: 2000,
+ lastSyncedAt: 5_000,
+ localClean: true,
+ }
+
+ const initialState = {
+ mode: "conflict_resolution" as const,
+ socket: {} as WebSocket,
+ pendingConflicts: [conflict],
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: [{ fileName: "Test.tsx", latestRemoteVersionMs: 10_000 }],
+ })
+
+ expect(result.state.mode).toBe("watching")
+ expect(
+ result.effects.some((effect) => effect.type === "WRITE_FILES")
+ ).toBe(true)
+ })
+
+ it("requests manual decisions when both sides changed", () => {
+ const conflict = {
+ fileName: "Test.tsx",
+ localContent: "local content",
+ remoteContent: "remote content",
+ localModifiedAt: 1000,
+ remoteModifiedAt: 2000,
+ lastSyncedAt: 5_000,
+ localClean: false,
+ }
+
+ const initialState = {
+ mode: "conflict_resolution" as const,
+ socket: {} as WebSocket,
+ pendingConflicts: [conflict],
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const result = transition(initialState, {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: [{ fileName: "Test.tsx", latestRemoteVersionMs: 9_000 }],
+ })
+
+ expect(result.state.mode).toBe("conflict_resolution")
+ expect(
+ result.effects.some(
+ (effect) => effect.type === "REQUEST_CONFLICT_DECISIONS"
+ )
+ ).toBe(true)
+ if (result.state.mode === "conflict_resolution") {
+ expect(result.state.pendingConflicts).toHaveLength(1)
+ }
+ })
+ })
+
+ // Echo Prevention Tests
+ describe("Echo Prevention", () => {
+ it("skips inbound file-change that matches last local send", () => {
+ const hashTracker = createHashTracker()
+ hashTracker.remember("Hey.tsx", "content")
+
+ const filtered = filterEchoedFiles(
+ [
+ {
+ name: "Hey.tsx",
+ content: "content",
+ modifiedAt: Date.now(),
+ },
+ ],
+ hashTracker
+ )
+
+ expect(filtered).toHaveLength(0)
+ })
+
+ it("keeps inbound change when content differs", () => {
+ const hashTracker = createHashTracker()
+ hashTracker.remember("Hey.tsx", "old content")
+
+ const filtered = filterEchoedFiles(
+ [
+ {
+ name: "Hey.tsx",
+ content: "new content",
+ modifiedAt: Date.now(),
+ },
+ ],
+ hashTracker
+ )
+
+ expect(filtered).toHaveLength(1)
+ expect(filtered[0]?.content).toBe("new content")
+ })
+ })
+})
diff --git a/packages/code-link-cli/src/controller.ts b/packages/code-link-cli/src/controller.ts
new file mode 100644
index 000000000..d2a954c78
--- /dev/null
+++ b/packages/code-link-cli/src/controller.ts
@@ -0,0 +1,1404 @@
+/**
+ * CLI Controller
+ *
+ * Owns all runtime state and orchestrates the sync lifecycle.
+ * Helpers should provide data, never holding control or callbacks.
+ */
+
+import fs from "fs/promises"
+import type { WebSocket } from "ws"
+import type {
+ Config,
+ IncomingMessage,
+ OutgoingMessage,
+ FileInfo,
+ Conflict,
+ WatcherEvent,
+ ConflictVersionData,
+} from "./types.js"
+import { initConnection, sendMessage } from "./helpers/connection.js"
+import { initWatcher } from "./helpers/watcher.js"
+import {
+ listFiles,
+ detectConflicts,
+ writeRemoteFiles,
+ deleteLocalFile,
+ readFileSafe,
+ autoResolveConflicts,
+ filterEchoedFiles,
+} from "./helpers/files.js"
+import { Installer } from "./helpers/installer.js"
+import { createHashTracker } from "./utils/hash-tracker.js"
+import {
+ info,
+ warn,
+ error,
+ success,
+ debug,
+ status,
+ fileDown,
+ fileUp,
+ fileDelete,
+ scheduleDisconnectMessage,
+ cancelDisconnectMessage,
+ didShowDisconnect,
+ wasRecentlyDisconnected,
+ resetDisconnectState,
+} from "./utils/logging.js"
+import { hashFileContent } from "./utils/state-persistence.js"
+import {
+ FileMetadataCache,
+ type FileSyncMetadata,
+} from "./utils/file-metadata-cache.js"
+import { UserActionCoordinator } from "./helpers/user-actions.js"
+import { validateIncomingChange } from "./helpers/sync-validator.js"
+import { findOrCreateProjectDir } from "./utils/project.js"
+import { pluralize, shortProjectHash } from "@code-link/shared"
+
+/**
+ * Explicit sync lifecycle modes
+ */
+export type SyncMode =
+ | "disconnected"
+ | "handshaking"
+ | "snapshot_processing"
+ | "conflict_resolution"
+ | "watching"
+
+/**
+ * Pending operation for echo suppression and replay
+ */
+type PendingOperation =
+ | { id: string; type: "write"; file: string; hash: string }
+ | { id: string; type: "delete"; file: string; previousHash?: string }
+
+/**
+ * Shared state that persists across all lifecycle modes
+ */
+interface SyncStateBase {
+ pendingRemoteChanges: FileInfo[]
+ pendingOperations: Map<string, PendingOperation>
+ nextOperationId: number
+}
+
+type DisconnectedState = SyncStateBase & {
+ mode: "disconnected"
+ socket: null
+}
+
+type HandshakingState = SyncStateBase & {
+ mode: "handshaking"
+ socket: WebSocket
+}
+
+type SnapshotProcessingState = SyncStateBase & {
+ mode: "snapshot_processing"
+ socket: WebSocket
+}
+
+type ConflictResolutionState = SyncStateBase & {
+ mode: "conflict_resolution"
+ socket: WebSocket
+ pendingConflicts: Conflict[]
+}
+
+type WatchingState = SyncStateBase & {
+ mode: "watching"
+ socket: WebSocket
+}
+
+export type SyncState =
+ | DisconnectedState
+ | HandshakingState
+ | SnapshotProcessingState
+ | ConflictResolutionState
+ | WatchingState
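+
+// Illustrative only: the `mode` discriminant narrows the union, so
+// mode-specific fields (e.g. `pendingConflicts`) are only reachable after a
+// mode check. A minimal sketch (hypothetical helper, not used elsewhere):
+//
+//   function describeState(state: SyncState): string {
+//     if (state.mode === "conflict_resolution") {
+//       return `resolving ${state.pendingConflicts.length} conflict(s)`
+//     }
+//     return state.mode
+//   }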
+
+/**
+ * Events that drive state transitions
+ */
+type SyncEvent =
+ | {
+ type: "HANDSHAKE"
+ socket: WebSocket
+ projectInfo: { projectId: string; projectName: string }
+ }
+ | { type: "REQUEST_FILES" }
+ | { type: "FILE_LIST"; files: FileInfo[] }
+ | {
+ type: "CONFLICTS_DETECTED"
+ conflicts: Conflict[]
+ safeWrites: FileInfo[]
+ localOnly: FileInfo[]
+ }
+ | { type: "FILE_CHANGE"; file: FileInfo; fileMeta?: FileSyncMetadata }
+ | { type: "REMOTE_FILE_DELETE"; fileName: string }
+ | { type: "LOCAL_DELETE_APPROVED"; fileName: string }
+ | { type: "LOCAL_DELETE_REJECTED"; fileName: string; content: string }
+ | {
+ type: "CONFLICTS_RESOLVED"
+ resolution: "local" | "remote"
+ }
+ | {
+ type: "FILE_SYNCED_CONFIRMATION"
+ fileName: string
+ remoteModifiedAt: number
+ }
+ | { type: "DISCONNECT" }
+ | { type: "WATCHER_EVENT"; event: WatcherEvent }
+ | {
+ type: "CONFLICT_VERSION_RESPONSE"
+ versions: ConflictVersionData[]
+ }
+
+/**
+ * Side effects emitted by transitions
+ */
+type Effect =
+ | {
+ type: "INIT_WORKSPACE"
+ projectInfo: { projectId: string; projectName: string }
+ }
+ | { type: "LOAD_PERSISTED_STATE" }
+ | { type: "SEND_MESSAGE"; payload: OutgoingMessage }
+ | { type: "LIST_LOCAL_FILES" }
+ | { type: "DETECT_CONFLICTS"; remoteFiles: FileInfo[] }
+ | {
+ type: "WRITE_FILES"
+ files: FileInfo[]
+ silent?: boolean
+ skipEcho?: boolean
+ }
+ | { type: "DELETE_LOCAL_FILES"; names: string[] }
+ | { type: "REQUEST_CONFLICT_DECISIONS"; conflicts: Conflict[] }
+ | { type: "REQUEST_CONFLICT_VERSIONS"; conflicts: Conflict[] }
+ | {
+ type: "REQUEST_DELETE_CONFIRMATION"
+ fileName: string
+ requireConfirmation: boolean
+ }
+ | {
+ type: "UPDATE_FILE_METADATA"
+ fileName: string
+ remoteModifiedAt: number
+ }
+ | {
+ type: "SEND_LOCAL_CHANGE"
+ fileName: string
+ content: string
+ }
+ | {
+ type: "REQUEST_LOCAL_DELETE_DECISION"
+ fileName: string
+ requireConfirmation: boolean
+ }
+ | { type: "PERSIST_STATE" }
+ | {
+ type: "SYNC_COMPLETE"
+ totalCount: number
+ updatedCount: number
+ unchangedCount: number
+ }
+ | {
+ type: "LOG"
+ level: "info" | "debug" | "warn" | "success"
+ message: string
+ }
+
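+// The transition function below stays pure; callers apply the returned
+// effects in order. A minimal sketch of that loop (`dispatch`, `current`,
+// and `runEffect` are hypothetical names used only for illustration):
+//
+//   function dispatch(event: SyncEvent): void {
+//     const { state: next, effects } = transition(current, event)
+//     current = next
+//     for (const effect of effects) runEffect(effect)
+//   }
+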
+/** Log helper */
+function log(
+ level: "info" | "debug" | "warn" | "success",
+ message: string
+): Effect {
+ return { type: "LOG", level, message }
+}
+
+/**
+ * Pure state transition function
+ * Takes current state + event, returns new state + effects to execute
+ */
+function transition(
+ state: SyncState,
+ event: SyncEvent
+): { state: SyncState; effects: Effect[] } {
+ const effects: Effect[] = []
+
+ switch (event.type) {
+ case "HANDSHAKE": {
+ if (state.mode !== "disconnected") {
+ effects.push(
+ log("warn", `Received HANDSHAKE in mode ${state.mode}, ignoring`)
+ )
+ return { state, effects }
+ }
+
+ effects.push(
+ { type: "INIT_WORKSPACE", projectInfo: event.projectInfo },
+ { type: "LOAD_PERSISTED_STATE" },
+ { type: "SEND_MESSAGE", payload: { type: "request-files" } }
+ )
+
+ return {
+ state: {
+ ...state,
+ mode: "handshaking",
+ socket: event.socket,
+ },
+ effects,
+ }
+ }
+
+ case "FILE_SYNCED_CONFIRMATION": {
+ // Remote confirms they received our local change
+ effects.push(log("debug", `Remote confirmed sync: ${event.fileName}`), {
+ type: "UPDATE_FILE_METADATA",
+ fileName: event.fileName,
+ remoteModifiedAt: event.remoteModifiedAt,
+ })
+
+ return { state, effects }
+ }
+
+ case "DISCONNECT": {
+ effects.push(
+ { type: "PERSIST_STATE" },
+ log("debug", "Disconnected, persisting state")
+ )
+
+ if (state.mode === "conflict_resolution") {
+ const { pendingConflicts: _discarded, ...rest } = state
+ return {
+ state: {
+ ...rest,
+ mode: "disconnected",
+ socket: null,
+ },
+ effects,
+ }
+ }
+
+ return {
+ state: {
+ ...state,
+ mode: "disconnected",
+ socket: null,
+ },
+ effects,
+ }
+ }
+
+ case "REQUEST_FILES": {
+ // Plugin is asking for our local file list
+ // Valid in any mode except disconnected
+ if (state.mode === "disconnected") {
+ effects.push(
+ log("warn", "Received REQUEST_FILES while disconnected, ignoring")
+ )
+ return { state, effects }
+ }
+
+ effects.push(log("debug", "Plugin requested file list"), {
+ type: "LIST_LOCAL_FILES",
+ })
+
+ return { state, effects }
+ }
+
+ case "FILE_LIST": {
+ if (state.mode !== "handshaking") {
+ effects.push(
+ log("warn", `Received FILE_LIST in mode ${state.mode}, ignoring`)
+ )
+ return { state, effects }
+ }
+
+ effects.push(
+ log("debug", `Received file list: ${event.files.length} files`)
+ )
+
+ // During initial file list, detect conflicts between remote snapshot and local files
+ effects.push({
+ type: "DETECT_CONFLICTS",
+ remoteFiles: event.files,
+ })
+
+ // Transition to snapshot_processing - conflict detection effect will determine next mode
+ return {
+ state: {
+ ...state,
+ mode: "snapshot_processing",
+ pendingRemoteChanges: event.files,
+ },
+ effects,
+ }
+ }
+
+ case "CONFLICTS_DETECTED": {
+ if (state.mode !== "snapshot_processing") {
+ effects.push(
+ log(
+ "warn",
+ `Received CONFLICTS_DETECTED in mode ${state.mode}, ignoring`
+ )
+ )
+ return { state, effects }
+ }
+
+ const { conflicts, safeWrites, localOnly } = event
+
+ // detectConflicts returns:
+ // - safeWrites = files we can apply (remote-only or local unchanged)
+ // - conflicts = files that need manual resolution (content or deletion conflicts)
+ // - localOnly = files to upload
+ // (unchanged files have metadata recorded in DETECT_CONFLICTS executor)
+
+ // Apply safe writes
+ if (safeWrites.length > 0) {
+ effects.push(
+ log("debug", `Applying ${safeWrites.length} safe writes`),
+ { type: "WRITE_FILES", files: safeWrites, silent: true }
+ )
+ }
+
+ // Upload local-only files
+ if (localOnly.length > 0) {
+ effects.push(
+ log("debug", `Uploading ${localOnly.length} local-only files`)
+ )
+ for (const file of localOnly) {
+ effects.push({
+ type: "SEND_MESSAGE",
+ payload: {
+ type: "file-change",
+ fileName: file.name,
+ content: file.content,
+ },
+ })
+ }
+ }
+
+ // If conflicts remain, request remote version data before surfacing to user
+ if (conflicts.length > 0) {
+ effects.push(
+ log(
+ "debug",
+ `${pluralize(conflicts.length, "conflict")} require version check`
+ ),
+ { type: "REQUEST_CONFLICT_VERSIONS", conflicts }
+ )
+
+ return {
+ state: {
+ ...state,
+ mode: "conflict_resolution",
+ pendingConflicts: conflicts,
+ },
+ effects,
+ }
+ }
+
+ // No conflicts - transition to watching
+ const remoteTotal = state.pendingRemoteChanges.length
+ const totalCount = remoteTotal + localOnly.length
+ const updatedCount = safeWrites.length + localOnly.length
+ const unchangedCount = Math.max(0, remoteTotal - safeWrites.length)
+ effects.push(
+ { type: "PERSIST_STATE" },
+ {
+ type: "SYNC_COMPLETE",
+ totalCount,
+ updatedCount,
+ unchangedCount,
+ }
+ )
+
+ return {
+ state: {
+ ...state,
+ mode: "watching",
+ pendingRemoteChanges: [],
+ },
+ effects,
+ }
+ }
+
+ case "FILE_CHANGE": {
+ // Use helper to validate the incoming change
+ const validation = validateIncomingChange(event.fileMeta, state.mode)
+
+ if (validation.action === "queue") {
+ effects.push(
+ log(
+ "debug",
+ `Queueing file change: ${event.file.name} (${validation.reason})`
+ )
+ )
+
+ return {
+ state: {
+ ...state,
+ pendingRemoteChanges: [...state.pendingRemoteChanges, event.file],
+ },
+ effects,
+ }
+ }
+
+ if (validation.action === "reject") {
+ effects.push(
+ log(
+ "warn",
+ `Rejected file change: ${event.file.name} (${validation.reason})`
+ )
+ )
+ return { state, effects }
+ }
+
+ // Apply the change
+ effects.push(log("debug", `Applying remote change: ${event.file.name}`), {
+ type: "WRITE_FILES",
+ files: [event.file],
+ skipEcho: true,
+ })
+
+ return { state, effects }
+ }
+
+ case "REMOTE_FILE_DELETE": {
+ // Reject if not connected
+ if (state.mode === "disconnected") {
+ effects.push(
+ log("warn", `Rejected delete while disconnected: ${event.fileName}`)
+ )
+ return { state, effects }
+ }
+
+ // Remote deletes should always be applied immediately
+ // (the file is already gone from Framer)
+ effects.push(
+ log("debug", `Remote delete applied: ${event.fileName}`),
+ { type: "DELETE_LOCAL_FILES", names: [event.fileName] },
+ { type: "PERSIST_STATE" }
+ )
+
+ return { state, effects }
+ }
+
+ case "LOCAL_DELETE_APPROVED": {
+ // User confirmed the delete - apply it
+ effects.push(
+ log("debug", `Delete confirmed: ${event.fileName}`),
+ { type: "DELETE_LOCAL_FILES", names: [event.fileName] },
+ { type: "PERSIST_STATE" }
+ )
+
+ return { state, effects }
+ }
+
+ case "LOCAL_DELETE_REJECTED": {
+ // User cancelled - restore the file
+ effects.push(log("debug", `Delete cancelled: ${event.fileName}`))
+ effects.push({
+ type: "WRITE_FILES",
+ files: [
+ {
+ name: event.fileName,
+ content: event.content,
+ modifiedAt: Date.now(),
+ },
+ ],
+ })
+
+ return { state, effects }
+ }
+
+ case "CONFLICTS_RESOLVED": {
+ // Only valid in conflict_resolution mode
+ if (state.mode !== "conflict_resolution") {
+ effects.push(
+ log(
+ "warn",
+ `Received CONFLICTS_RESOLVED in mode ${state.mode}, ignoring`
+ )
+ )
+ return { state, effects }
+ }
+
+ // User picked one resolution for ALL conflicts
+ if (event.resolution === "remote") {
+ // Apply all remote versions (or delete locally if remote is null)
+ for (const conflict of state.pendingConflicts) {
+ if (conflict.remoteContent === null) {
+ // Remote deleted this file - delete locally
+ effects.push({
+ type: "DELETE_LOCAL_FILES",
+ names: [conflict.fileName],
+ })
+ } else {
+ effects.push({
+ type: "WRITE_FILES",
+ files: [
+ {
+ name: conflict.fileName,
+ content: conflict.remoteContent,
+ modifiedAt: conflict.remoteModifiedAt,
+ },
+ ],
+ silent: true,
+ })
+ }
+ }
+ effects.push(log("success", "Keeping Framer changes"))
+ } else {
+ // Send all local versions (or delete from Framer if local is null)
+ for (const conflict of state.pendingConflicts) {
+ if (conflict.localContent === null) {
+ // Local deleted this file - delete from Framer
+ effects.push({
+ type: "SEND_MESSAGE",
+ payload: {
+ type: "file-delete",
+ fileNames: [conflict.fileName],
+ },
+ })
+ } else {
+ effects.push({
+ type: "SEND_MESSAGE",
+ payload: {
+ type: "file-change",
+ fileName: conflict.fileName,
+ content: conflict.localContent,
+ },
+ })
+ }
+ }
+ effects.push(log("success", "Keeping local changes"))
+ }
+
+ // All conflicts resolved - transition to watching
+ effects.push(
+ { type: "PERSIST_STATE" },
+ {
+ type: "SYNC_COMPLETE",
+ totalCount: state.pendingConflicts.length,
+ updatedCount: state.pendingConflicts.length,
+ unchangedCount: 0,
+ }
+ )
+
+ const { pendingConflicts: _discarded, ...rest } = state
+ return {
+ state: {
+ ...rest,
+ mode: "watching",
+ },
+ effects,
+ }
+ }
+
+ case "WATCHER_EVENT": {
+ // Local file system change detected
+ const { kind, relativePath, content } = event.event
+
+ // Only process changes in watching mode
+ if (state.mode !== "watching") {
+ effects.push(
+ log(
+ "debug",
+ `Ignoring watcher event in ${state.mode} mode: ${kind} ${relativePath}`
+ )
+ )
+ return { state, effects }
+ }
+
+ switch (kind) {
+ case "add":
+ case "change": {
+ if (content === undefined) {
+ effects.push(
+ log("warn", `Watcher event missing content: ${relativePath}`)
+ )
+ return { state, effects }
+ }
+
+ effects.push({
+ type: "SEND_LOCAL_CHANGE",
+ fileName: relativePath,
+ content,
+ })
+ break
+ }
+
+ case "delete": {
+ effects.push(log("debug", `Local delete detected: ${relativePath}`), {
+ type: "REQUEST_LOCAL_DELETE_DECISION",
+ fileName: relativePath,
+ requireConfirmation: true, // Will be overridden by config in effect
+ })
+ break
+ }
+ }
+
+ return { state, effects }
+ }
+
+ case "CONFLICT_VERSION_RESPONSE": {
+ if (state.mode !== "conflict_resolution") {
+ effects.push(
+ log(
+ "warn",
+ `Received CONFLICT_VERSION_RESPONSE in mode ${state.mode}, ignoring`
+ )
+ )
+ return { state, effects }
+ }
+
+ const { autoResolvedLocal, autoResolvedRemote, remainingConflicts } =
+ autoResolveConflicts(state.pendingConflicts, event.versions)
+
+ if (autoResolvedLocal.length > 0) {
+ effects.push(
+ log(
+ "debug",
+ `Auto-resolved ${autoResolvedLocal.length} local changes`
+ )
+ )
+ for (const conflict of autoResolvedLocal) {
+ if (conflict.localContent === null) {
+ // Local deleted - delete from Framer
+ effects.push({
+ type: "SEND_MESSAGE",
+ payload: {
+ type: "file-delete",
+ fileNames: [conflict.fileName],
+ },
+ })
+ } else {
+ effects.push({
+ type: "SEND_LOCAL_CHANGE",
+ fileName: conflict.fileName,
+ content: conflict.localContent,
+ })
+ }
+ }
+ }
+
+ if (autoResolvedRemote.length > 0) {
+ effects.push(
+ log(
+ "debug",
+ `Auto-resolved ${autoResolvedRemote.length} remote changes`
+ )
+ )
+ for (const conflict of autoResolvedRemote) {
+ if (conflict.remoteContent === null) {
+ // Remote deleted - delete locally
+ effects.push({
+ type: "DELETE_LOCAL_FILES",
+ names: [conflict.fileName],
+ })
+ } else {
+ effects.push({
+ type: "WRITE_FILES",
+ files: [
+ {
+ name: conflict.fileName,
+ content: conflict.remoteContent,
+ modifiedAt: conflict.remoteModifiedAt ?? Date.now(),
+ },
+ ],
+ silent: true, // Auto-resolved during initial sync - no individual indicators
+ })
+ }
+ }
+ }
+
+ if (remainingConflicts.length > 0) {
+ effects.push(
+ log(
+ "warn",
+ `${pluralize(remainingConflicts.length, "conflict")} require resolution`
+ ),
+ { type: "REQUEST_CONFLICT_DECISIONS", conflicts: remainingConflicts }
+ )
+
+ return {
+ state: {
+ ...state,
+ pendingConflicts: remainingConflicts,
+ },
+ effects,
+ }
+ }
+
+ const resolvedCount = autoResolvedLocal.length + autoResolvedRemote.length
+ effects.push(
+ { type: "PERSIST_STATE" },
+ {
+ type: "SYNC_COMPLETE",
+ totalCount: resolvedCount,
+ updatedCount: resolvedCount,
+ unchangedCount: 0,
+ }
+ )
+
+ const { pendingConflicts: _discarded, ...rest } = state
+ return {
+ state: {
+ ...rest,
+ mode: "watching",
+ pendingRemoteChanges: [],
+ },
+ effects,
+ }
+ }
+
+ default: {
+ effects.push(log("warn", `Unhandled event type in transition`))
+ return { state, effects }
+ }
+ }
+}
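+
+// Usage sketch (illustrative): because `transition` is pure, a single step can be exercised
+// without any I/O; `executeEffect` below interprets the returned effects.
+//   const { state: next, effects } = transition(currentState, { type: "DISCONNECT" })
+//   // effects now holds PERSIST_STATE plus a LOG entry; no side effects have run yet.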
+
+/**
+ * Effect executor - interprets effects and calls helpers
+ * Returns additional events that should be processed (e.g., CONFLICTS_DETECTED after DETECT_CONFLICTS)
+ */
+async function executeEffect(
+ effect: Effect,
+ context: {
+ config: Config
+ hashTracker: ReturnType<typeof createHashTracker>
+ installer: Installer | null
+ fileMetadataCache: FileMetadataCache
+ userActions: UserActionCoordinator
+ syncState: SyncState
+ }
+): Promise<SyncEvent[]> {
+ const {
+ config,
+ hashTracker,
+ installer,
+ fileMetadataCache,
+ userActions,
+ syncState,
+ } = context
+
+ switch (effect.type) {
+ case "INIT_WORKSPACE": {
+ // Initialize project directory if not already set
+ if (!config.projectDir) {
+ const projectName =
+ config.explicitName ?? effect.projectInfo.projectName
+
+ config.projectDir = await findOrCreateProjectDir(
+ config.projectHash,
+ projectName,
+ config.explicitDir
+ )
+
+ // May allow customization of file directory in the future
+ config.filesDir = `${config.projectDir}/files`
+ debug(`Files directory: ${config.filesDir}`)
+ await fs.mkdir(config.filesDir, { recursive: true })
+ }
+ return []
+ }
+
+ case "LOAD_PERSISTED_STATE": {
+ if (config.projectDir) {
+ await fileMetadataCache.initialize(config.projectDir)
+ debug(`Loaded persisted metadata for ${fileMetadataCache.size()} files`)
+ }
+ return []
+ }
+
+ case "LIST_LOCAL_FILES": {
+ if (!config.filesDir) {
+ return []
+ }
+
+ // List all local files and send to plugin
+ const files = await listFiles(config.filesDir)
+
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, {
+ type: "file-list",
+ files,
+ })
+ }
+
+ return []
+ }
+
+ case "DETECT_CONFLICTS": {
+ if (!config.filesDir) {
+ return []
+ }
+
+ // Use existing helper to detect conflicts
+ const { conflicts, writes, localOnly, unchanged } = await detectConflicts(
+ effect.remoteFiles,
+ config.filesDir,
+ { persistedState: fileMetadataCache.getPersistedState() }
+ )
+
+ // Record metadata for unchanged files so watcher add events get skipped
+ // (chokidar ignoreInitial=false fires late adds that would otherwise re-upload)
+ for (const file of unchanged) {
+ fileMetadataCache.recordRemoteWrite(
+ file.name,
+ file.content,
+ file.modifiedAt ?? Date.now()
+ )
+ }
+
+ // Return CONFLICTS_DETECTED event to continue the flow
+ return [
+ {
+ type: "CONFLICTS_DETECTED",
+ conflicts,
+ safeWrites: writes,
+ localOnly,
+ },
+ ]
+ }
+
+ case "SEND_MESSAGE": {
+ if (syncState.socket) {
+ const sent = await sendMessage(syncState.socket, effect.payload)
+ if (!sent) {
+ warn(`Failed to send message: ${effect.payload.type}`)
+ }
+ } else {
+ warn(`No socket available to send: ${effect.payload.type}`)
+ }
+ return []
+ }
+
+ case "WRITE_FILES": {
+ if (config.filesDir) {
+ // skipEcho skips writes whose content matches the hashTracker (inbound echoes).
+ // It is opt-in because some callers still need the side effects (metadata/logs)
+ // even when the content matches the last hash tracked in memory.
+ const filesToWrite =
+ effect.skipEcho === true
+ ? filterEchoedFiles(effect.files, hashTracker)
+ : effect.files
+
+ if (effect.skipEcho && filesToWrite.length !== effect.files.length) {
+ const skipped = effect.files.length - filesToWrite.length
+ debug(`Skipped ${pluralize(skipped, "echoed change")}`)
+ }
+
+ if (filesToWrite.length === 0) {
+ return []
+ }
+
+ await writeRemoteFiles(
+ filesToWrite,
+ config.filesDir,
+ hashTracker,
+ installer ?? undefined
+ )
+ for (const file of filesToWrite) {
+ if (!effect.silent) {
+ fileDown(file.name)
+ }
+ const remoteTimestamp = file.modifiedAt ?? Date.now()
+ fileMetadataCache.recordRemoteWrite(
+ file.name,
+ file.content,
+ remoteTimestamp
+ )
+ }
+ }
+ return []
+ }
+
+ case "DELETE_LOCAL_FILES": {
+ if (config.filesDir) {
+ for (const fileName of effect.names) {
+ await deleteLocalFile(fileName, config.filesDir, hashTracker)
+ fileDelete(fileName)
+ fileMetadataCache.recordDelete(fileName)
+ }
+ }
+ return []
+ }
+
+ case "REQUEST_CONFLICT_DECISIONS": {
+ await userActions.requestConflictDecisions(
+ syncState.socket,
+ effect.conflicts
+ )
+
+ return []
+ }
+
+ case "REQUEST_CONFLICT_VERSIONS": {
+ if (!syncState.socket) {
+ warn("Cannot request conflict versions without active socket")
+ return []
+ }
+
+ const persistedState = fileMetadataCache.getPersistedState()
+ const versionRequests = effect.conflicts.map((conflict) => {
+ const persisted = persistedState.get(conflict.fileName)
+ return {
+ fileName: conflict.fileName,
+ lastSyncedAt: conflict.lastSyncedAt ?? persisted?.timestamp,
+ }
+ })
+
+ debug(
+ `Requesting remote version data for ${pluralize(versionRequests.length, "file")}`
+ )
+
+ await sendMessage(syncState.socket, {
+ type: "conflict-version-request",
+ conflicts: versionRequests,
+ })
+
+ return []
+ }
+
+ case "REQUEST_DELETE_CONFIRMATION": {
+ if (syncState.socket) {
+ // Send delete request to plugin
+ await sendMessage(syncState.socket, {
+ type: "file-delete",
+ fileNames: [effect.fileName],
+ requireConfirmation: effect.requireConfirmation,
+ })
+ }
+ // Response will come via delete-confirmed or delete-cancelled message
+ return []
+ }
+
+ case "UPDATE_FILE_METADATA": {
+ if (!config.filesDir || !config.projectDir) {
+ return []
+ }
+
+ // Read current file content to compute hash
+ const currentContent = await readFileSafe(
+ effect.fileName,
+ config.filesDir
+ )
+
+ if (currentContent !== null) {
+ const contentHash = hashFileContent(currentContent)
+ fileMetadataCache.recordSyncedSnapshot(
+ effect.fileName,
+ contentHash,
+ effect.remoteModifiedAt
+ )
+ }
+
+ return []
+ }
+
+ case "SEND_LOCAL_CHANGE": {
+ const contentHash = hashFileContent(effect.content)
+ const metadata = fileMetadataCache.get(effect.fileName)
+
+ // Skip if file matches last confirmed remote content
+ if (metadata?.lastSyncedHash === contentHash) {
+ debug(
+ `Skipping local change for ${effect.fileName}: matches last synced content`
+ )
+ return []
+ }
+
+ // Echo prevention: skip if we just wrote this exact content
+ if (hashTracker.shouldSkip(effect.fileName, effect.content)) {
+ return []
+ }
+
+ debug(`Local change detected: ${effect.fileName}`)
+
+ try {
+ // Send change to plugin
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, {
+ type: "file-change",
+ fileName: effect.fileName,
+ content: effect.content,
+ })
+ fileUp(effect.fileName)
+ }
+
+ // Only remember the hash after a successful send, so a failed send can be retried
+ hashTracker.remember(effect.fileName, effect.content)
+
+ // Trigger type installer
+ if (installer) {
+ installer.process(effect.fileName, effect.content)
+ }
+ } catch (err) {
+ warn(`Failed to push ${effect.fileName}`)
+ }
+
+ return []
+ }
+
+ case "REQUEST_LOCAL_DELETE_DECISION": {
+ // Echo prevention: skip if this is a remote-initiated delete
+ const shouldSkip = hashTracker.shouldSkipDelete(effect.fileName)
+
+ if (shouldSkip) {
+ // Clear the delete marker now that we've caught the echo
+ hashTracker.clearDelete(effect.fileName)
+ return []
+ }
+
+ try {
+ const shouldDelete = await userActions.requestDeleteDecision(
+ syncState.socket,
+ {
+ fileName: effect.fileName,
+ requireConfirmation: !config.dangerouslyAutoDelete,
+ }
+ )
+
+ if (shouldDelete) {
+ hashTracker.forget(effect.fileName)
+ fileMetadataCache.recordDelete(effect.fileName)
+
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, {
+ type: "file-delete",
+ fileNames: [effect.fileName],
+ })
+ }
+ }
+ } catch (err) {
+ console.warn(`Failed to handle deletion for ${effect.fileName}:`, err)
+ }
+
+ return []
+ }
+
+ case "PERSIST_STATE": {
+ await fileMetadataCache.flush()
+ return []
+ }
+
+ case "SYNC_COMPLETE": {
+ const wasDisconnected = wasRecentlyDisconnected()
+
+ // Notify plugin that sync is complete
+ if (syncState.socket) {
+ await sendMessage(syncState.socket, { type: "sync-complete" })
+ }
+
+ if (wasDisconnected) {
+ // Only show reconnect message if we actually showed the disconnect notice
+ if (didShowDisconnect()) {
+ success(
+ `Reconnected, synced ${effect.totalCount} files (${effect.updatedCount} updated, ${effect.unchangedCount} unchanged)`
+ )
+ status("Watching for changes...")
+ }
+ resetDisconnectState()
+ return []
+ }
+
+ success(
+ `Synced ${effect.totalCount} files (${effect.updatedCount} updated, ${effect.unchangedCount} unchanged)`
+ )
+ status("Watching for changes...")
+ return []
+ }
+
+ case "LOG": {
+ const logFns = { info, warn, success, debug }
+ const logFn = logFns[effect.level]
+ logFn(effect.message)
+ return []
+ }
+ }
+}
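+
+// Example (illustrative): most effects return no follow-up events, but some do. Executing
+// LIST_LOCAL_FILES resolves to [], while DETECT_CONFLICTS resolves to a CONFLICTS_DETECTED
+// event that the caller must feed back through transition(), as processEvent() does below.
+//   const followUps = await executeEffect({ type: "LIST_LOCAL_FILES" }, context)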
+
+/**
+ * Starts the sync controller with the given configuration
+ */
+export async function start(config: Config): Promise<void> {
+ status("Waiting for Plugin connection...")
+
+ const hashTracker = createHashTracker()
+ const fileMetadataCache = new FileMetadataCache()
+ let installer: Installer | null = null
+
+ // State machine state
+ let syncState: SyncState = {
+ mode: "disconnected",
+ socket: null,
+ pendingRemoteChanges: [],
+ pendingOperations: new Map(),
+ nextOperationId: 1,
+ }
+
+ const userActions = new UserActionCoordinator()
+
+ // State Machine Helper
+ // Process events through state machine and execute effects recursively
+ async function processEvent(event: SyncEvent) {
+ const socketState = syncState.socket?.readyState
+ debug(
+ `[STATE] Processing event: ${event.type} (mode: ${syncState.mode}, socket: ${socketState ?? "none"})`
+ )
+
+ const result = transition(syncState, event)
+ syncState = result.state
+
+ if (result.effects.length > 0) {
+ debug(
+ `[STATE] Event produced ${result.effects.length} effects: ${result.effects.map((e) => e.type).join(", ")}`
+ )
+ }
+
+ // Execute all effects and process any follow-up events
+ for (const effect of result.effects) {
+ // Check socket state before each effect
+ const currentSocketState = syncState.socket?.readyState
+ if (currentSocketState !== undefined && currentSocketState !== 1) {
+ debug(
+ `[STATE] Socket not open (state: ${currentSocketState}) before executing ${effect.type}`
+ )
+ }
+
+ const followUpEvents = await executeEffect(effect, {
+ config,
+ hashTracker,
+ installer,
+ fileMetadataCache,
+ userActions,
+ syncState,
+ })
+
+ // Recursively process follow-up events
+ for (const followUpEvent of followUpEvents) {
+ await processEvent(followUpEvent)
+ }
+ }
+ }
+
+ // WebSocket Connection
+ const connection = await initConnection(config.port)
+
+ // Handle initial handshake
+ connection.on("handshake", (client: WebSocket, message) => {
+ debug(`Received handshake: ${message.projectName} (${message.projectId})`)
+
+ // Validate project hash (normalize both to short hash for comparison)
+ const expectedShort = shortProjectHash(config.projectHash)
+ const receivedShort = shortProjectHash(message.projectId)
+ if (receivedShort !== expectedShort) {
+ warn(
+ `Project ID mismatch: expected ${expectedShort}, got ${receivedShort}`
+ )
+ client.close()
+ return
+ }
+
+ void (async () => {
+ // Process handshake through state machine
+ await processEvent({
+ type: "HANDSHAKE",
+ socket: client,
+ projectInfo: {
+ projectId: message.projectId,
+ projectName: message.projectName,
+ },
+ })
+
+ // Initialize installer if needed
+ if (config.projectDir && !installer) {
+ installer = new Installer({
+ projectDir: config.projectDir,
+ allowUnsupportedNpm: config.allowUnsupportedNpm,
+ })
+ await installer.initialize()
+ // Start file watcher now that we have a directory
+ startWatcher()
+ }
+
+ // Cancel any pending disconnect message (fast reconnect)
+ cancelDisconnectMessage()
+
+ // Only show "Connected" on initial connection, not reconnects
+ // Reconnect confirmation happens in SYNC_COMPLETE
+ const wasDisconnected = wasRecentlyDisconnected()
+ if (!wasDisconnected && !didShowDisconnect()) {
+ success(`Connected to ${message.projectName}`)
+ }
+ })()
+ })
+
+ // Message Handler
+ async function handleMessage(message: IncomingMessage) {
+ // Ensure project is initialized before handling messages
+ if (!config.projectDir || !installer) {
+ warn("Received message before handshake completed - ignoring")
+ return
+ }
+
+ let event: SyncEvent | null = null
+
+ // Map incoming messages to state machine events
+ switch (message.type) {
+ case "request-files":
+ event = { type: "REQUEST_FILES" }
+ break
+
+ case "file-list": {
+ debug(`Received file list: ${message.files.length} files`)
+ event = { type: "FILE_LIST", files: message.files }
+ break
+ }
+
+ case "file-change":
+ event = {
+ type: "FILE_CHANGE",
+ file: {
+ name: message.fileName,
+ content: message.content,
+ // Remote modifiedAt is expensive to compute (requires a getVersions API call), so we
+ // use local receipt time. Conflict detection uses content hashes, not timestamps.
+ modifiedAt: Date.now(),
+ },
+ fileMeta: fileMetadataCache.get(message.fileName),
+ }
+ break
+
+ case "file-delete": {
+ // Remote deletes are always applied immediately (file is already gone from Framer)
+ for (const fileName of message.fileNames) {
+ await processEvent({
+ type: "REMOTE_FILE_DELETE",
+ fileName,
+ })
+ }
+ return
+ }
+
+ case "delete-confirmed": {
+ const unmatched: string[] = []
+
+ for (const fileName of message.fileNames) {
+ const handled = userActions.handleConfirmation(
+ `delete:${fileName}`,
+ true
+ )
+
+ if (!handled) {
+ unmatched.push(fileName)
+ }
+ }
+
+ for (const fileName of unmatched) {
+ await processEvent({ type: "LOCAL_DELETE_APPROVED", fileName })
+ }
+
+ return
+ }
+
+ case "delete-cancelled": {
+ for (const file of message.files) {
+ userActions.handleConfirmation(`delete:${file.fileName}`, false)
+
+ await processEvent({
+ type: "LOCAL_DELETE_REJECTED",
+ fileName: file.fileName,
+ content: file.content ?? "",
+ })
+ }
+
+ return
+ }
+
+ case "file-synced":
+ event = {
+ type: "FILE_SYNCED_CONFIRMATION",
+ fileName: message.fileName,
+ remoteModifiedAt: message.remoteModifiedAt,
+ }
+ break
+
+ case "conflicts-resolved":
+ event = {
+ type: "CONFLICTS_RESOLVED",
+ resolution: message.resolution,
+ }
+ break
+
+ case "conflict-version-response":
+ event = {
+ type: "CONFLICT_VERSION_RESPONSE",
+ versions: message.versions,
+ }
+ break
+
+ default:
+ warn(`Unhandled message type: ${message.type}`)
+ return
+ }
+
+ await processEvent(event)
+ }
+
+ connection.on("message", (message: IncomingMessage) => {
+ void (async () => {
+ try {
+ await handleMessage(message)
+ } catch (err) {
+ error("Error handling message:", err)
+ }
+ })()
+ })
+
+ connection.on("disconnect", () => {
+ // Schedule disconnect message with delay - if reconnect happens quickly, we skip it
+ scheduleDisconnectMessage(() => {
+ status("Disconnected, waiting to reconnect...")
+ })
+ void (async () => {
+ await processEvent({ type: "DISCONNECT" })
+ userActions.cleanup()
+ })()
+ })
+
+ connection.on("error", (err) => {
+ error("Error on WebSocket connection:", err)
+ })
+
+ // File Watcher Setup
+ // Watcher will be initialized after handshake when filesDir is set
+ let watcher: ReturnType<typeof initWatcher> | null = null
+
+ const startWatcher = () => {
+ if (!config.filesDir || watcher) return
+ watcher = initWatcher(config.filesDir)
+
+ watcher.on("change", (event) => {
+ void processEvent({ type: "WATCHER_EVENT", event })
+ })
+ }
+
+ // Graceful shutdown
+ process.on("SIGINT", () => {
+ console.log() // newline after ^C
+ status("Shutting down...")
+ void (async () => {
+ if (watcher) {
+ await watcher.close()
+ }
+ connection.close()
+ process.exit(0)
+ })()
+ })
+}
+
+// Export for testing
+export { transition }
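+
+// Because `transition` is exported and pure, unit tests can assert on a step directly
+// (illustrative sketch; `disconnectedState` stands for any SyncState in "disconnected" mode):
+//   const { effects } = transition(disconnectedState, { type: "REQUEST_FILES" })
+//   expect(effects.some((e) => e.type === "LOG")).toBe(true)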
diff --git a/packages/code-link-cli/src/helpers/connection.ts b/packages/code-link-cli/src/helpers/connection.ts
new file mode 100644
index 000000000..ee0d2e7ec
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/connection.ts
@@ -0,0 +1,188 @@
+/**
+ * WebSocket connection helper
+ *
+ * Wrapper around ws.Server that normalizes handshake and surfaces callbacks.
+ */
+
+import { WebSocketServer, WebSocket } from "ws"
+import type { IncomingMessage, OutgoingMessage } from "../types.js"
+import { debug, error } from "../utils/logging.js"
+
+export interface ConnectionCallbacks {
+ onHandshake: (
+ client: WebSocket,
+ message: { projectId: string; projectName: string }
+ ) => void
+ onMessage: (message: IncomingMessage) => void
+ onDisconnect: () => void
+ onError: (error: Error) => void
+}
+
+export interface Connection {
+ on(event: "handshake", handler: ConnectionCallbacks["onHandshake"]): void
+ on(event: "message", handler: ConnectionCallbacks["onMessage"]): void
+ on(event: "disconnect", handler: ConnectionCallbacks["onDisconnect"]): void
+ on(event: "error", handler: ConnectionCallbacks["onError"]): void
+ close(): void
+}
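+
+// Usage sketch (illustrative), mirroring how the sync controller wires it up:
+//   const connection = await initConnection(config.port)
+//   connection.on("handshake", (client, msg) => { /* validate project, start sync */ })
+//   connection.on("message", (msg) => { /* map to state machine events */ })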
+
+/**
+ * Initializes a WebSocket server and returns a connection interface
+ * Returns a Promise that resolves when the server is ready, or rejects on startup errors
+ */
+export function initConnection(port: number): Promise<Connection> {
+ return new Promise((resolve, reject) => {
+ const wss = new WebSocketServer({ port })
+ const handlers: Partial<ConnectionCallbacks> = {}
+ let connectionId = 0
+ let isReady = false
+
+ // Handle server-level errors (e.g., EADDRINUSE)
+ wss.on("error", (err: NodeJS.ErrnoException) => {
+ if (!isReady) {
+ // Startup error - reject the promise with a helpful message
+ if (err.code === "EADDRINUSE") {
+ error(`Port ${port} is already in use.`)
+ error(
+ `This usually means another instance of Code Link is already running.`
+ )
+ error(``)
+ error(`To fix this:`)
+ error(
+ ` 1. Close any other terminal running Code Link for this project`
+ )
+ error(` 2. Or run: lsof -i :${port} | grep LISTEN`)
+ error(` Then kill the process: kill -9 <PID>`)
+ reject(new Error(`Port ${port} is already in use`))
+ } else {
+ error(`Failed to start WebSocket server: ${err.message}`)
+ reject(err)
+ }
+ return
+ }
+ // Runtime error - log but don't crash
+ error(`WebSocket server error: ${err.message}`)
+ })
+
+ // Server is ready when it starts listening
+ wss.on("listening", () => {
+ isReady = true
+ debug(`WebSocket server listening on port ${port}`)
+
+ wss.on("connection", (ws: WebSocket) => {
+ const connId = ++connectionId
+ let handshakeReceived = false
+ debug(`Client connected (conn ${connId})`)
+
+ ws.on("message", (data: Buffer) => {
+ try {
+ const message = JSON.parse(data.toString()) as IncomingMessage
+
+ // Special handling for handshake
+ if (message.type === "handshake") {
+ debug(`Received handshake (conn ${connId})`)
+ handshakeReceived = true
+ handlers.onHandshake?.(ws, message)
+ } else if (handshakeReceived) {
+ handlers.onMessage?.(message)
+ } else {
+ // Ignore messages before handshake - plugin will send full snapshot after
+ debug(
+ `Ignoring ${message.type} before handshake (conn ${connId})`
+ )
+ }
+ } catch (err) {
+ error(`Failed to parse message:`, err)
+ }
+ })
+
+ ws.on("close", (code, reason) => {
+ debug(
+ `Client disconnected (code: ${code}, reason: ${reason.toString()})`
+ )
+ handlers.onDisconnect?.()
+ })
+
+ ws.on("error", (err) => {
+ error(`WebSocket error:`, err)
+ })
+ })
+
+ resolve({
+ on(event, handler) {
+ switch (event) {
+ case "handshake":
+ handlers.onHandshake = handler as ConnectionCallbacks["onHandshake"]
+ break
+ case "message":
+ handlers.onMessage = handler as ConnectionCallbacks["onMessage"]
+ break
+ case "disconnect":
+ handlers.onDisconnect = handler as ConnectionCallbacks["onDisconnect"]
+ break
+ case "error":
+ handlers.onError = handler as ConnectionCallbacks["onError"]
+ break
+ }
+ },
+
+ close(): void {
+ wss.close()
+ },
+ } satisfies Connection)
+ })
+ })
+}
+
+/**
+ * WebSocket readyState constants for reference
+ */
+const READY_STATE = {
+ CONNECTING: 0,
+ OPEN: 1,
+ CLOSING: 2,
+ CLOSED: 3,
+} as const
+
+function readyStateToString(state: number): string {
+ switch (state) {
+ case 0:
+ return "CONNECTING"
+ case 1:
+ return "OPEN"
+ case 2:
+ return "CLOSING"
+ case 3:
+ return "CLOSED"
+ default:
+ return `UNKNOWN(${state})`
+ }
+}
+
+/**
+ * Sends a message to a connected socket
+ * Returns false if the socket is not open (instead of throwing)
+ */
+export function sendMessage(
+ socket: WebSocket,
+ message: OutgoingMessage
+): Promise<boolean> {
+ return new Promise((resolve) => {
+ // Check socket state before attempting to send
+ if (socket.readyState !== READY_STATE.OPEN) {
+ const stateStr = readyStateToString(socket.readyState)
+ debug(`Cannot send ${message.type}: socket is ${stateStr}`)
+ resolve(false)
+ return
+ }
+
+ socket.send(JSON.stringify(message), (err) => {
+ if (err) {
+ debug(`Send error for ${message.type}: ${err.message}`)
+ resolve(false)
+ } else {
+ resolve(true)
+ }
+ })
+ })
+}
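+
+// Example (illustrative): callers branch on the returned boolean instead of using try/catch.
+//   const sent = await sendMessage(socket, { type: "sync-complete" })
+//   if (!sent) debug("sync-complete was not delivered; socket not open")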
diff --git a/packages/code-link-cli/src/helpers/files.test.ts b/packages/code-link-cli/src/helpers/files.test.ts
new file mode 100644
index 000000000..df72f7329
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/files.test.ts
@@ -0,0 +1,347 @@
+import fs from "fs/promises"
+import os from "os"
+import path from "path"
+import { describe, it, expect } from "vitest"
+import { autoResolveConflicts, detectConflicts } from "./files.js"
+import type { Conflict } from "../types.js"
+import { hashFileContent } from "../utils/state-persistence.js"
+
+function makeConflict(overrides: Partial<Conflict> = {}): Conflict {
+ return {
+ fileName: overrides.fileName ?? "Test.tsx",
+ localContent:
+ "localContent" in overrides ? overrides.localContent : "local",
+ remoteContent:
+ "remoteContent" in overrides ? overrides.remoteContent : "remote",
+ localModifiedAt: overrides.localModifiedAt ?? Date.now(),
+ remoteModifiedAt: overrides.remoteModifiedAt ?? Date.now(),
+ lastSyncedAt: overrides.lastSyncedAt ?? Date.now(),
+ localClean: overrides.localClean,
+ }
+}
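+
+// Timestamp convention used below: lastSyncedAt is 5_000, so a latestRemoteVersionMs at or
+// below that value means the remote side is unchanged, while a later value (beyond the
+// drift allowance) means the file changed in Framer after the last sync.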
+
+// Auto-Resolve Conflicts Tests
+describe("autoResolveConflicts", () => {
+ it("classifies conflicts as local when remote unchanged and local changed", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: false,
+ })
+
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: 5_000 }]
+ )
+
+ expect(result.autoResolvedLocal).toHaveLength(1)
+ expect(result.autoResolvedRemote).toHaveLength(0)
+ expect(result.remainingConflicts).toHaveLength(0)
+ })
+
+ it("classifies conflicts as remote when local is clean and remote changed", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: true,
+ })
+
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: 10_000 }]
+ )
+
+ expect(result.autoResolvedRemote).toHaveLength(1)
+ expect(result.autoResolvedLocal).toHaveLength(0)
+ })
+
+ it("keeps conflicts that have both sides changed", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: false,
+ })
+
+ const result = autoResolveConflicts(
+ [conflict],
+ [{ fileName: conflict.fileName, latestRemoteVersionMs: 7_500 }]
+ )
+
+ expect(result.remainingConflicts).toHaveLength(1)
+ expect(result.autoResolvedLocal).toHaveLength(0)
+ expect(result.autoResolvedRemote).toHaveLength(0)
+ })
+
+ it("keeps conflicts when version data is missing", () => {
+ const conflict = makeConflict({
+ lastSyncedAt: 5_000,
+ localClean: true,
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ expect(result.remainingConflicts).toHaveLength(1)
+ })
+
+ it("auto-resolves remote deletion when local is clean", () => {
+ const conflict = makeConflict({
+ remoteContent: null, // Deleted in Framer
+ localClean: true,
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ // Remote deletion with clean local -> auto-resolve to remote (delete locally)
+ expect(result.autoResolvedRemote).toHaveLength(1)
+ expect(result.remainingConflicts).toHaveLength(0)
+ })
+
+ it("keeps conflict when remote deleted but local modified", () => {
+ const conflict = makeConflict({
+ remoteContent: null, // Deleted in Framer
+ localClean: false, // But local was modified
+ })
+
+ const result = autoResolveConflicts([conflict], [])
+
+ // User must decide: keep local changes or accept deletion
+ expect(result.remainingConflicts).toHaveLength(1)
+ expect(result.autoResolvedRemote).toHaveLength(0)
+ })
+})
+
+// Detect Conflicts Tests
+describe("detectConflicts", () => {
+ it("marks conflicts as localClean when local matches persisted state", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ const localContent = "local content"
+ await fs.writeFile(path.join(filesDir, "Test.tsx"), localContent, "utf-8")
+
+ const persistedState = new Map([
+ [
+ "Test.tsx",
+ { contentHash: hashFileContent(localContent), timestamp: 1_000 },
+ ],
+ ])
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "Test.tsx",
+ content: "remote content",
+ modifiedAt: 2_000,
+ },
+ ],
+ filesDir,
+ { persistedState }
+ )
+
+ expect(result.writes).toHaveLength(0)
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.conflicts[0]?.localClean).toBe(true)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects remote-only files as safe writes (new files to download)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // No local files, one remote file
+ const result = await detectConflicts(
+ [
+ {
+ name: "NewFromFramer.tsx",
+ content: "export const New = () => New
",
+ modifiedAt: Date.now(),
+ },
+ ],
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // Remote-only file should be a safe write
+ expect(result.writes).toHaveLength(1)
+ expect(result.writes[0]?.name).toBe("NewFromFramer.tsx")
+ expect(result.conflicts).toHaveLength(0)
+ expect(result.localOnly).toHaveLength(0)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects local-only files (new files to upload)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // Create a local file that doesn't exist in remote
+ await fs.writeFile(
+ path.join(filesDir, "LocalOnly.tsx"),
+ "export const Local = () => Local
",
+ "utf-8"
+ )
+
+ const result = await detectConflicts(
+ [], // No remote files
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // Local-only file should be detected
+ expect(result.localOnly).toHaveLength(1)
+ expect(result.localOnly[0]?.name).toBe("LocalOnly.tsx")
+ expect(result.writes).toHaveLength(0)
+ expect(result.conflicts).toHaveLength(0)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("handles case-insensitive file matching (macOS/Windows compat)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // Local file with different casing than remote
+ await fs.writeFile(
+ path.join(filesDir, "mycomponent.tsx"),
+ "local content",
+ "utf-8"
+ )
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "MyComponent.tsx", // Different casing
+ content: "remote content",
+ modifiedAt: Date.now(),
+ },
+ ],
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // Should detect as conflict, not as two separate files
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.localOnly).toHaveLength(0)
+ expect(result.writes).toHaveLength(0)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects local deletion while offline as conflict", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // File was previously synced but now missing locally
+ const persistedState = new Map([
+ [
+ "DeletedLocally.tsx",
+ { contentHash: hashFileContent("old content"), timestamp: 1_000 },
+ ],
+ ])
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "DeletedLocally.tsx",
+ content: "remote content still exists",
+ modifiedAt: 2_000,
+ },
+ ],
+ filesDir,
+ { persistedState }
+ )
+
+ // Should be a conflict: local=null (deleted), remote=content
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.conflicts[0]?.localContent).toBe(null)
+ expect(result.conflicts[0]?.remoteContent).toBe(
+ "remote content still exists"
+ )
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("detects remote deletion while offline as conflict", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ // Local file still exists
+ await fs.writeFile(
+ path.join(filesDir, "DeletedRemotely.tsx"),
+ "local content still exists",
+ "utf-8"
+ )
+
+ // File was previously synced
+ const persistedState = new Map([
+ [
+ "DeletedRemotely.tsx",
+ {
+ contentHash: hashFileContent("local content still exists"),
+ timestamp: 1_000,
+ },
+ ],
+ ])
+
+ const result = await detectConflicts(
+ [], // File no longer in remote
+ filesDir,
+ { persistedState }
+ )
+
+ // Should be a conflict: local=content, remote=null (deleted)
+ expect(result.conflicts).toHaveLength(1)
+ expect(result.conflicts[0]?.localContent).toBe(
+ "local content still exists"
+ )
+ expect(result.conflicts[0]?.remoteContent).toBe(null)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+
+ it("treats identical content as unchanged (no write needed)", async () => {
+ const tmpRoot = await fs.mkdtemp(path.join(os.tmpdir(), "cl-test-"))
+ try {
+ const filesDir = path.join(tmpRoot, "files")
+ await fs.mkdir(filesDir, { recursive: true })
+
+ const content = "export const Same = () => <div>Same</div>"
+ await fs.writeFile(path.join(filesDir, "Same.tsx"), content, "utf-8")
+
+ const result = await detectConflicts(
+ [
+ {
+ name: "Same.tsx",
+ content, // Same content
+ modifiedAt: Date.now(),
+ },
+ ],
+ filesDir,
+ { persistedState: new Map() }
+ )
+
+ // No write needed, no conflict
+ expect(result.writes).toHaveLength(0)
+ expect(result.conflicts).toHaveLength(0)
+ expect(result.unchanged).toHaveLength(1)
+ } finally {
+ await fs.rm(tmpRoot, { recursive: true, force: true })
+ }
+ })
+})
diff --git a/packages/code-link-cli/src/helpers/files.ts b/packages/code-link-cli/src/helpers/files.ts
new file mode 100644
index 000000000..c6a0e020e
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/files.ts
@@ -0,0 +1,462 @@
+/**
+ * File operations helper
+ *
+ * Single place that understands disk + conflicts. Provides:
+ * - listFiles: returns current filesystem state
+ * - detectConflicts: compares remote vs local and returns conflicts + safe writes
+ * - writeRemoteFiles: applies writes/deletes from remote
+ * - deleteLocalFile: removes a file from disk
+ *
+ * Controller decides WHEN to call these, but never computes conflicts itself.
+ */
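+
+// Typical flow (illustrative), as driven by the sync controller:
+//   const { conflicts, writes, localOnly, unchanged } =
+//     await detectConflicts(remoteFiles, filesDir, { persistedState })
+//   await writeRemoteFiles(writes, filesDir, hashTracker)
+//   // conflicts go through autoResolveConflicts() or user prompts; localOnly files are uploaded.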
+
+import fs from "fs/promises"
+import path from "path"
+import type {
+ FileInfo,
+ ConflictResolution,
+ Conflict,
+ ConflictVersionData,
+} from "../types.js"
+import type { createHashTracker, HashTracker } from "../utils/hash-tracker.js"
+import { normalizePath, sanitizeFilePath } from "@code-link/shared"
+import { warn, debug } from "../utils/logging.js"
+import {
+ hashFileContent,
+ type PersistedFileState,
+} from "../utils/state-persistence.js"
+
+const SUPPORTED_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", ".json"]
+const DEFAULT_EXTENSION = ".tsx"
+const DEFAULT_REMOTE_DRIFT_MS = 2000
+
+/** Normalize file name for case-insensitive comparison (macOS/Windows compat) */
+function normalizeForComparison(fileName: string): string {
+ return fileName.toLowerCase()
+}
+
+/**
+ * Lists all supported files in the files directory
+ */
+export async function listFiles(filesDir: string): Promise<FileInfo[]> {
+ const files: FileInfo[] = []
+
+ async function walk(currentDir: string): Promise<void> {
+ const entries = await fs.readdir(currentDir, { withFileTypes: true })
+
+ for (const entry of entries) {
+ const entryPath = path.join(currentDir, entry.name)
+
+ if (entry.isDirectory()) {
+ await walk(entryPath)
+ continue
+ }
+
+ if (!isSupportedExtension(entry.name)) continue
+
+ const relativePath = path.relative(filesDir, entryPath)
+ const normalizedPath = normalizePath(relativePath)
+ // Don't capitalize when listing existing files - preserve their actual names
+ const sanitizedPath = sanitizeFilePath(normalizedPath, false).path
+
+ try {
+ const [content, stats] = await Promise.all([
+ fs.readFile(entryPath, "utf-8"),
+ fs.stat(entryPath),
+ ])
+
+ files.push({
+ name: sanitizedPath,
+ content,
+ modifiedAt: stats.mtimeMs,
+ })
+ } catch (err) {
+ warn(`Failed to read ${entryPath}:`, err)
+ }
+ }
+ }
+
+ try {
+ await walk(filesDir)
+ } catch (err) {
+ warn("Failed to list files:", err)
+ }
+
+ return files
+}
+
+/**
+ * Detects conflicts between remote files and local filesystem
+ * Returns conflicts that need user resolution and safe writes that can be applied
+ */
+export interface ConflictDetectionOptions {
+ preferRemote?: boolean
+ detectConflicts?: boolean
+ persistedState?: Map<string, PersistedFileState>
+}
+
+export async function detectConflicts(
+ remoteFiles: FileInfo[],
+ filesDir: string,
+ options: ConflictDetectionOptions = {}
+): Promise<{
+ conflicts: Conflict[]
+ writes: FileInfo[]
+ localOnly: FileInfo[]
+ unchanged: FileInfo[]
+}> {
+ const conflicts: Conflict[] = []
+ const writes: FileInfo[] = []
+ const localOnly: FileInfo[] = []
+ const unchanged: FileInfo[] = []
+ const detect = options.detectConflicts ?? true
+ const preferRemote = options.preferRemote ?? false
+ const persistedState = options.persistedState
+
+ const getPersistedState = (fileName: string) =>
+ persistedState?.get(normalizeForComparison(fileName)) ??
+ persistedState?.get(fileName)
+
+ debug(`Detecting conflicts for ${String(remoteFiles.length)} remote files`)
+
+ // Build a snapshot of all local files (keyed by lowercase for case-insensitive matching)
+ const localFiles = await listFiles(filesDir)
+ const localFileMap = new Map(
+ localFiles.map((f) => [normalizeForComparison(f.name), f])
+ )
+
+ // Build a set of remote file names for quick lookup (lowercase keys)
+ const remoteFileMap = new Map(
+ remoteFiles.map((f) => {
+ const normalized = resolveRemoteReference(filesDir, f.name)
+ return [normalizeForComparison(normalized.relativePath), f]
+ })
+ )
+
+ // Track which files we've processed (lowercase for case-insensitive matching)
+ const processedFiles = new Set()
+
+ // Process remote files (remote-only or both sides)
+ for (const remote of remoteFiles) {
+ const normalized = resolveRemoteReference(filesDir, remote.name)
+ const normalizedKey = normalizeForComparison(normalized.relativePath)
+ const local = localFileMap.get(normalizedKey)
+ processedFiles.add(normalizedKey)
+
+ const persisted = getPersistedState(normalized.relativePath)
+ const localHash = local ? hashFileContent(local.content) : null
+ const localMatchesPersisted =
+ !!persisted && !!local && localHash === persisted.contentHash
+
+ if (!local) {
+ // File exists in remote but not locally
+ if (persisted) {
+ // File was previously synced but now missing locally → deleted locally while offline
+ // This is a conflict: local=null (deleted), remote=content
+ debug(
+ `Conflict: ${normalized.relativePath} deleted locally while offline`
+ )
+ conflicts.push({
+ fileName: normalized.relativePath,
+ localContent: null,
+ remoteContent: remote.content,
+ remoteModifiedAt: remote.modifiedAt,
+ lastSyncedAt: persisted.timestamp,
+ })
+ } else {
+ // New file from remote (never synced before): download
+ writes.push({
+ name: normalized.relativePath,
+ content: remote.content,
+ modifiedAt: remote.modifiedAt,
+ })
+ }
+ continue
+ }
+
+ if (local.content === remote.content) {
+ // Content matches - no disk write needed but track for metadata
+ unchanged.push({
+ name: normalized.relativePath,
+ content: remote.content,
+ modifiedAt: remote.modifiedAt,
+ })
+ continue
+ }
+
+ if (!detect || preferRemote) {
+ writes.push({
+ name: normalized.relativePath,
+ content: remote.content,
+ modifiedAt: remote.modifiedAt,
+ })
+ continue
+ }
+
+ // Check if local file is "clean" (matches last persisted state)
+ // If so, we can safely overwrite it with remote changes
+ // Both sides have the file with different content -> conflict
+ const localClean = persisted ? localMatchesPersisted : undefined
+ conflicts.push({
+ fileName: normalized.relativePath,
+ localContent: local.content,
+ remoteContent: remote.content,
+ localModifiedAt: local.modifiedAt,
+ remoteModifiedAt: remote.modifiedAt,
+ lastSyncedAt: persisted?.timestamp,
+ localClean,
+ })
+ }
+
+ // Process local-only files (not present in remote)
+ for (const local of localFiles) {
+ const localKey = normalizeForComparison(local.name)
+ if (!processedFiles.has(localKey)) {
+ const persisted = getPersistedState(local.name)
+ if (persisted) {
+ // File was previously synced but now missing from remote → deleted in Framer
+ const localHash = hashFileContent(local.content)
+ const localClean = localHash === persisted.contentHash
+ debug(
+ `Conflict: ${local.name} deleted in Framer (localClean=${String(localClean)})`
+ )
+ conflicts.push({
+ fileName: local.name,
+ localContent: local.content,
+ remoteContent: null,
+ localModifiedAt: local.modifiedAt,
+ lastSyncedAt: persisted.timestamp,
+ localClean,
+ })
+ } else {
+ // New local file (never synced before): upload later
+ localOnly.push({
+ name: local.name,
+ content: local.content,
+ modifiedAt: local.modifiedAt,
+ })
+ }
+ }
+ }
+
+ // Check for files in persisted state that are missing from BOTH sides
+ // These were deleted on both sides while offline - auto-clean them (no conflict)
+ if (persistedState) {
+ for (const [fileName] of persistedState) {
+ const normalizedKey = normalizeForComparison(fileName)
+ const inLocal = localFileMap.has(normalizedKey)
+ const inRemote = remoteFileMap.has(normalizedKey)
+ if (!inLocal && !inRemote) {
+ debug(`[AUTO-RESOLVE] ${fileName}: deleted on both sides, no conflict`)
+ // No action needed - the file is gone from both sides
+ // The persisted state will be cleaned up when we persist
+ }
+ }
+ }
+
+ return { conflicts, writes, localOnly, unchanged }
+}
+
+export interface AutoResolveResult {
+ autoResolvedLocal: Conflict[]
+ autoResolvedRemote: Conflict[]
+ remainingConflicts: Conflict[]
+}
+
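+/**
+ * Classifies each conflict using two signals: whether the remote file changed after the
+ * last sync (latest remote version vs. lastSyncedAt, with a small drift allowance) and
+ * whether the local copy is clean (matches the persisted hash). If only one side changed,
+ * the conflict is auto-resolved to that side; if neither changed it is dropped; if both
+ * changed, or version/sync data is missing, it is kept for manual resolution.
+ */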
+export function autoResolveConflicts(
+ conflicts: Conflict[],
+ versions: ConflictVersionData[],
+ options: { remoteDriftMs?: number } = {}
+): AutoResolveResult {
+ const versionMap = new Map(
+ versions.map((version) => [version.fileName, version.latestRemoteVersionMs])
+ )
+ const remoteDriftMs = options.remoteDriftMs ?? DEFAULT_REMOTE_DRIFT_MS
+
+ const autoResolvedLocal: Conflict[] = []
+ const autoResolvedRemote: Conflict[] = []
+ const remainingConflicts: Conflict[] = []
+
+ for (const conflict of conflicts) {
+ const latestRemoteVersionMs = versionMap.get(conflict.fileName)
+ const lastSyncedAt = conflict.lastSyncedAt
+ const localClean = conflict.localClean === true
+
+ debug(`Auto-resolve checking ${conflict.fileName}`)
+
+ // Remote deletion: file deleted in Framer
+ if (conflict.remoteContent === null) {
+ if (localClean) {
+ debug(` Remote deleted, local clean -> REMOTE (delete locally)`)
+ autoResolvedRemote.push(conflict)
+ } else {
+ debug(` Remote deleted, local modified -> conflict`)
+ remainingConflicts.push(conflict)
+ }
+ continue
+ }
+
+ if (!latestRemoteVersionMs) {
+ debug(` No remote version data, keeping conflict`)
+ remainingConflicts.push(conflict)
+ continue
+ }
+
+ if (!lastSyncedAt) {
+ debug(` No last sync timestamp, keeping conflict`)
+ remainingConflicts.push(conflict)
+ continue
+ }
+
+ debug(` Remote: ${new Date(latestRemoteVersionMs).toISOString()}`)
+ debug(` Synced: ${new Date(lastSyncedAt).toISOString()}`)
+
+ const remoteUnchanged =
+ latestRemoteVersionMs <= lastSyncedAt + remoteDriftMs
+ // localClean already declared above for remote deletion handling
+
+ if (remoteUnchanged && !localClean) {
+ debug(` Remote unchanged, local changed -> LOCAL`)
+ autoResolvedLocal.push(conflict)
+ } else if (localClean && !remoteUnchanged) {
+ debug(` Local unchanged, remote changed -> REMOTE`)
+ autoResolvedRemote.push(conflict)
+ } else if (remoteUnchanged && localClean) {
+ debug(` Both unchanged, skipping`)
+ } else {
+ debug(` Both changed, real conflict`)
+ remainingConflicts.push(conflict)
+ }
+ }
+
+ return {
+ autoResolvedLocal,
+ autoResolvedRemote,
+ remainingConflicts,
+ }
+}
+
+/**
+ * Writes remote files to disk and updates hash tracker to prevent echoes
+ * CRITICAL: Update hashTracker BEFORE writing to disk
+ */
+export async function writeRemoteFiles(
+ files: FileInfo[],
+ filesDir: string,
+ hashTracker: HashTracker,
+ installer?: { process: (fileName: string, content: string) => void }
+): Promise<void> {
+ debug(`Writing ${files.length} remote files`)
+
+ for (const file of files) {
+ try {
+ const normalized = resolveRemoteReference(filesDir, file.name)
+ const fullPath = normalized.absolutePath
+
+ // Ensure directory exists
+ await fs.mkdir(path.dirname(fullPath), { recursive: true })
+
+ // CRITICAL ORDER: Update hash tracker FIRST (in memory)
+ hashTracker.remember(normalized.relativePath, file.content)
+
+ // THEN write to disk
+ await fs.writeFile(fullPath, file.content, "utf-8")
+
+ debug(`Wrote file: ${normalized.relativePath}`)
+
+ // Trigger type installer if available
+ installer?.process(normalized.relativePath, file.content)
+ } catch (err) {
+ warn(`Failed to write file ${file.name}:`, err)
+ }
+ }
+}
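+
+// Why remember-before-write: the chokidar change event fired by this write is later mapped
+// to SEND_LOCAL_CHANGE, where hashTracker.shouldSkip() sees the remembered hash and drops
+// the event as an echo instead of re-uploading the file to Framer.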
+
+/**
+ * Deletes a local file from disk
+ */
+export async function deleteLocalFile(
+ fileName: string,
+ filesDir: string,
+ hashTracker: HashTracker
+): Promise<void> {
+ const normalized = resolveRemoteReference(filesDir, fileName)
+
+ try {
+ // CRITICAL ORDER: Mark delete FIRST (in memory) to prevent echo
+ hashTracker.markDelete(normalized.relativePath)
+
+ // THEN delete from disk
+ await fs.unlink(normalized.absolutePath)
+
+ // Clear the hash immediately
+ hashTracker.forget(normalized.relativePath)
+
+ debug(`Deleted file: ${normalized.relativePath}`)
+ } catch (err) {
+ const nodeError = err as NodeJS.ErrnoException
+
+ if (nodeError.code === "ENOENT") {
+ // Treat missing files as already deleted to keep hash tracker in sync
+ hashTracker.forget(normalized.relativePath)
+ debug(`File already deleted: ${normalized.relativePath}`)
+ return
+ }
+
+ // Clear pending delete marker immediately on failure
+ hashTracker.clearDelete(normalized.relativePath)
+ warn(`Failed to delete file ${fileName}:`, err)
+ }
+}
+
+/**
+ * Reads a single file from disk (safe, returns null on error)
+ */
+export async function readFileSafe(
+ fileName: string,
+ filesDir: string
+): Promise<string | null> {
+ const normalized = resolveRemoteReference(filesDir, fileName)
+
+ try {
+ return await fs.readFile(normalized.absolutePath, "utf-8")
+ } catch {
+ return null
+ }
+}
+
+/**
+ * Filter out files whose content matches the last remembered hash.
+ * Used to skip inbound echoes of our own local sends.
+ */
+export function filterEchoedFiles(
+ files: FileInfo[],
+ hashTracker: ReturnType<typeof createHashTracker>
+): FileInfo[] {
+ return files.filter((file) => {
+ return !hashTracker.shouldSkip(file.name, file.content)
+ })
+}
+
+function resolveRemoteReference(filesDir: string, rawName: string) {
+ const normalized = sanitizeRelativePath(rawName)
+ const absolutePath = path.join(filesDir, normalized.relativePath)
+ return { ...normalized, absolutePath }
+}
+
+function sanitizeRelativePath(relativePath: string) {
+ const trimmed = normalizePath(relativePath.trim())
+ const hasExtension = SUPPORTED_EXTENSIONS.some((ext) =>
+ trimmed.toLowerCase().endsWith(ext)
+ )
+ const candidate = hasExtension ? trimmed : `${trimmed}${DEFAULT_EXTENSION}`
+ // Don't capitalize when processing remote files - preserve exact casing from Framer
+ const sanitized = sanitizeFilePath(candidate, false)
+ const normalized = normalizePath(sanitized.path)
+
+ return {
+ relativePath: normalized,
+ extension:
+ sanitized.extension || path.extname(normalized) || DEFAULT_EXTENSION,
+ }
+}
+
+function isSupportedExtension(fileName: string) {
+ const lower = fileName.toLowerCase()
+ return SUPPORTED_EXTENSIONS.some((ext) => lower.endsWith(ext))
+}
diff --git a/packages/code-link-cli/src/helpers/installer.ts b/packages/code-link-cli/src/helpers/installer.ts
new file mode 100644
index 000000000..b66936393
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/installer.ts
@@ -0,0 +1,626 @@
+/**
+ * Type installer helper using @typescript/ata
+ */
+
+import { setupTypeAcquisition } from "@typescript/ata"
+import ts from "typescript"
+import path from "path"
+import fs from "fs/promises"
+import { extractImports } from "../utils/imports.js"
+import { debug, warn } from "../utils/logging.js"
+
+export interface InstallerConfig {
+ projectDir: string
+ allowUnsupportedNpm?: boolean
+}
+
+/** npm registry package.json exports field value */
+interface NpmExportValue {
+ import?: string
+ require?: string
+ types?: string
+}
+
+/** npm registry API response for a single package version */
+interface NpmPackageVersion {
+ exports?: Record<string, NpmExportValue | string>
+}
+
+/** npm registry API response */
+interface NpmRegistryResponse {
+ "dist-tags"?: { latest?: string }
+ versions?: Record<string, NpmPackageVersion>
+}
+
+const FETCH_TIMEOUT_MS = 60_000
+const MAX_FETCH_RETRIES = 3
+const REACT_TYPES_VERSION = "18.3.12"
+const REACT_DOM_TYPES_VERSION = "18.3.1"
+const CORE_LIBRARIES = ["framer-motion", "framer"]
+const JSON_EXTENSION_REGEX = /\.json$/i
+
+/**
+ * Packages that are officially supported for type acquisition.
+ * Use --unsupported-npm flag to allow other packages.
+ */
+const SUPPORTED_PACKAGES = new Set([
+ "framer",
+ "framer-motion",
+ "react",
+ "@types/react",
+ "eventemitter3",
+ "csstype",
+ "motion-dom",
+ "motion-utils",
+])
+
+/**
+ * Installer class for managing automatic type acquisition.
+ */
+export class Installer {
+ private projectDir: string
+ private allowUnsupportedNpm: boolean
+ private ata: ReturnType<typeof setupTypeAcquisition>
+ private processedImports = new Set()
+ private initializationPromise: Promise<void> | null = null
+
+ constructor(config: InstallerConfig) {
+ this.projectDir = config.projectDir
+ this.allowUnsupportedNpm = config.allowUnsupportedNpm ?? false
+
+ const seenPackages = new Set()
+
+ this.ata = setupTypeAcquisition({
+ projectName: "framer-code-link",
+ typescript: ts,
+ logger: console,
+ fetcher: fetchWithRetry,
+ delegate: {
+ started: () => {
+ seenPackages.clear()
+ debug("ATA: fetching type definitions...")
+ },
+ progress: () => {
+ // intentionally noop – progress noise is not helpful in CLI output
+ },
+ finished: (files) => {
+ if (files.size > 0) {
+ debug("ATA: type acquisition complete")
+ }
+ },
+ errorMessage: (message: string, error: Error) => {
+ warn(`ATA warning: ${message}`, error)
+ },
+ receivedFile: (code: string, receivedPath: string) => {
+ void (async () => {
+ const normalized = receivedPath.replace(/^\//, "")
+ const destination = path.join(this.projectDir, normalized)
+
+ const pkgMatch = /\/node_modules\/(@?[^/]+(?:\/[^/]+)?)\//.exec(
+ receivedPath
+ )
+
+ // Check if file already exists with same content
+ try {
+ const existing = await fs.readFile(destination, "utf-8")
+ if (existing === code) {
+ if (pkgMatch && !seenPackages.has(pkgMatch[1])) {
+ seenPackages.add(pkgMatch[1])
+ debug(`📦 Types: ${pkgMatch[1]} (from disk cache)`)
+ }
+ return // Skip write if identical
+ }
+ } catch {
+ // File doesn't exist or can't be read, proceed with write
+ }
+
+ if (pkgMatch && !seenPackages.has(pkgMatch[1])) {
+ seenPackages.add(pkgMatch[1])
+ debug(`📦 Types: ${pkgMatch[1]}`)
+ }
+
+ await this.writeTypeFile(receivedPath, code)
+ })()
+ },
+ },
+ })
+
+ debug("Type installer initialized")
+ }
+
+ /**
+ * Ensure the project scaffolding exists (tsconfig, declarations, etc.)
+ */
+ async initialize(): Promise<void> {
+ if (this.initializationPromise) {
+ return this.initializationPromise
+ }
+
+ this.initializationPromise = this.initializeProject()
+ .then(() => {
+ debug("Type installer initialization complete")
+ })
+ .catch((err: unknown) => {
+ this.initializationPromise = null
+ throw err
+ })
+
+ return this.initializationPromise
+ }
+
+ /**
+ * Fire-and-forget processing of a component file to fetch missing types.
+ * JSON files are ignored.
+ */
+ process(fileName: string, content: string): void {
+ if (!content || JSON_EXTENSION_REGEX.test(fileName)) {
+ return
+ }
+
+ Promise.resolve()
+ .then(async () => {
+ await this.processImports(fileName, content)
+ })
+ .catch((err: unknown) => {
+ debug(`Type installer failed for ${fileName}`, err)
+ })
+ }
+
+ // ---------------------------------------------------------------------------
+ // Internal helpers
+ // ---------------------------------------------------------------------------
+
+ private async initializeProject(): Promise<void> {
+ await Promise.all([
+ this.ensureTsConfig(),
+ this.ensurePrettierConfig(),
+ this.ensureFramerDeclarations(),
+ this.ensurePackageJson(),
+ ])
+
+ // Fire-and-forget type installation - don't block initialization
+ Promise.resolve()
+ .then(async () => {
+ await this.ensureReact18Types()
+
+ const coreImports = CORE_LIBRARIES.map(
+ (lib) => `import "${lib}";`
+ ).join("\n")
+ await this.ata(coreImports)
+ })
+ .catch((err: unknown) => {
+ debug("Type installation failed", err)
+ })
+ }
+
+ private async processImports(
+ fileName: string,
+ content: string
+ ): Promise<void> {
+ const allImports = extractImports(content).filter((i) => i.type === "npm")
+
+ if (allImports.length === 0) return
+
+ // Filter to supported packages unless --unsupported-npm flag is set
+ const imports = this.allowUnsupportedNpm
+ ? allImports
+ : allImports.filter((i) => this.isSupportedPackage(i.name))
+
+ const unsupportedCount = allImports.length - imports.length
+ if (unsupportedCount > 0 && !this.allowUnsupportedNpm) {
+ const unsupported = allImports
+ .filter((i) => !this.isSupportedPackage(i.name))
+ .map((i) => i.name)
+ debug(
+ `Skipping unsupported packages: ${unsupported.join(", ")} (use --unsupported-npm to enable)`
+ )
+ }
+
+ if (imports.length === 0) {
+ return
+ }
+
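+ // Deduplicate on the sorted set of package names so the same import combination is only processed once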
+ const hash = imports
+ .map((imp) => imp.name)
+ .sort()
+ .join(",")
+
+ if (this.processedImports.has(hash)) {
+ return
+ }
+
+ this.processedImports.add(hash)
+ debug(`Processing imports for ${fileName} (${imports.length} packages)`)
+
+ // Build filtered content with only supported imports for ATA
+ const filteredContent = this.allowUnsupportedNpm
+ ? content
+ : this.buildFilteredImports(imports)
+
+ try {
+ await this.ata(filteredContent)
+ } catch (err) {
+ warn(`ATA failed for ${fileName}`, err as Error)
+ }
+ }
+
+ /**
+ * Check if a package is in the supported list.
+ * Also checks for subpath imports (e.g., "framer/build" -> "framer")
+ */
+ private isSupportedPackage(pkgName: string): boolean {
+ // Direct match
+ if (SUPPORTED_PACKAGES.has(pkgName)) {
+ return true
+ }
+
+ // Check if base package is supported (e.g., "framer-motion/dist" -> "framer-motion")
+ const basePkg = pkgName.startsWith("@")
+ ? pkgName.split("/").slice(0, 2).join("/")
+ : pkgName.split("/")[0]
+
+ return SUPPORTED_PACKAGES.has(basePkg)
+ }
+
+ /**
+ * Build synthetic import statements for ATA from filtered imports
+ */
+ private buildFilteredImports(imports: { name: string }[]): string {
+ return imports.map((imp) => `import "${imp.name}";`).join("\n")
+ }
+
+ private async writeTypeFile(
+ receivedPath: string,
+ code: string
+ ): Promise<void> {
+ const normalized = receivedPath.replace(/^\//, "")
+ const destination = path.join(this.projectDir, normalized)
+
+ try {
+ await fs.mkdir(path.dirname(destination), { recursive: true })
+ await fs.writeFile(destination, code, "utf-8")
+ } catch (err) {
+ warn(`Failed to write type file ${destination}`, err)
+ return
+ }
+
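+ // @types packages also need a package.json next to index.d.ts so module resolution can pick them up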
+ if (/node_modules\/@types\/[^/]+\/index\.d\.ts$/.exec(normalized)) {
+ await this.ensureTypesPackageJson(normalized)
+ }
+
+ if (normalized.includes("node_modules/@types/react/index.d.ts")) {
+ await this.patchReactTypes(destination)
+ }
+ }
+
+ private async ensureTypesPackageJson(normalizedPath: string): Promise<void> {
+ const pkgMatch = /node_modules\/(@types\/[^/]+)\//.exec(normalizedPath)
+ if (!pkgMatch) return
+
+ const pkgName = pkgMatch[1]
+ const pkgDir = path.join(this.projectDir, "node_modules", pkgName)
+ const pkgJsonPath = path.join(pkgDir, "package.json")
+
+ try {
+ const response = await fetch(`https://registry.npmjs.org/${pkgName}`)
+ if (!response.ok) return
+
+ const npmData = (await response.json()) as NpmRegistryResponse
+ const version = npmData["dist-tags"]?.latest
+ if (!version || !npmData.versions?.[version]) return
+
+ const pkg = npmData.versions[version]
+ if (pkg.exports) {
+ for (const key of Object.keys(pkg.exports)) {
+ pkg.exports[key] = fixExportTypes(pkg.exports[key])
+ }
+ }
+
+ await fs.mkdir(pkgDir, { recursive: true })
+ await fs.writeFile(pkgJsonPath, JSON.stringify(pkg, null, 2))
+ } catch {
+ // best-effort
+ }
+ }
+
+ private async patchReactTypes(destination: string): Promise<void> {
+ try {
+ let content = await fs.readFile(destination, "utf-8")
+ if (content.includes("function useRef()")) {
+ return
+ }
+
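+ // Append a zero-argument useRef() overload after the existing initialValue overload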
+ const overloadPattern =
+ /function useRef<T>\(initialValue: T \| undefined\): RefObject<T \| undefined>;/
+
+ if (!content.includes("function useRef(initialValue: T | undefined)")) {
+ return
+ }
+
+ content = content.replace(
+ overloadPattern,
+ `function useRef<T>(initialValue: T | undefined): RefObject<T | undefined>;
+ function useRef<T>(): MutableRefObject<T | undefined>;`
+ )
+
+ await fs.writeFile(destination, content, "utf-8")
+ } catch {
+ // ignore patch failures
+ }
+ }
+
+ private async ensureTsConfig(): Promise<void> {
+ const tsconfigPath = path.join(this.projectDir, "tsconfig.json")
+ try {
+ await fs.access(tsconfigPath)
+ debug("tsconfig.json already exists")
+ } catch {
+ const config = {
+ compilerOptions: {
+ noEmit: true,
+ target: "ES2021",
+ lib: ["ES2021", "DOM", "DOM.Iterable"],
+ module: "ESNext",
+ moduleResolution: "bundler",
+ customConditions: ["source"],
+ jsx: "react-jsx",
+ allowJs: true,
+ allowSyntheticDefaultImports: true,
+ strict: false,
+ allowImportingTsExtensions: true,
+ resolveJsonModule: true,
+ esModuleInterop: true,
+ skipLibCheck: true,
+ typeRoots: ["./node_modules/@types"],
+ },
+ include: ["files/**/*", "framer-modules.d.ts"],
+ }
+ await fs.writeFile(tsconfigPath, JSON.stringify(config, null, 2))
+ debug("Created tsconfig.json")
+ }
+ }
+
+ private async ensurePrettierConfig(): Promise<void> {
+ const prettierPath = path.join(this.projectDir, ".prettierrc")
+ try {
+ await fs.access(prettierPath)
+ debug(".prettierrc already exists")
+ } catch {
+ const config = {
+ tabWidth: 4,
+ semi: false,
+ trailingComma: "es5",
+ }
+ await fs.writeFile(prettierPath, JSON.stringify(config, null, 2))
+ debug("Created .prettierrc")
+ }
+ }
+
+ private async ensureFramerDeclarations(): Promise<void> {
+ const declarationsPath = path.join(this.projectDir, "framer-modules.d.ts")
+ try {
+ await fs.access(declarationsPath)
+ debug("framer-modules.d.ts already exists")
+ } catch {
+ const declarations = `// Type declarations for Framer URL imports
+declare module "https://framer.com/m/*"
+
+declare module "https://framerusercontent.com/*"
+
+declare module "*.json"
+`
+ await fs.writeFile(declarationsPath, declarations)
+ debug("Created framer-modules.d.ts")
+ }
+ }
+
+ private async ensurePackageJson(): Promise<void> {
+ const packagePath = path.join(this.projectDir, "package.json")
+ try {
+ await fs.access(packagePath)
+ debug("package.json already exists")
+ } catch {
+ const pkg = {
+ name: path.basename(this.projectDir),
+ version: "1.0.0",
+ private: true,
+ description: "Framer files synced with framer-code-link",
+ }
+ await fs.writeFile(packagePath, JSON.stringify(pkg, null, 2))
+ debug("Created package.json")
+ }
+ }
+
+ private async ensureReact18Types(): Promise<void> {
+ const reactTypesDir = path.join(
+ this.projectDir,
+ "node_modules/@types/react"
+ )
+
+ const reactFiles = [
+ "package.json",
+ "index.d.ts",
+ "global.d.ts",
+ "jsx-runtime.d.ts",
+ "jsx-dev-runtime.d.ts",
+ ]
+
+ if (
+ await this.hasTypePackage(reactTypesDir, REACT_TYPES_VERSION, reactFiles)
+ ) {
+ debug("📦 React types (from cache)")
+ } else {
+ debug("Downloading React 18 types...")
+ await this.downloadTypePackage(
+ "@types/react",
+ REACT_TYPES_VERSION,
+ reactTypesDir,
+ reactFiles
+ )
+ }
+
+ const reactDomDir = path.join(
+ this.projectDir,
+ "node_modules/@types/react-dom"
+ )
+
+ const reactDomFiles = ["package.json", "index.d.ts", "client.d.ts"]
+
+ if (
+ await this.hasTypePackage(
+ reactDomDir,
+ REACT_DOM_TYPES_VERSION,
+ reactDomFiles
+ )
+ ) {
+ debug("📦 React DOM types (from cache)")
+ } else {
+ await this.downloadTypePackage(
+ "@types/react-dom",
+ REACT_DOM_TYPES_VERSION,
+ reactDomDir,
+ reactDomFiles
+ )
+ }
+ }
+
+ private async hasTypePackage(
+ destinationDir: string,
+ version: string,
+ files: string[]
+ ): Promise<boolean> {
+ try {
+ const pkgJsonPath = path.join(destinationDir, "package.json")
+ const pkgJson = await fs.readFile(pkgJsonPath, "utf-8")
+ const parsed = JSON.parse(pkgJson) as { version?: string }
+
+ if (parsed.version !== version) {
+ return false
+ }
+
+ for (const file of files) {
+ if (file === "package.json") continue
+ await fs.access(path.join(destinationDir, file))
+ }
+
+ return true
+ } catch {
+ return false
+ }
+ }
+
+ private async downloadTypePackage(
+ pkgName: string,
+ version: string,
+ destinationDir: string,
+ files: string[]
+ ): Promise<void> {
+ const baseUrl = `https://unpkg.com/${pkgName}@${version}`
+ await fs.mkdir(destinationDir, { recursive: true })
+
+ await Promise.all(
+ files.map(async (file) => {
+ const destination = path.join(destinationDir, file)
+
+ // Check if file already exists
+ try {
+ await fs.access(destination)
+ return // Skip if exists
+ } catch {
+ // File doesn't exist, download it
+ }
+
+ try {
+ const response = await fetch(`${baseUrl}/${file}`)
+ if (!response.ok) return
+ const content = await response.text()
+ await fs.writeFile(destination, content)
+ } catch {
+ // ignore per-file failures
+ }
+ })
+ )
+ }
+}
+
+// -----------------------------------------------------------------------------
+// Helpers
+// -----------------------------------------------------------------------------
+
+/**
+ * Transform package.json exports to include .d.ts type paths
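+ * For example, "./index.js" maps to { types: "./index.d.ts" } and ".cjs" paths map to ".d.cts".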
+ */
+function fixExportTypes(
+ value: string | NpmExportValue
+): string | NpmExportValue {
+ if (typeof value === "string") {
+ return {
+ types: value.replace(/\.js$/, ".d.ts").replace(/\.cjs$/, ".d.cts"),
+ }
+ }
+
+ if ((value.import ?? value.require) && !value.types) {
+ const base = value.import ?? value.require
+ value.types = base?.replace(/\.js$/, ".d.ts").replace(/\.cjs$/, ".d.cts")
+ }
+
+ return value
+}
+
+interface FetchError extends Error {
+ cause?: { code?: string }
+}
+
+async function fetchWithRetry(
+ url: string | URL | Request,
+ init?: RequestInit,
+ retries = MAX_FETCH_RETRIES
+ ): Promise<Response> {
+ let urlString: string
+ if (typeof url === "string") {
+ urlString = url
+ } else if (url instanceof URL) {
+ urlString = url.href
+ } else {
+ urlString = url.url
+ }
+
+ for (let attempt = 1; attempt <= retries; attempt++) {
+ const controller = new AbortController()
+ const timeout = setTimeout(() => {
+ controller.abort()
+ }, FETCH_TIMEOUT_MS)
+
+ try {
+ const response = await fetch(url, {
+ ...init,
+ signal: controller.signal,
+ })
+ clearTimeout(timeout)
+ return response
+ } catch (err: unknown) {
+ clearTimeout(timeout)
+ const error = err as FetchError
+
+ const isRetryable =
+ error.cause?.code === "ECONNRESET" ||
+ error.cause?.code === "ETIMEDOUT" ||
+ error.cause?.code === "UND_ERR_CONNECT_TIMEOUT" ||
+ error.message.includes("timeout")
+
+ if (attempt < retries && isRetryable) {
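+ // Linear backoff: wait 1s after the first failure, 2s after the second, and so on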
+ const delay = attempt * 1_000
+ warn(
+ `Fetch failed (${error.cause?.code ?? error.message}) for ${urlString}, retrying in ${delay}ms...`
+ )
+ await new Promise((resolve) => setTimeout(resolve, delay))
+ continue
+ }
+
+ warn(`Fetch failed for ${urlString}`, error)
+ throw error
+ }
+ }
+
+ throw new Error(`Max retries exceeded for ${urlString}`)
+}
diff --git a/packages/code-link-cli/src/helpers/sync-validator.ts b/packages/code-link-cli/src/helpers/sync-validator.ts
new file mode 100644
index 000000000..fe74f5211
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/sync-validator.ts
@@ -0,0 +1,85 @@
+/**
+ * Sync Validation Helper
+ *
+ * Pure functions for validating incoming changes during live sync.
+ * Determines if a change should be applied, queued, or rejected.
+ */
+
+import { hashFileContent } from "../utils/state-persistence.js"
+import type { FileSyncMetadata } from "../utils/file-metadata-cache.js"
+
+/**
+ * Result of validating an incoming file change
+ */
+export type ChangeValidation =
+ | { action: "apply"; reason: "new-file" | "safe-update" }
+ | { action: "queue"; reason: "snapshot-in-progress" }
+ | { action: "reject"; reason: "stale-base" | "unknown-file" }
+
+/**
+ * Validates whether an incoming REMOTE file change should be applied
+ *
+ * During watching mode, we trust remote changes and apply them immediately.
+ * During snapshot_processing, we queue them for later (to avoid race conditions).
+ *
+ * Note: This is for INCOMING changes from remote. Local changes (from watcher)
+ * are handled separately and always sent during watching mode.
+ */
+export function validateIncomingChange(
+ fileMeta: FileSyncMetadata | undefined,
+ currentMode: string
+): ChangeValidation {
+ // Queue changes that arrive during snapshot processing
+ if (currentMode === "snapshot_processing" || currentMode === "handshaking") {
+ return { action: "queue", reason: "snapshot-in-progress" }
+ }
+
+ // During watching, apply changes immediately
+ if (currentMode === "watching") {
+ if (!fileMeta) {
+ // New file from remote
+ return { action: "apply", reason: "new-file" }
+ }
+
+ // Existing file - trust the remote (we're in steady state)
+ return { action: "apply", reason: "safe-update" }
+ }
+
+ // During conflict resolution, queue for now (could be enhanced later)
+ if (currentMode === "conflict_resolution") {
+ return { action: "queue", reason: "snapshot-in-progress" }
+ }
+
+ // Shouldn't receive changes while disconnected
+ return { action: "reject", reason: "unknown-file" }
+}
+
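+// Example: validateIncomingChange(undefined, "watching") yields { action: "apply", reason: "new-file" }
+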
+/**
+ * Validates whether an outgoing LOCAL change should be sent to remote
+ *
+ * Checks if the local file has actually changed since last sync
+ * to avoid sending duplicate updates.
+ *
+ * Note: This will be used when WATCHER_EVENT is migrated to the state machine.
+ * Currently, the legacy watcher path always sends changes (with echo prevention).
+ */
+export function validateOutgoingChange(
+ fileName: string,
+ content: string,
+ fileMeta: FileSyncMetadata | undefined
+): { shouldSend: boolean; reason: string } {
+ const currentHash = hashFileContent(content)
+
+ if (!fileMeta) {
+ // New local file
+ return { shouldSend: true, reason: "new-file" }
+ }
+
+ if (fileMeta.localHash === currentHash) {
+ // No change since we last saw this file
+ return { shouldSend: false, reason: "no-change" }
+ }
+
+ // File has changed
+ return { shouldSend: true, reason: "changed" }
+}
diff --git a/packages/code-link-cli/src/helpers/user-actions.ts b/packages/code-link-cli/src/helpers/user-actions.ts
new file mode 100644
index 000000000..1f52806f8
--- /dev/null
+++ b/packages/code-link-cli/src/helpers/user-actions.ts
@@ -0,0 +1,160 @@
+/**
+ * User Action Coordinator
+ *
+ * Provides a clean awaitable API for user confirmations via the Plugin.
+ * Maybe an unneeded abstraction, but let's keep it until we see whether we need more user actions.
+ */
+
+import type { WebSocket } from "ws"
+import type { Conflict } from "../types.js"
+import { sendMessage } from "./connection.js"
+import { debug, warn } from "../utils/logging.js"
+
+class PluginDisconnectedError extends Error {
+ constructor() {
+ super("Plugin disconnected")
+ this.name = "PluginDisconnectedError"
+ }
+}
+
+interface PendingAction {
+ resolve: (value: unknown) => void
+ reject: (error: Error) => void
+}
+
+export class UserActionCoordinator {
+ private pendingActions = new Map<string, PendingAction>()
+
+ /**
+ * Register a pending action and return a typed promise
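+ * The promise settles when the plugin responds, and rejects if the plugin disconnects.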
+ */
+ private awaitAction<T>(actionId: string, description: string): Promise<T> {
+ return new Promise<T>((resolve, reject) => {
+ this.pendingActions.set(actionId, {
+ resolve: resolve as (value: unknown) => void,
+ reject,
+ })
+ debug(`Awaiting ${description}: ${actionId}`)
+ })
+ }
+
+ /**
+ * Sends the delete request to the plugin and awaits the user's decision
+ */
+ async requestDeleteDecision(
+ socket: WebSocket | null,
+ {
+ fileName,
+ requireConfirmation,
+ }: { fileName: string; requireConfirmation: boolean }
+ ): Promise<boolean> {
+ if (!socket) {
+ throw new Error("Cannot request delete decision: plugin not connected")
+ }
+
+ if (requireConfirmation) {
+ const confirmationPromise = this.awaitAction<boolean>(
+ `delete:${fileName}`,
+ "delete confirmation"
+ )
+
+ await sendMessage(socket, {
+ type: "file-delete",
+ fileNames: [fileName],
+ requireConfirmation: true,
+ })
+
+ try {
+ return await confirmationPromise
+ } catch (err) {
+ if (err instanceof PluginDisconnectedError) {
+ debug(
+ `Plugin disconnected while waiting for delete confirmation: ${fileName}`
+ )
+ return false
+ }
+ throw err
+ }
+ }
+
+ await sendMessage(socket, {
+ type: "file-delete",
+ fileNames: [fileName],
+ requireConfirmation: false,
+ })
+
+ return true
+ }
+
+ /**
+ * Sends conflicts to the plugin and awaits user resolutions
+ */
+ async requestConflictDecisions(
+ socket: WebSocket | null,
+ conflicts: Conflict[]
+ ): Promise