diff --git a/packages/api-server/src/__tests__/watchPaths.test.ts b/packages/api-server/src/__tests__/watchPaths.test.ts
new file mode 100644
index 0000000000..bad052c398
--- /dev/null
+++ b/packages/api-server/src/__tests__/watchPaths.test.ts
@@ -0,0 +1,448 @@
+import fs from 'node:fs'
+import os from 'node:os'
+import path from 'node:path'
+
+import chokidar from 'chokidar'
+import { vi, afterAll, beforeAll, describe, expect, it } from 'vitest'
+
+import { importStatementPath } from '@cedarjs/project-config'
+
+import { pathsToWatch, getIgnoreFunction } from '../watchPaths.js'
+
+describe('watchPaths', () => {
+  let tmpDir: string
+  const originalRwjsCwd = process.env.RWJS_CWD
+
+  beforeAll(async () => {
+    // Create an isolated temp project directory
+    tmpDir = await fs.promises.mkdtemp(
+      path.join(os.tmpdir(), 'cedar-workspace-packages-test-'),
+    )
+
+    // Ensure we are recognized as a Cedar project
+    await fs.promises.writeFile(path.join(tmpDir, 'cedar.toml'), '# cedar test')
+
+    // Root package.json with workspace globs
+    const rootPackageJson = {
+      name: 'workspace-test',
+      private: true,
+      workspaces: ['api', 'packages/*'],
+    }
+    await fs.promises.writeFile(
+      path.join(tmpDir, 'package.json'),
+      JSON.stringify(rootPackageJson, null, 2),
+    )
+
+    // Create a workspace package with a `src` and a `dist` directory. Only the
+    // `dist` directory should be watched
+    const fooSrcDir = path.join(tmpDir, 'packages', 'foo', 'src')
+    const fooDistDir = path.join(tmpDir, 'packages', 'foo', 'dist')
+    await fs.promises.mkdir(fooSrcDir, { recursive: true })
+    await fs.promises.mkdir(fooDistDir, { recursive: true })
+    const fooIndexSrcPath = path.join(fooSrcDir, 'index.ts')
+    await fs.promises.writeFile(fooIndexSrcPath, 'export const foo = 1')
+    const fooIndexDistPath = path.join(fooDistDir, 'index.js')
+    await fs.promises.writeFile(fooIndexDistPath, 'export const foo = 1')
+
+    // Create the `package.json` for the workspace package so
+    // workspacePackages() will detect it as a workspace dependency from the
+    // `api` package.
+    await fs.promises.writeFile(
+      path.join(tmpDir, 'packages', 'foo', 'package.json'),
+      JSON.stringify({ name: 'foo', version: '1.0.0' }, null, 2),
+    )
+
+    // Create an `api` package that depends on the workspace package via
+    // `workspace:*`
+    const apiDir = path.join(tmpDir, 'api')
+    await fs.promises.mkdir(apiDir, { recursive: true })
+    const apiPackageJson = {
+      name: 'api',
+      version: '1.0.0',
+      dependencies: {
+        foo: 'workspace:*',
+      },
+    }
+    await fs.promises.writeFile(
+      path.join(apiDir, 'package.json'),
+      JSON.stringify(apiPackageJson, null, 2),
+    )
+
+    // Create a minimal Prisma config so `getIgnoreFunction()` can load it
+    // and determine the database directory without throwing.
+    await fs.promises.writeFile(
+      path.join(apiDir, 'prisma.config.cjs'),
+      "module.exports = { schema: 'schema.prisma' }",
+    )
+    await fs.promises.writeFile(path.join(apiDir, 'schema.prisma'), '')
+
+    // Create an `api/src` directory so chokidar will watch an existing path.
+    const apiSrcDir = path.join(apiDir, 'src')
+    await fs.promises.mkdir(apiSrcDir, { recursive: true })
+    await fs.promises.writeFile(
+      path.join(apiSrcDir, 'index.ts'),
+      "export const api = 'api'",
+    )
+
+    // Tell project-config to treat our temp dir as the project root
+    process.env.RWJS_CWD = tmpDir
+  })
+
+  afterAll(async () => {
+    // Restore environment and cleanup
+    if (originalRwjsCwd === undefined) {
+      delete process.env.RWJS_CWD
+    } else {
+      process.env.RWJS_CWD = originalRwjsCwd
+    }
+
+    try {
+      await fs.promises.rm(tmpDir, { recursive: true, force: true })
+    } catch {
+      // ignore cleanup errors
+    }
+  })
+
+  it('returns patterns that work with chokidar', async () => {
+    const patterns = await pathsToWatch()
+
+    // If no patterns were returned, collect and assert helpful debug info so
+    // failures on CI (particularly Windows runners) give actionable output.
+    if (patterns.length === 0) {
+      const packagesDir = path.join(tmpDir, 'packages')
+      const packagesDirExists = fs.existsSync(packagesDir)
+      expect(packagesDirExists).toBe(true)
+
+      const packageJsonPath = path.join(packagesDir, 'foo', 'package.json')
+      const packageJsonExists = fs.existsSync(packageJsonPath)
+      expect(packageJsonExists).toBe(true)
+
+      const rootPkg = JSON.parse(
+        await fs.promises.readFile(path.join(tmpDir, 'package.json'), 'utf8'),
+      )
+      expect(Array.isArray(rootPkg.workspaces)).toBe(true)
+      expect(
+        rootPkg.workspaces.some((w: string) => w.startsWith('packages/')),
+      ).toBe(true)
+
+      const apiPkg = JSON.parse(
+        await fs.promises.readFile(
+          path.join(tmpDir, 'api', 'package.json'),
+          'utf8',
+        ),
+      )
+      expect(apiPkg.dependencies?.foo).toBe('workspace:*')
+
+      const globPattern = path.join(packagesDir, '*').replaceAll('\\', '/')
+
+      let packageDirs: string[] = []
+      try {
+        // Mirror the logic in `workspacePackages()` which uses fs.promises.glob
+        // and Array.fromAsync to enumerate matching package directories.
+
+        packageDirs = await Array.fromAsync(fs.promises.glob(globPattern))
+      } catch (e: any) {
+        console.log('glob error', e?.message ?? e)
+      }
+
+      console.log(
+        JSON.stringify(
+          { patterns, packagesDir, globPattern, packageDirs, rootPkg, apiPkg },
+          null,
+          2,
+        ),
+      )
+
+      expect(packageDirs.length).toBeGreaterThan(0)
+    }
+
+    // Ensure we've normalized separators (no backslashes) so the test failure
+    // is explicit if normalization doesn't happen.
+    for (const p of patterns) {
+      expect(p.includes('\\')).toBe(false)
+    }
+
+    // Diagnostic logging: show raw and normalized patterns so CI logs are
+    // actionable if globbing doesn't behave as expected on a runner.
+    console.log('workspace patterns', JSON.stringify(patterns, null, 2))
+
+    // Diagnostic: expand the packages/* glob (like workspacePackages does) and
+    // log the matches. This helps surface platform-specific globbing issues,
+    // especially on Windows runners.
+    try {
+      const packagesDirForDebug = path.join(tmpDir, 'packages')
+      const globPatternForDebug = path
+        .join(packagesDirForDebug, '*')
+        .replaceAll('\\', '/')
+
+      const packageDirsForDebug = await Array.fromAsync(
+        fs.promises.glob(globPatternForDebug),
+      )
+      console.log('packages glob pattern:', globPatternForDebug)
+      console.log(
+        'packages glob matches:',
+        JSON.stringify(packageDirsForDebug, null, 2),
+      )
+    } catch (e: any) {
+      console.log('packages glob error:', e?.message ?? e)
+    }
+
+    const watcher = chokidar.watch(patterns, {
+      persistent: true,
+      ignoreInitial: true,
+    })
+
+    // Surface watcher errors immediately to test logs
+    watcher.on('error', (error) => {
+      console.error('chokidar watcher error:', error)
+    })
+
+    try {
+      // Wait until the watcher is ready
+      await new Promise<void>((resolve) => {
+        watcher.on('ready', () => {
+          try {
+            console.debug(
+              'chokidar ready; watched directories:',
+              JSON.stringify(watcher.getWatched(), null, 2),
+            )
+          } catch (e) {
+            console.debug('chokidar ready; could not serialize watched dirs', e)
+          }
+
+          resolve()
+        })
+      })
+
+      // Prepare a promise that resolves when chokidar reports the change
+      const eventPromise = new Promise<{ eventName: string; filePath: string }>(
+        (resolve, reject) => {
+          const timeout = setTimeout(() => {
+            reject(new Error('Timed out waiting for chokidar event'))
+          }, 10_000)
+
+          const onAll = (eventName: string, filePath: string) => {
+            try {
+              console.debug('chokidar event:', eventName, filePath)
+            } catch (e) {
+              console.debug('chokidar event logging failed', e)
+            }
+
+            // Normalize the reported path so this works across OSes
+            const normalized = String(filePath).replace(/\\/g, '/')
+
+            if (normalized.endsWith('/packages/foo/dist/index.js')) {
+              clearTimeout(timeout)
+              watcher.off('all', onAll)
+              resolve({ eventName, filePath })
+            }
+          }
+
+          watcher.on('all', onAll)
+        },
+      )
+
+      // Trigger a change in the watched file
+      const targetFile = path.join(
+        tmpDir,
+        'packages',
+        'foo',
+        'dist',
+        'index.js',
+      )
+      try {
+        const beforeStat = await fs.promises.stat(targetFile)
+        console.debug('targetFile mtime before append:', beforeStat.mtimeMs)
+      } catch (e) {
+        console.debug('stat before append failed:', e)
+      }
+
+      await fs.promises.appendFile(targetFile, '\n// update\n')
+
+      try {
+        const afterStat = await fs.promises.stat(targetFile)
+        console.debug('targetFile mtime after append:', afterStat.mtimeMs)
+      } catch (e) {
+        console.debug('stat after append failed:', e)
+      }
+
+      const { eventName } = await eventPromise
+
+      // chokidar could report either `add` (in some races) or `change` for the edit
+      expect(['add', 'change']).toContain(eventName)
+    } finally {
+      // Always close the watcher
+      await watcher.close()
+    }
+  }, 10_000)
+
+  it('chokidar triggers on new files added', async () => {
+    const patterns = await pathsToWatch()
+
+    const watcher = chokidar.watch(patterns, {
+      persistent: true,
+      ignoreInitial: true,
+    })
+
+    // Surface watcher errors immediately to test logs
+    watcher.on('error', (error) => {
+      console.error('chokidar watcher error:', error)
+      // Always fail the test if an error occurs
+      expect(true).toBe(false)
+    })
+
+    let onAll = (_eventName: string, _filePath: string) => {}
+
+    try {
+      // Wait until the watcher is ready
+      await new Promise((resolve) => watcher.on('ready', resolve))
+
+      // Prepare a promise that resolves when chokidar reports the change
+      const eventPromise = new Promise<{ eventName: string; filePath: string }>(
+        (resolve, reject) => {
+          const timeout = setTimeout(() => {
+            reject(new Error('Timed out waiting for chokidar event'))
+          }, 10_000)
+
+          onAll = vi.fn((eventName: string, filePath: string) => {
+            clearTimeout(timeout)
+            resolve({ eventName, filePath })
+          })
+
+          watcher.on('all', onAll)
+        },
+      )
+
+      const distPath = path.join(tmpDir, 'packages', 'foo', 'dist')
+
+      // Trigger a change in dist/
+      await fs.promises.writeFile(
+        path.join(distPath, 'new-file.js'),
+        '\n// update\n',
+      )
+
+      const result = await eventPromise
+
+      expect(result.eventName).toEqual('add')
+      expect(onAll).toHaveBeenCalledOnce()
+      expect(importStatementPath(result.filePath)).toMatch(
+        /packages\/foo\/dist\/new-file\.js$/,
+      )
+    } finally {
+      // Always close the watcher
+      watcher.off('all', onAll)
+      await watcher.close()
+    }
+  }, 10_000)
+
+  it('ignores edits inside packages/foo/node_modules', async () => {
+    const patterns = await pathsToWatch()
+    const ignoreFn = await getIgnoreFunction()
+
+    const watcher = chokidar.watch(patterns, {
+      persistent: true,
+      ignoreInitial: true,
+      ignored: ignoreFn,
+    })
+
+    watcher.on('error', (error) => {
+      console.error('chokidar watcher error:', error)
+      // Always fail the test if an error occurs
+      expect(true).toBe(false)
+    })
+
+    try {
+      // Wait until the watcher is ready
+      await new Promise((resolve) => watcher.on('ready', resolve))
+
+      const nmFile = path.join(
+        tmpDir,
+        'packages',
+        'foo',
+        'node_modules',
+        'pkg',
+        'index.ts',
+      )
+      await fs.promises.mkdir(path.dirname(nmFile), { recursive: true })
+      await fs.promises.writeFile(nmFile, 'export const x = 1')
+
+      const eventPromise = new Promise<void>((resolve, reject) => {
+        const timeout = setTimeout(() => {
+          watcher.off('all', onAll)
+          resolve()
+        }, 500)
+
+        const onAll = (eventName: string, filePath: string) => {
+          const normalized = importStatementPath(filePath)
+
+          if (normalized.includes('/packages/foo/node_modules/')) {
+            clearTimeout(timeout)
+            watcher.off('all', onAll)
+            reject(
+              new Error(
+                'node_modules edit triggered watcher event: ' + normalized,
+              ),
+            )
+          }
+        }
+
+        watcher.on('all', onAll)
+      })
+
+      await fs.promises.appendFile(nmFile, '\n// update\n')
+      await eventPromise
+    } finally {
+      await watcher.close()
+    }
+  }, 10_000)
+
+  it('ignores edits inside packages/foo/src', async () => {
+    const patterns = await pathsToWatch()
+    const ignoreFn = await getIgnoreFunction()
+
+    const watcher = chokidar.watch(patterns, {
+      persistent: true,
+      ignoreInitial: true,
+      ignored: ignoreFn,
+    })
+
+    watcher.on('error', (error) => {
+      console.error('chokidar watcher error:', error)
+      // Always fail the test if an error occurs
+      expect(true).toBe(false)
+    })
+
+    try {
+      // Wait until the watcher is ready
+      await new Promise((resolve) => watcher.on('ready', resolve))
+
+      const srcFile = path.join(tmpDir, 'packages', 'foo', 'src', 'index.ts')
+      await fs.promises.mkdir(path.dirname(srcFile), { recursive: true })
+      await fs.promises.writeFile(srcFile, 'export const y = 1')
+
+      const eventPromise = new Promise<void>((resolve, reject) => {
+        const timeout = setTimeout(() => {
+          watcher.off('all', onAll)
+          resolve()
+        }, 500)
+
+        const onAll = (_eventName: string, filePath: string) => {
+          const normalized = importStatementPath(filePath)
+
+          if (normalized.includes('/packages/foo/src/')) {
+            clearTimeout(timeout)
+            watcher.off('all', onAll)
+            reject(new Error('src edit triggered watcher event: ' + normalized))
+          }
+        }
+
+        watcher.on('all', onAll)
+      })
+
+      await fs.promises.appendFile(srcFile, '\n// update\n')
+      await eventPromise
+    } finally {
+      await watcher.close()
+    }
+  }, 10_000)
+})
diff --git a/packages/api-server/src/watch.ts b/packages/api-server/src/watch.ts
index 444190bb26..b75a032b53 100644
--- a/packages/api-server/src/watch.ts
+++ b/packages/api-server/src/watch.ts
@@ -1,4 +1,4 @@
-import path from 'path'
+import path from 'node:path'
 
 // See https://github.com/webdiscus/ansis#troubleshooting
 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
@@ -13,11 +13,12 @@ import {
   rebuildApi,
 } from '@cedarjs/internal/dist/build/api'
 import { loadAndValidateSdls } from '@cedarjs/internal/dist/validateSchema'
-import { ensurePosixPath, getPaths, getDbDir } from '@cedarjs/project-config'
+import { getPaths } from '@cedarjs/project-config'
 
 import type { BuildAndRestartOptions } from './buildManager.js'
 import { BuildManager } from './buildManager.js'
 import { serverManager } from './serverManager.js'
+import { getIgnoreFunction, pathsToWatch } from './watchPaths.js'
 
 const cedarPaths = getPaths()
 
@@ -73,45 +74,22 @@ async function validateSdls() {
  * Initialize the file watcher for the API server
  * Watches for changes in the API source directory and rebuilds/restarts as
  * needed
+ *
+ * Also watches package sources so that changes to workspace packages used by
+ * the API trigger a rebuild/restart (HMR for API-side workspace packages).
  */
 export async function startWatch() {
-  const dbDir = await getDbDir(cedarPaths.api.prismaConfig)
-
-  // NOTE: the file with a detected change comes through as a unix path, even on
-  // windows. So we need to convert the cedarPaths
-  const ignoredApiPaths = [
-    // use this, because using cedarPaths.api.dist seems to not ignore on first
-    // build
-    'api/dist',
-    cedarPaths.api.types,
-    dbDir,
-  ].map((path) => ensurePosixPath(path))
-  const ignoredExtensions = [
-    '.DS_Store',
-    '.db',
-    '.sqlite',
-    '-journal',
-    '.test.js',
-    '.test.ts',
-    '.scenarios.ts',
-    '.scenarios.js',
-    '.d.ts',
-    '.log',
-  ]
-
-  const watcher = chokidar.watch([cedarPaths.api.src], {
+  const patterns = await pathsToWatch()
+
+  const watcher = chokidar.watch(patterns, {
     persistent: true,
     ignoreInitial: true,
-    ignored: (file: string) => {
-      const shouldIgnore =
-        file.includes('node_modules') ||
-        ignoredApiPaths.some((ignoredPath) => file.includes(ignoredPath)) ||
-        ignoredExtensions.some((ext) => file.endsWith(ext))
-
-      return shouldIgnore
-    },
+    ignored: await getIgnoreFunction(),
   })
 
+  // This can fire multiple times
+  // https://github.com/paulmillr/chokidar/issues/286
+  // https://github.com/paulmillr/chokidar/issues/338
   watcher.on('ready', async () => {
     // First time
     await buildManager.run({ clean: true, rebuild: false })
@@ -119,18 +97,19 @@ export async function startWatch() {
   })
 
   watcher.on('all', async (eventName, filePath) => {
-    // On sufficiently large projects (500+ files, or >= 2000 ms build times) on older machines,
-    // esbuild writing to the api directory makes chokidar emit an `addDir` event.
-    // This starts an infinite loop where the api starts building itself as soon as it's finished.
-    // This could probably be fixed with some sort of build caching
+    // On sufficiently large projects (500+ files, or >= 2000 ms build times) on
+    // older machines, esbuild writing to the api directory makes chokidar emit
+    // an `addDir` event. This starts an infinite loop where the api starts
+    // building itself as soon as it's finished. This could probably be fixed
+    // with some sort of build caching
     if (eventName === 'addDir' && filePath === cedarPaths.api.base) {
       return
     }
 
     if (eventName) {
       if (filePath.includes('.sdl')) {
-        // We validate here, so that developers will see the error
-        // As they're running the dev server
+        // We validate here, so that developers will see the error as they're
+        // running the dev server
         const isValid = await validateSdls()
 
         // Exit early if not valid
@@ -140,9 +119,9 @@
       }
     }
 
-    console.log(
-      ansis.dim(`[${eventName}] ${filePath.replace(cedarPaths.api.base, '')}`),
-    )
+    // Normalize the displayed path so it's relative to the project base.
+    const displayPath = path.relative(cedarPaths.base, filePath)
+    console.log(ansis.dim(`[${eventName}] ${displayPath}`))
 
     buildManager.cancelScheduledBuild()
 
@@ -155,7 +134,7 @@
   })
 }
 
-// For ESM we'll wrap this in a check to only execute this function if
-// the file is run as a script using
+// For ESM we'll wrap this in a check to only execute this function if the file
+// is run as a script using
 // `import.meta.url === `file://${process.argv[1]}``
 startWatch()
diff --git a/packages/api-server/src/watchPaths.ts b/packages/api-server/src/watchPaths.ts
new file mode 100644
index 0000000000..05c89a2ee1
--- /dev/null
+++ b/packages/api-server/src/watchPaths.ts
@@ -0,0 +1,151 @@
+import fs from 'node:fs'
+import path from 'node:path'
+
+import {
+  getDbDir,
+  getPaths,
+  importStatementPath,
+} from '@cedarjs/project-config'
+
+async function workspacePackagesPaths() {
+  const cedarPaths = getPaths()
+  const packagesDir = path.join(cedarPaths.base, 'packages')
+
+  const packages: string[] = []
+
+  try {
+    const rootPackageJsonPath = path.join(cedarPaths.base, 'package.json')
+
+    const rootPackageJson = JSON.parse(
+      fs.readFileSync(rootPackageJsonPath, 'utf8'),
+    )
+    const hasPackageJsonWorkspaces =
+      Array.isArray(rootPackageJson.workspaces) &&
+      rootPackageJson.workspaces.some((w: string) => w.startsWith('packages/'))
+
+    // Optimization to return early if no workspace packages are defined
+    if (!hasPackageJsonWorkspaces || !fs.existsSync(packagesDir)) {
+      return []
+    }
+
+    const globPattern = path.join(packagesDir, '*').replaceAll('\\', '/')
+    const packageDirs = await Array.fromAsync(fs.promises.glob(globPattern))
+
+    const apiPackageJsonPath = path.join(cedarPaths.api.base, 'package.json')
+
+    // Look for 'workspace:*' dependencies in the API package.json
+    // No need to watch *all* workspace packages, only need to watch those that
+    // the api workspace actually depends on
+    const apiPackageJson = JSON.parse(
+      fs.readFileSync(apiPackageJsonPath, 'utf8'),
+    )
+    const deps = {
+      ...(apiPackageJson.dependencies ?? {}),
+      ...(apiPackageJson.devDependencies ?? {}),
+      ...(apiPackageJson.peerDependencies ?? {}),
+    }
+
+    const workspaceDepNames = new Set()
+
+    for (const [name, version] of Object.entries(deps)) {
+      if (String(version).startsWith('workspace:')) {
+        workspaceDepNames.add(name)
+      }
+    }
+
+    for (const packageDir of packageDirs) {
+      const packageJsonPath = path.join(packageDir, 'package.json')
+
+      if (!fs.existsSync(packageJsonPath)) {
+        continue
+      }
+
+      const pkgJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'))
+
+      if (workspaceDepNames.has(pkgJson.name)) {
+        packages.push(path.join(packageDir, 'dist'))
+      }
+    }
+  } catch {
+    // If anything goes wrong while determining workspace packages, ignore them
+    // all
+  }
+
+  return packages
+}
+
+function workspacePackagesIgnorePaths() {
+  const cedarPaths = getPaths()
+
+  const packagesDir = path.join(cedarPaths.base, 'packages')
+  const packageIgnoredPaths: string[] = []
+
+  if (fs.existsSync(packagesDir)) {
+    packageIgnoredPaths.push(path.join(packagesDir, '*/src'))
+  }
+
+  return packageIgnoredPaths
+}
+
+async function apiIgnorePaths() {
+  const cedarPaths = getPaths()
+
+  const dbDir = await getDbDir(cedarPaths.api.prismaConfig)
+
+  const ignoredApiPaths = [
+    // TODO: Is this still true?
+    // use this, because using cedarPaths.api.dist seems to not ignore on first
+    // build
+    'api/dist',
+    cedarPaths.api.types,
+    dbDir,
+  ]
+
+  return ignoredApiPaths
+}
+
+async function ignorePaths() {
+  // The file with a detected change comes through as a unix path, even on
+  // windows. So we need to convert all paths to unix-style paths to ensure
+  // matches. Plus, chokidar needs unix-style `/` path separators for globs even
+  // on Windows, which is exactly what `importStatementPath()` converts paths to
+  const apiIgnore = await apiIgnorePaths()
+  const packagesIgnore = workspacePackagesIgnorePaths()
+  return [...apiIgnore, ...packagesIgnore].map((p) => importStatementPath(p))
+}
+
+export async function getIgnoreFunction() {
+  const ignoredWatchPaths = await ignorePaths()
+
+  const ignoredExtensions = [
+    '.DS_Store',
+    '.db',
+    '.sqlite',
+    '-journal',
+    '.test.js',
+    '.test.ts',
+    '.scenarios.ts',
+    '.scenarios.js',
+    '.d.ts',
+    '.log',
+  ]
+
+  return (file: string) => {
+    const shouldIgnore =
+      file.includes('node_modules') ||
+      ignoredWatchPaths.some((ignoredPath) => file.includes(ignoredPath)) ||
+      ignoredExtensions.some((ext) => file.endsWith(ext))
+
+    return shouldIgnore
+  }
+}
+
+export async function pathsToWatch() {
+  const cedarPaths = getPaths()
+  const watchPaths = [cedarPaths.api.src, ...(await workspacePackagesPaths())]
+
+  // For glob paths, which `workspacePackagesPaths()` above might return,
+  // chokidar needs unix-style `/` path separators also on Windows, which is
+  // exactly what `importStatementPath()` provides.
+  return watchPaths.map((p) => importStatementPath(p))
+}