diff --git a/.gitignore b/.gitignore index 00023dd5..2beec0f2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ /desktop/build/bin /desktop/frontend/dist +/protoc-gen-kaja/node_modules +/protoc-gen-kaja/results +/protoc-gen-kaja/status.txt /server/build /ui/node_modules diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..ca8ef956 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,249 @@ +## Guidelines + +- See [Development](README.md#development) for instructions how to run and test. +- Only add code comments for really tricky parts; otherwise keep it clean. +- If API is called "getConfiguration", use "configuration" not "config" in code. +- Don't run `go build` directly; use `scripts/server`, `scripts/desktop`, or `scripts/docker`. Kill `scripts/server` when done. Make sure the server port is not in use. +- Don't run `scripts/build-ui` separately; when `scripts/server` is running, the UI is recompiled automatically on page load. +- The project is using https://primer.style/product/getting-started/react/. Use out-of-the-box Primer components as much as possible. Avoid custom styling unless absolutely necessary. +- Don't update generated files directly; they will be overwritten. - `ui/src/server/` is generated by protoc-gen-kaja. - `ui/src/wailsjs/**` are generated by Wails. +- When I prompt you to make changes that are radically different from what's documented here, please update this file accordingly. +- Don't commit changes to `kaja.json` +- Use past tense in pull request titles and commit messages (e.g., "Fix bug" → "Fixed bug"). +- Use capitalized "Kaja" for user-facing labels (titles, headings, UI text). Keep lowercase "kaja" for code, terminal commands, and file paths. +- Use pure Primer when possible, avoid custom wrappers and abstractions. +- Ask me before creating custom UI components; prefer direct use of Primer components. +- Keep pull-request descriptions super short - one or two sentences summarizing the change. 
+ +## Directory Structure + +``` +/ +├── desktop/ # Desktop application (Wails framework) +├── protoc-gen-kaja/ # Protoc plugin for TypeScript codegen (Go) +├── server/ # Backend server (Go) - serves both web and desktop +├── ui/ # Frontend UI (React/TypeScript) +├── workspace/ # Demo workspace with example proto definitions +├── scripts/ # Build and development scripts +└── docs/ # Documentation +``` + +### Build Directories + +There are multiple `build/` directories, each serving a different purpose: + +| Directory | Purpose | Gitignored | +| ------------------------- | ---------------------------------------------------------------------------------------- | ---------- | +| `/server/build/` | Protoc plugins (protoc-gen-\*) and bundled UI assets (main.js, main.css, monaco workers) | Yes | +| `/desktop/build/` | Platform files (app icons, Info.plist) - tracked in git | No | +| `/desktop/build/bin/` | Desktop executable binaries | Yes | +| `/desktop/frontend/dist/` | Frontend distribution for desktop (copied from server/build) | Yes | +| `$TMPDIR/kaja/` | Compilation temp folders (auto-cleaned after 60 min) | N/A | + +### Development vs Production Builds + +The server uses Go build tags to switch between development and production modes: + +**Development** (`-tags development`): + +- `server/assets_development.go` is used +- Reads UI files from filesystem at runtime +- Calls `ui.BuildForDevelopment()` to rebuild assets on startup +- Allows hot-reload during development + +**Production** (default, no tags): + +- `server/assets_production.go` is used +- All assets are embedded in the binary via `//go:embed` +- No filesystem access needed for serving UI +- Single self-contained binary + +### Server vs Desktop + +Both share the same backend code but differ in how they're packaged: + +**Server (Web)**: + +- Single Go binary with embedded React UI +- Serves HTTP API on port 41520 +- Run with: `scripts/server` +- Assets from `/server/build/` and `/server/static/` + 
+**Desktop (Wails)**: + +- Uses Wails framework (Go + webview) +- Embeds frontend via `//go:embed all:frontend/dist` +- Frontend files copied from server build to `/desktop/frontend/dist/` +- Native window and file dialogs +- Run with: `scripts/desktop` + +### Source Directories + +**`/ui/`** - React/TypeScript frontend: + +- `src/*.tsx` - React components +- `src/server/` - Generated proto client code (from `/server/proto/api.proto`) +- `src/wailsjs/` - Generated Wails bindings (auto-generated) + +**`/server/`** - Go backend: + +- `cmd/server/` - Main server application +- `cmd/build-ui/` - Tool to bundle React UI with esbuild +- `pkg/api/` - Generated proto code (Go) +- `proto/api.proto` - API service definition (source of truth) +- `static/` - Static files (index.html, favicon) + +**`/protoc-gen-kaja/`** - Protoc plugin for TypeScript code generation: + +- A Go-based protoc plugin that generates TypeScript client code from `.proto` files +- Currently a drop-in replacement for `@protobuf-ts/plugin` (`protoc-gen-ts`), producing identical output. Will eventually diverge and do things differently. +- Ships as a single native Go binary — no Node.js dependency +- Has its own `go.mod` (separate Go module from `/server/`) +- Built by `scripts/server` into `server/build/protoc-gen-kaja`; shipped alongside `protoc` in production and desktop builds +- Used for all TypeScript proto codegen: both Kaja's own API (`server/proto/api.proto` → `ui/src/server/`) and user workspace protos (compiled at runtime) +- Tests in `protoc-gen-kaja/tests/` compare output against `protoc-gen-ts` to ensure identical codegen; run with `protoc-gen-kaja/scripts/test` +- `protoc-gen-kaja/tests/000_big` is a comprehensive multi-file integration test (8 proto files across 6 directories) using an e-commerce theme. 
It covers: all 15 scalar types, all WKTs, all map key types (bool, int64, string, int32), map with message values, all 4 streaming RPC types, custom options (method/message/field extensions), `import public`, proto3 optional, `allow_alias` enums, reserved fields/names, deprecated messages/fields/enums/methods, `jstype` (JS_STRING, JS_NUMBER), `json_name`, TypeScript keyword field names, `__proto__` field, `oneof_kind` field, `constructor` oneof member, 4-level deep nesting, self-referential messages, nested collision (Product_Variant vs Product.Variant), runtime import collision (WireType, MessageType), cross-package name collision (Status, Metadata), empty service, idempotency levels, detached comments, and comments with special chars (`*/`, ``, `\n`). When adding new codegen features, expand this test to cover them. +- Automated loop: `protoc-gen-kaja/scripts/loop` runs two agents in alternation — RALPH (fixer, reads `protoc-gen-kaja/RALPH.md`) and NELSON (adversarial tester, reads `protoc-gen-kaja/NELSON.md`). RALPH writes "DONE" to `protoc-gen-kaja/status.txt` when all tests pass; NELSON writes "HAHA" when it finds a new failing test. The loop continues until NELSON can't break it. 
+ +**`/desktop/`** - Wails desktop app: + +- `main.go` - Wails app entry point +- `frontend/dist/` - Copied from server build (gitignored) + +**`/workspace/`** - Example workspace for development and testing: + +- This is a demo workspace that developers use to test kaja +- `kaja.json` - Configuration file defining demo projects hosted on kaja.tools: + - quirks, users, teams services (both gRPC and Twirp protocols) +- `quirks/proto/`, `users/proto/`, `teams/proto/` - Proto files for each service +- Run `scripts/demo-protos` to update proto files from kaja-tools/website +- The `scripts/server` script starts kaja with this workspace by default + +### Code Generation Flow + +``` +/server/proto/api.proto + │ + ├──→ [protoc + protoc-gen-go/twirp] → /server/pkg/api/*.go + │ + └──→ [protoc + protoc-gen-kaja] → /ui/src/server/*.ts + │ + v + go run cmd/build-ui/main.go (esbuild) + │ + v + /server/build/ + (main.js, main.css, workers) + │ + ┌────────────────────────────┼────────────────────────────┐ + │ │ │ + v v v + Server (embedded) Desktop (copied to Docker (embedded) + /desktop/frontend/dist) +``` + +## Primer Components + +Available components from `@primer/react` (v38). Prefer these over custom components. 
+ +### Main (`@primer/react`) + +- **ActionBar** - A collection of horizontally aligned IconButtons with overflow menu +- **ActionList** - Vertical list of interactive actions or options +- **ActionMenu** - Combines ActionList and Overlay for quick actions and selections +- **AnchoredOverlay** - Opens an Overlay positioned relative to an anchor element +- **Autocomplete** - Filter through a list and pick one or more values +- **Avatar** - Image representing a user or organization +- **AvatarStack** - Displays two or more Avatars in an inline stack +- **Banner** - Highlights important information +- **Blankslate** - Placeholder explaining why content is missing +- **BranchName** - Label component for displaying branch names +- **Breadcrumb** - Legacy breadcrumb component +- **Breadcrumbs** - Displays current page hierarchy for navigation +- **Button** - Initiates actions on a page or form +- **ButtonGroup** - Renders a series of buttons together +- **Checkbox** - Form control for single and multiple selections +- **CheckboxGroup** - Renders a set of checkboxes +- **CircleBadge** - Connects logos of third-party services visually +- **ConfirmationDialog** - Dialog for confirming destructive actions +- **CounterLabel** - Adds a count to navigational elements and buttons +- **Details** - Styled wrapper for native `
` element +- **Dialog** - Floating surface for transient content +- **Flash** - Inline message banner for feedback +- **FormControl** - Labelled input with optional validation and hint text +- **Header** - Page-level header bar +- **Heading** - Defines hierarchical content structure +- **IconButton** - Button displaying an icon instead of text +- **Label** - Adds contextual metadata to a design +- **LabelGroup** - Layout constraints for groups of Labels +- **Link** - Styles for hyperlink text +- **LinkButton** - Button styled as a link +- **NavList** - Vertical list of navigation links +- **Overlay** - Floating surface design patterns +- **PageHeader** - Top-level page heading +- **PageLayout** - Defines header, main, pane, and footer areas +- **Pagination** - Horizontal links for navigating paginated content +- **Popover** - Brings attention to specific UI elements +- **Portal** - Renders children into a different DOM subtree +- **ProgressBar** - Shows completion progress or part-to-whole ratios +- **Radio** - Form control for single selection from options +- **RadioGroup** - Renders mutually exclusive options +- **RelativeTime** - Displays time clearly and accessibly +- **SegmentedControl** - Pick one choice from a linear set of options +- **Select** - Dropdown for single predefined choice +- **SelectPanel** - Anchored dialog for selecting one or multiple items +- **SideNav** - Vertical navigation sidebar +- **Spinner** - Indeterminate loading indicator +- **SplitPageLayout** - Two-column layout with main content and sidebar +- **Stack** - Responsive horizontal and vertical layout flows +- **StateLabel** - Renders issue or pull request status +- **SubNav** - Secondary horizontal navigation +- **Text** - Applies Primer typographic styles +- **TextInput** - Single-line text input +- **TextInputWithTokens** - Input for list-based values +- **Textarea** - Multi-line text input +- **Timeline** - Displays items on a vertical timeline +- **ToggleSwitch** - Immediately 
toggles a setting on or off +- **Token** - Compact representation of an object or metadata +- **Tooltip** - Additional context on hover or keyboard focus +- **TreeView** - Hierarchical list with expandable parents +- **Truncate** - Shortens overflowing text with ellipsis +- **UnderlineNav** - Horizontal tabbed navigation +- **VisuallyHidden** - Hides content visually while keeping it accessible + +### Experimental (`@primer/react/experimental`) + +- **Announce** - Live region announcements for screen readers +- **AriaAlert** - Assertive live region alert +- **AriaStatus** - Polite live region status update +- **ButtonBase** - Base component for building custom buttons +- **DataTable** - 2D data structure with rows and columns +- **FeatureFlags** - Provider for toggling feature flags +- **FilteredActionList** - ActionList with built-in text filtering +- **Hidden** - Conditionally hides content at breakpoints +- **InlineMessage** - Informs users about action results within content +- **IssueLabel** - Renders GitHub issue labels +- **KeybindingHint** - Displays keyboard shortcut hints +- **ScrollableRegion** - Accessible scrollable container +- **SelectPanel2** - Next-generation SelectPanel +- **SkeletonAvatar** - Loading placeholder for Avatar +- **SkeletonBox** - Loading placeholder for non-text elements +- **SkeletonText** - Loading placeholder for text +- **Table** - Low-level table component (used by DataTable) +- **Tabs** - Tabbed interface for switching views +- **TopicTag** - Renders topic/tag labels +- **UnderlinePanels** - Tabbed panels for related content + +### Deprecated (`@primer/react/deprecated`) + +- **ActionList** - Legacy ActionList (use main ActionList instead) +- **ActionMenu** - Legacy ActionMenu (use main ActionMenu instead) +- **Dialog** - Legacy Dialog v1 (use main Dialog instead) +- **FilteredSearch** - Legacy filtered search input +- **Octicon** - Renders Octicon icons directly +- **Pagehead** - Legacy page heading +- **TabNav** - Legacy tab 
navigation (use UnderlineNav instead) +- **Tooltip** - Legacy Tooltip (use main Tooltip instead) +- **UnderlineNav** - Legacy UnderlineNav diff --git a/CLAUDE.md b/CLAUDE.md index 3b87d478..eef4bd20 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,235 +1 @@ -## Guidelines - -- See [Development](README.md#development) for instructions how to run and test. -- Only add code comments for really tricky parts; otherwise keep it clean. -- If API is called "getConfiguration", use "configuration" not "config" in code. -- Don't run `go build` directly; use `scripts/server`, `scripts/desktop`, or `scripts/docker`. Kill `scripts/server` when done. Make sure the server port is not in use. -- Don't run `scripts/build-ui` separately; when `scripts/server` is running, the UI is recompiled automatically on page load. -- The is using https://primer.style/product/getting-started/react/. Use out-of-the-box Primer components as much as possible. Avoid custom styling unless absolutely necessary. -- Don't update generated files directly; they will be overwritten. - - `ui/src/wailsjs/**` are generated by Wails. -- When I prompt you to make changes that are radically different from what's documented here, please update this file accordingly. -- Don't commit changes to `kaja.json` -- Use past tense in pull request titles and commit messages (e.g., "Fix bug" → "Fixed bug"). -- Use capitalized "Kaja" for user-facing labels (titles, headings, UI text). Keep lowercase "kaja" for code, terminal commands, and file paths. -- Use pure Primer when possible, avoid custom wrappers and abstractions. -- Ask me before creating custom UI components; prefer direct use of Primer components. -- Keep pull-request descriptions super short - one or two sentences summarizing the change. 
- -## Directory Structure - -``` -/ -├── desktop/ # Desktop application (Wails framework) -├── server/ # Backend server (Go) - serves both web and desktop -├── ui/ # Frontend UI (React/TypeScript) -├── workspace/ # Demo workspace with example proto definitions -├── scripts/ # Build and development scripts -└── docs/ # Documentation -``` - -### Build Directories - -There are multiple `build/` directories, each serving a different purpose: - -| Directory | Purpose | Gitignored | -| ------------------------- | ---------------------------------------------------------------------------------------- | ---------- | -| `/server/build/` | Protoc plugins (protoc-gen-\*) and bundled UI assets (main.js, main.css, monaco workers) | Yes | -| `/desktop/build/` | Platform files (app icons, Info.plist) - tracked in git | No | -| `/desktop/build/bin/` | Desktop executable binaries | Yes | -| `/desktop/frontend/dist/` | Frontend distribution for desktop (copied from server/build) | Yes | -| `$TMPDIR/kaja/` | Compilation temp folders (auto-cleaned after 60 min) | N/A | - -### Development vs Production Builds - -The server uses Go build tags to switch between development and production modes: - -**Development** (`-tags development`): - -- `server/assets_development.go` is used -- Reads UI files from filesystem at runtime -- Calls `ui.BuildForDevelopment()` to rebuild assets on startup -- Allows hot-reload during development - -**Production** (default, no tags): - -- `server/assets_production.go` is used -- All assets are embedded in the binary via `//go:embed` -- No filesystem access needed for serving UI -- Single self-contained binary - -### Server vs Desktop - -Both share the same backend code but differ in how they're packaged: - -**Server (Web)**: - -- Single Go binary with embedded React UI -- Serves HTTP API on port 41520 -- Run with: `scripts/server` -- Assets from `/server/build/` and `/server/static/` - -**Desktop (Wails)**: - -- Uses Wails framework (Go + webview) -- Embeds 
frontend via `//go:embed all:frontend/dist` -- Frontend files copied from server build to `/desktop/frontend/dist/` -- Native window and file dialogs -- Run with: `scripts/desktop` - -### Source Directories - -**`/ui/`** - React/TypeScript frontend: - -- `src/*.tsx` - React components -- `src/server/` - Generated proto client code (from `/server/proto/api.proto`) -- `src/wailsjs/` - Generated Wails bindings (auto-generated) - -**`/server/`** - Go backend: - -- `cmd/server/` - Main server application -- `cmd/build-ui/` - Tool to bundle React UI with esbuild -- `pkg/api/` - Generated proto code (Go) -- `proto/api.proto` - API service definition (source of truth) -- `static/` - Static files (index.html, favicon) - -**`/desktop/`** - Wails desktop app: - -- `main.go` - Wails app entry point -- `frontend/dist/` - Copied from server build (gitignored) - -**`/workspace/`** - Example workspace for development and testing: - -- This is a demo workspace that developers use to test kaja -- `kaja.json` - Configuration file defining demo projects hosted on kaja.tools: - - quirks, users, teams services (both gRPC and Twirp protocols) -- `quirks/proto/`, `users/proto/`, `teams/proto/` - Proto files for each service -- Run `scripts/demo-protos` to update proto files from kaja-tools/website -- The `scripts/server` script starts kaja with this workspace by default - -### Code Generation Flow - -``` -/server/proto/api.proto - │ - ├──→ [protoc + protoc-gen-go/twirp] → /server/pkg/api/*.go - │ - └──→ [protoc + protoc-gen-ts] → /ui/src/server/*.ts - │ - v - go run cmd/build-ui/main.go (esbuild) - │ - v - /server/build/ - (main.js, main.css, workers) - │ - ┌────────────────────────────┼────────────────────────────┐ - │ │ │ - v v v - Server (embedded) Desktop (copied to Docker (embedded) - /desktop/frontend/dist) -``` - -## Primer Components - -Available components from `@primer/react` (v38). Prefer these over custom components. 
- -### Main (`@primer/react`) - -- **ActionBar** - A collection of horizontally aligned IconButtons with overflow menu -- **ActionList** - Vertical list of interactive actions or options -- **ActionMenu** - Combines ActionList and Overlay for quick actions and selections -- **AnchoredOverlay** - Opens an Overlay positioned relative to an anchor element -- **Autocomplete** - Filter through a list and pick one or more values -- **Avatar** - Image representing a user or organization -- **AvatarStack** - Displays two or more Avatars in an inline stack -- **Banner** - Highlights important information -- **Blankslate** - Placeholder explaining why content is missing -- **BranchName** - Label component for displaying branch names -- **Breadcrumb** - Legacy breadcrumb component -- **Breadcrumbs** - Displays current page hierarchy for navigation -- **Button** - Initiates actions on a page or form -- **ButtonGroup** - Renders a series of buttons together -- **Checkbox** - Form control for single and multiple selections -- **CheckboxGroup** - Renders a set of checkboxes -- **CircleBadge** - Connects logos of third-party services visually -- **ConfirmationDialog** - Dialog for confirming destructive actions -- **CounterLabel** - Adds a count to navigational elements and buttons -- **Details** - Styled wrapper for native `
` element -- **Dialog** - Floating surface for transient content -- **Flash** - Inline message banner for feedback -- **FormControl** - Labelled input with optional validation and hint text -- **Header** - Page-level header bar -- **Heading** - Defines hierarchical content structure -- **IconButton** - Button displaying an icon instead of text -- **Label** - Adds contextual metadata to a design -- **LabelGroup** - Layout constraints for groups of Labels -- **Link** - Styles for hyperlink text -- **LinkButton** - Button styled as a link -- **NavList** - Vertical list of navigation links -- **Overlay** - Floating surface design patterns -- **PageHeader** - Top-level page heading -- **PageLayout** - Defines header, main, pane, and footer areas -- **Pagination** - Horizontal links for navigating paginated content -- **Popover** - Brings attention to specific UI elements -- **Portal** - Renders children into a different DOM subtree -- **ProgressBar** - Shows completion progress or part-to-whole ratios -- **Radio** - Form control for single selection from options -- **RadioGroup** - Renders mutually exclusive options -- **RelativeTime** - Displays time clearly and accessibly -- **SegmentedControl** - Pick one choice from a linear set of options -- **Select** - Dropdown for single predefined choice -- **SelectPanel** - Anchored dialog for selecting one or multiple items -- **SideNav** - Vertical navigation sidebar -- **Spinner** - Indeterminate loading indicator -- **SplitPageLayout** - Two-column layout with main content and sidebar -- **Stack** - Responsive horizontal and vertical layout flows -- **StateLabel** - Renders issue or pull request status -- **SubNav** - Secondary horizontal navigation -- **Text** - Applies Primer typographic styles -- **TextInput** - Single-line text input -- **TextInputWithTokens** - Input for list-based values -- **Textarea** - Multi-line text input -- **Timeline** - Displays items on a vertical timeline -- **ToggleSwitch** - Immediately 
toggles a setting on or off -- **Token** - Compact representation of an object or metadata -- **Tooltip** - Additional context on hover or keyboard focus -- **TreeView** - Hierarchical list with expandable parents -- **Truncate** - Shortens overflowing text with ellipsis -- **UnderlineNav** - Horizontal tabbed navigation -- **VisuallyHidden** - Hides content visually while keeping it accessible - -### Experimental (`@primer/react/experimental`) - -- **Announce** - Live region announcements for screen readers -- **AriaAlert** - Assertive live region alert -- **AriaStatus** - Polite live region status update -- **ButtonBase** - Base component for building custom buttons -- **DataTable** - 2D data structure with rows and columns -- **FeatureFlags** - Provider for toggling feature flags -- **FilteredActionList** - ActionList with built-in text filtering -- **Hidden** - Conditionally hides content at breakpoints -- **InlineMessage** - Informs users about action results within content -- **IssueLabel** - Renders GitHub issue labels -- **KeybindingHint** - Displays keyboard shortcut hints -- **ScrollableRegion** - Accessible scrollable container -- **SelectPanel2** - Next-generation SelectPanel -- **SkeletonAvatar** - Loading placeholder for Avatar -- **SkeletonBox** - Loading placeholder for non-text elements -- **SkeletonText** - Loading placeholder for text -- **Table** - Low-level table component (used by DataTable) -- **Tabs** - Tabbed interface for switching views -- **TopicTag** - Renders topic/tag labels -- **UnderlinePanels** - Tabbed panels for related content - -### Deprecated (`@primer/react/deprecated`) - -- **ActionList** - Legacy ActionList (use main ActionList instead) -- **ActionMenu** - Legacy ActionMenu (use main ActionMenu instead) -- **Dialog** - Legacy Dialog v1 (use main Dialog instead) -- **FilteredSearch** - Legacy filtered search input -- **Octicon** - Renders Octicon icons directly -- **Pagehead** - Legacy page heading -- **TabNav** - Legacy tab 
navigation (use UnderlineNav instead) -- **Tooltip** - Legacy Tooltip (use main Tooltip instead) -- **UnderlineNav** - Legacy UnderlineNav +@AGENTS.md \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 794abd09..1dfc5bbd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,9 +15,11 @@ RUN if [ "$RUN_TESTS" = "true" ] ; then \ npm test -- run; \ fi +COPY protoc-gen-kaja /protoc-gen-kaja COPY server /server WORKDIR /server RUN go run cmd/build-ui/main.go +RUN go build -C /protoc-gen-kaja -o /server/build/protoc-gen-kaja . RUN if [ "$RUN_TESTS" = "true" ] ; then \ go test ./... -v; \ fi @@ -25,8 +27,7 @@ RUN go build -ldflags "-X main.GitRef=$GIT_REF" -o /build/server ./cmd/server FROM alpine:latest AS runner COPY --from=builder /build/server /server/ -COPY --from=builder /server/build/protoc-gen-ts /server/build/ -RUN apk add --update nodejs +COPY --from=builder /server/build/protoc-gen-kaja /server/build/ RUN apk update && apk add --no-cache make protobuf-dev WORKDIR /server EXPOSE 41520 diff --git a/TODO.md b/TODO.md deleted file mode 100644 index e69de29b..00000000 diff --git a/protoc-gen-kaja/NELSON.md b/protoc-gen-kaja/NELSON.md new file mode 100644 index 00000000..59aa7186 --- /dev/null +++ b/protoc-gen-kaja/NELSON.md @@ -0,0 +1,32 @@ +## Task + +You are a **senior adversarial tester**. A junior engineer is porting [protoc-gen-ts](https://github.com/timostamm/protobuf-ts/tree/main/packages/plugin) to Go plugin `protoc-gen-kaja`. Your job is to **find at least one new case where the Go output differs from the TypeScript original** and add a failing test that proves it. + +You succeed when tests fail. You fail when tests pass. + +## How This Works + +You are running inside an automated loop. **Each invocation is stateless** — you have no memory of previous runs. This file (NELSON.md) is your only persistent memory. Read it first. Write to it before you finish. Your future self depends on it. + +## Steps (follow this order every run) + +1. 
**Read state.** Read the [Notes](#notes) section below. Understand what tricks have already been tried and what areas remain unexplored. +2. **Orient.** Study the Go implementation (`protoc-gen-kaja/main.go`) and the TS plugin source. Look for discrepancies: edge cases, missing features, incorrect escaping, wrong formatting, missing options handling. +3. **Find a gap.** Pick ONE specific behavior where the Go plugin likely differs from the TS plugin. Be creative — look at proto2 features, obscure options, unusual naming, deep nesting, corner cases in comments, whitespace, import ordering, etc. +4. **Write a failing test.** Create or modify a test case with a `.proto` file that exposes the bug. Spend the bulk of your effort here. The test must actually fail — run it and confirm. +5. **Test.** Run the full test suite. Verify your new test fails. If it passes (the Go plugin handles it correctly), try a different angle — don't give up. +6. **Update memory.** Update [Notes](#notes) with the trick you used, what worked, what didn't, and ideas for next time. Keep notes focused on **attack strategies**, not on tests you've already added. If a trick has been successfully used, note it briefly and move on to new ideas. +7. **Commit.** One-line past-tense commit message summarizing the new test. +8. **Check result.** If at least one test fails, write "HAHA" to protoc-gen-kaja/status.txt and stop. If all tests pass (you failed to break it), just end — you'll run again with a different approach. + +## Rules + +- **Your goal is failing tests.** A run where all tests pass is a failed run for you. +- **Never fix the Go implementation.** You write tests, not fixes. +- **Never weaken existing tests.** Don't modify passing tests to make them fail artificially. 
+- **Be creative and adversarial.** Think about proto2 vs proto3 differences, obscure field options, unicode in identifiers, deeply nested types, circular imports, reserved words in different contexts, whitespace sensitivity, comment edge cases, etc. +- **One new test per run.** Focus on one specific bug. Don't shotgun multiple test cases. +- **Don't repeat yourself.** If a trick is logged in Notes as already used, find a new one. +- **Keep Notes as an attack playbook.** Good: "Boolean map keys — Go returns 'boolean', TS returns 'string'. Tested in 300_bool_map_key." Bad: "Good progress finding bugs." + +## Notes diff --git a/protoc-gen-kaja/RALPH.md b/protoc-gen-kaja/RALPH.md new file mode 100644 index 00000000..06f6fe5d --- /dev/null +++ b/protoc-gen-kaja/RALPH.md @@ -0,0 +1,34 @@ +## Task + +You are porting [protoc-gen-ts](https://github.com/timostamm/protobuf-ts/tree/main/packages/plugin) to Go plugin `protoc-gen-kaja`. The Go implementation must produce **byte-for-byte identical output** to the TypeScript original. No exceptions. No "close enough". + +## How This Works + +You are running inside an automated loop. **Each invocation is stateless** — you have no memory of previous runs. This file (RALPH.md) is your only persistent memory. Read it first. Write to it before you finish. Your future self depends on it. + +## Steps (follow this order every run) + +1. **Read state.** Read the [Plan](#plan) and [Notes](#notes) sections below. Understand where you left off. Don't redo work that's already done. +2. **Orient.** If Plan is empty, analyze the codebase, research the TS plugin (clone it if needed), and write a detailed plan. If Plan exists, pick the next incomplete item. +3. **Implement.** Spend the bulk of your effort here. Work on ONE failing test case or feature at a time. Make real, substantive progress. +4. **Test.** Run the tests. Read the output carefully. If a test fails, understand WHY before changing code. +5. 
**Update memory.** Update [Plan](#plan) with what's done and what's next. Update [Notes](#notes) with learnings that will help future runs. Be specific — file paths, function names, gotchas, how to run tests. +6. **Commit.** One-line past-tense message summarizing what changed. +7. **Check completion.** If ALL tests pass, write "DONE" to protoc-gen-kaja/status.txt and stop. If any test fails, do NOT write DONE. Just end — you'll run again. + +## Rules + +- **DONE means ALL tests pass.** Not most. Not "the important ones". ALL. Zero failures. +- **Never weaken requirements.** Don't modify test expectations. Don't skip tests. Don't add notes like "close enough" or "cosmetic difference". If you see such notes below, delete them. +- **Never mark DONE prematurely.** Run the full test suite and confirm zero failures before writing DONE. +- **Be bold with architecture.** If the current approach is fundamentally wrong, refactor it. Document why in the plan. +- **Keep Notes actionable.** Good: "Run tests with `protoc-gen-kaja/scripts/test`. Failures show as diffs." Bad: "Making good progress overall." +- **One thing at a time.** Fix one test, commit, move to the next. Don't try to fix everything in one run. + +## Plan + +## Notes + +- Run tests with `protoc-gen-kaja/scripts/test --summary`. Full output without `--summary`. +- Use `protoc-gen-kaja/scripts/diff ` to inspect specific failures. +- Results are in `protoc-gen-kaja/results//`. Each has `expected/`, `actual/`, `result.txt`, and optionally `failure.txt`. 
diff --git a/protoc-gen-kaja/go.mod b/protoc-gen-kaja/go.mod new file mode 100644 index 00000000..7a9db4f5 --- /dev/null +++ b/protoc-gen-kaja/go.mod @@ -0,0 +1,7 @@ +module github.com/wham/kaja/v2/protoc-gen-kaja + +go 1.24.0 + +toolchain go1.24.7 + +require google.golang.org/protobuf v1.36.11 diff --git a/protoc-gen-kaja/go.sum b/protoc-gen-kaja/go.sum new file mode 100644 index 00000000..296be183 --- /dev/null +++ b/protoc-gen-kaja/go.sum @@ -0,0 +1,4 @@ +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= +google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= diff --git a/protoc-gen-kaja/main.go b/protoc-gen-kaja/main.go new file mode 100644 index 00000000..65ee8e63 --- /dev/null +++ b/protoc-gen-kaja/main.go @@ -0,0 +1,7925 @@ +package main + +import ( + "encoding/base64" + "fmt" + "io" + "math" + "os" + "sort" + "path/filepath" + "strconv" + "strings" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/descriptorpb" + "google.golang.org/protobuf/types/pluginpb" +) + +func main() { + input, err := io.ReadAll(os.Stdin) + if err != nil { + os.Stderr.WriteString("failed to read input: " + err.Error() + "\n") + os.Exit(1) + } + + req := &pluginpb.CodeGeneratorRequest{} + if err := proto.Unmarshal(input, req); err != nil { + os.Stderr.WriteString("failed to unmarshal request: " + err.Error() + "\n") + os.Exit(1) + } + + resp := generate(req) + + output, err := proto.Marshal(resp) + if err != nil { + os.Stderr.WriteString("failed to marshal response: " + err.Error() + "\n") + os.Exit(1) + } + + os.Stdout.Write(output) +} + +// TypeScript reserved keywords and type names that need to be escaped +var tsReservedKeywords = map[string]bool{ + "break": true, "case": 
true, "catch": true, "class": true, "const": true, "continue": true, + "debugger": true, "default": true, "delete": true, "do": true, "else": true, "enum": true, + "export": true, "extends": true, "false": true, "finally": true, "for": true, "function": true, + "if": true, "import": true, "in": true, "instanceof": true, "new": true, "null": true, + "return": true, "super": true, "switch": true, "this": true, "throw": true, "true": true, + "try": true, "typeof": true, "var": true, "void": true, "while": true, "with": true, + "as": true, "implements": true, "interface": true, "let": true, "package": true, "private": true, + "protected": true, "public": true, "static": true, "yield": true, "any": true, "boolean": true, + "constructor": true, "declare": true, "get": true, "module": true, "require": true, "number": true, + "set": true, "string": true, "symbol": true, "type": true, "from": true, "of": true, +} + +var tsReservedTypeNames = map[string]bool{ + "object": true, "Uint8Array": true, "array": true, "Array": true, "string": true, "String": true, + "number": true, "Number": true, "boolean": true, "Boolean": true, "bigint": true, "BigInt": true, +} + +// Reserved class method/property names that need escaping in service clients +var tsReservedMethodNames = map[string]bool{ + // Generic reserved names + "name": true, "constructor": true, "close": true, "toString": true, + // gRPC client reserved method names + "makeUnaryRequest": true, "makeClientStreamRequest": true, + "makeServerStreamRequest": true, "makeBidiStreamRequest": true, + "getChannel": true, "waitForReady": true, + // ServiceInfo interface properties + "methods": true, "typeName": true, "options": true, +} + +// Escape TypeScript reserved keywords and type names by adding '$' suffix +func escapeTypescriptKeyword(name string) string { + if tsReservedKeywords[name] || tsReservedTypeNames[name] { + return name + "$" + } + return name +} + +// Escape reserved class method/property names by adding '$' suffix 
// Escape reserved class method/property names by adding '$' suffix
func escapeMethodName(name string) string {
	if tsReservedMethodNames[name] {
		return name + "$"
	}
	return name
}

// params holds parsed plugin parameters. Currently no parameters are
// supported, so the struct is empty; it exists so the signature of
// parseParameters stays stable as options are added.
type params struct{}

// parseParameters parses the protoc plugin parameter string. The argument is
// currently ignored.
func parseParameters(paramStr *string) params {
	return params{}
}

// findFile returns the FileDescriptorProto with the given name, or nil if it
// is not present in files.
func findFile(files []*descriptorpb.FileDescriptorProto, name string) *descriptorpb.FileDescriptorProto {
	for _, f := range files {
		if f.GetName() == name {
			return f
		}
	}
	return nil
}

// collectTransitiveWKTDeps returns all google/protobuf/ files transitively
// reachable from the given FileToGenerate files.
func collectTransitiveWKTDeps(fileToGenerate []string, allFiles []*descriptorpb.FileDescriptorProto) map[string]bool {
	result := map[string]bool{}
	visited := map[string]bool{}
	var visit func(fileName string)
	visit = func(fileName string) {
		if visited[fileName] {
			return
		}
		visited[fileName] = true
		f := findFile(allFiles, fileName)
		if f == nil {
			return
		}
		for _, dep := range f.Dependency {
			if strings.HasPrefix(dep, "google/protobuf/") {
				result[dep] = true
			}
			visit(dep)
		}
	}
	for _, genFile := range fileToGenerate {
		visit(genFile)
	}
	return result
}

// getOutputFileName maps foo/bar.proto to foo/bar.ts.
func getOutputFileName(protoFile string) string {
	base := strings.TrimSuffix(protoFile, ".proto")
	return base + ".ts"
}

// getClientOutputFileName maps foo/bar.proto to foo/bar.client.ts.
func getClientOutputFileName(protoFile string) string {
	base := strings.TrimSuffix(protoFile, ".proto")
	return base + ".client.ts"
}

// generate builds the full CodeGeneratorResponse for a request: one .ts file
// per proto file that produces output, a .client.ts file for each file with
// services, and .ts files for any well-known types the generated files depend
// on transitively.
func generate(req *pluginpb.CodeGeneratorRequest) *pluginpb.CodeGeneratorResponse {
	resp := &pluginpb.CodeGeneratorResponse{}
	resp.SupportedFeatures = proto.Uint64(uint64(pluginpb.CodeGeneratorResponse_FEATURE_PROTO3_OPTIONAL))

	// Parse plugin parameters
	params := parseParameters(req.Parameter)

	// Pre-scan: identify files with services in this batch
	filesWithServices := make(map[string]bool)
	importedByServiceFilesInSameDir := make(map[string]bool) // Dependencies imported by service files in the same directory
	importedByServiceFilesInDiffDir := make(map[string]bool) // Dependencies imported by service files in different directories
	importedByNonServiceFiles := make(map[string]bool)

	for _, fileName := range req.FileToGenerate {
		file := findFile(req.ProtoFile, fileName)
		if file != nil {
			hasService := len(file.Service) > 0
			fileDir := filepath.Dir(fileName)
			if hasService {
				filesWithServices[fileName] = true
				// Mark all dependencies of this service file
				for _, dep := range file.Dependency {
					depDir := filepath.Dir(dep)
					if fileDir == depDir {
						// Same directory - not a library file
						importedByServiceFilesInSameDir[dep] = true
					} else {
						// Different directory - potential library file
						importedByServiceFilesInDiffDir[dep] = true
					}
				}
			} else {
				// Mark all dependencies of this non-service file
				for _, dep := range file.Dependency {
					importedByNonServiceFiles[dep] = true
				}
			}
		}
	}

	// Generate files for each proto file to generate, tracking which produced output
	generatedFiles := make(map[string]bool)
	hasExtensionFiles := false
	for _, fileName := range req.FileToGenerate {
		file := findFile(req.ProtoFile, fileName)
		if file == nil {
			continue
		}

		if len(file.Extension) > 0 {
			hasExtensionFiles = true
		}

		// A file is "imported by service files only" if:
		// 1. It's imported by at least one service file in a DIFFERENT directory (library file pattern)
		// 2. It's NOT imported by any non-service files
		// 3. It's NOT imported by any service files in the SAME directory (same-dir imports don't count)
		// 4. It's NOT a main file (has a service) - main service files handle their own imports
		//
		// This flag affects WireType positioning: library files in subdirectories used only by services
		// get WireType early, while files in the same directory as their importers get it late.
		hasService := len(file.Service) > 0
		isImportedOnlyByServices := !hasService &&
			importedByServiceFilesInDiffDir[fileName] &&
			!importedByServiceFilesInSameDir[fileName] &&
			!importedByNonServiceFiles[fileName]

		content := generateFile(file, req.ProtoFile, params, isImportedOnlyByServices)
		if content == "" {
			continue
		}

		generatedFiles[fileName] = true
		outputName := getOutputFileName(fileName)
		resp.File = append(resp.File, &pluginpb.CodeGeneratorResponse_File{
			Name:    proto.String(outputName),
			Content: proto.String(content),
		})

		// Generate client file if there are services
		if len(file.Service) > 0 {
			clientContent := generateClientFile(file, req.ProtoFile, params)
			clientName := getClientOutputFileName(fileName)
			resp.File = append(resp.File, &pluginpb.CodeGeneratorResponse_File{
				Name:    proto.String(clientName),
				Content: proto.String(clientContent),
			})
		}
	}

	// Also generate for google.protobuf well-known types if they're dependencies,
	// but only if at least one FileToGenerate produced output or has extensions
	if len(generatedFiles) > 0 || hasExtensionFiles {
		// Collect all WKT files transitively reachable from FileToGenerate
		neededWKTs := collectTransitiveWKTDeps(req.FileToGenerate, req.ProtoFile)
		for _, file := range req.ProtoFile {
			fileName := file.GetName()
			if !neededWKTs[fileName] {
				continue
			}
			content := generateFile(file, req.ProtoFile, params, false)
			if content != "" {
				outputName := getOutputFileName(fileName)
				resp.File = append(resp.File, &pluginpb.CodeGeneratorResponse_File{
					Name:    proto.String(outputName),
					Content: proto.String(content),
				})
			}
		}
	}

	return resp
}

// generator carries all per-file state while emitting one TypeScript file.
// The many *Ref fields hold the identifier used to reference a runtime
// import; they are normally the plain runtime name and switch to the
// '$'-suffixed alias when a locally declared type shadows the runtime name.
type generator struct {
	b                         strings.Builder
	params                    params
	file                      *descriptorpb.FileDescriptorProto
	allFiles                  []*descriptorpb.FileDescriptorProto
	indent                    string
	isImportedByService       bool              // True if imported ONLY by service files (not by non-service files)
	importedTypeNames         map[string]bool   // Set of simple type names that have been imported
	typeNameSuffixes          map[string]int    // Map from full proto type name to numeric suffix (0 = no suffix, 1 = $1, etc.)
	localTypeNames            map[string]bool   // Set of TS names defined locally in this file (for collision detection)
	importAliases             map[string]string // Map from proto type name → aliased TS import name (e.g., ".beta.Data" → "Data$")
	rawImportNames            map[string]string // Map from proto type name → raw TS import name before aliasing (e.g., ".beta.Data" → "Data")
	wireTypeRef               string            // "WireType" normally, "WireType$" when local type collides with runtime WireType
	messageTypeRef            string            // "MessageType" normally, "MessageType$" when local type collides with runtime MessageType
	serviceTypeRef            string            // "ServiceType" normally, "ServiceType$" when local type collides with runtime-rpc ServiceType
	unknownFieldHandlerRef    string            // "UnknownFieldHandler" normally, "UnknownFieldHandler$" when local type collides
	partialMessageRef         string            // "PartialMessage" normally, "PartialMessage$" when local type collides
	binaryReadOptionsRef      string            // "BinaryReadOptions" normally, "BinaryReadOptions$" when local type collides
	binaryWriteOptionsRef     string            // "BinaryWriteOptions" normally, "BinaryWriteOptions$" when local type collides
	iBinaryReaderRef          string            // "IBinaryReader" normally, "IBinaryReader$" when local type collides
	iBinaryWriterRef          string            // "IBinaryWriter" normally, "IBinaryWriter$" when local type collides
	reflectionMergePartialRef string            // "reflectionMergePartial" normally, "reflectionMergePartial$" when local type collides
	scalarTypeRef             string            // "ScalarType" normally, "ScalarType$" when local type collides with runtime ScalarType
	longTypeRef               string            // "LongType" normally, "LongType$" when local type collides with runtime LongType
	pbLongRef                 string            // "PbLong" normally, "PbLong$" when local type collides with runtime PbLong
	typeofJsonValueRef        string            // "typeofJsonValue" normally, "typeofJsonValue$" when local type collides
	isJsonObjectRef           string            // "isJsonObject" normally, "isJsonObject$" when local type collides
	jsonObjectRef             string            // "JsonObject" normally, "JsonObject$" when local type collides
	jsonValueRef              string            // "JsonValue" normally, "JsonValue$" when local type collides
	jsonReadOptionsRef        string            // "JsonReadOptions" normally, "JsonReadOptions$" when local type collides
	jsonWriteOptionsRef       string            // "jsonWriteOptions" normally, "JsonWriteOptions$" when local type collides
	jsonWriteOptionsTypeRef   string            // "JsonWriteOptions" normally, "JsonWriteOptions$" when local type collides (type import)
	iMessageTypeRef           string            // "IMessageType" normally, "IMessageType$" when local type collides
	lowerCamelCaseRef         string            // "lowerCamelCase" normally, "lowerCamelCase$" when local type collides
	stackInterceptRef         string            // "stackIntercept" normally, "stackIntercept$" when message name collides
	rpcTransportRef           string            // "RpcTransport" normally, "RpcTransport$" when service name collides
	serviceInfoRef            string            // "ServiceInfo" normally, "ServiceInfo$" when service name collides
	serviceImportAliases      map[string]string // service TS name → aliased name (e.g., "UnaryCall" → "UnaryCall$") when service name collides with call type
	callTypeRefs              map[string]string // call type name → ref name (e.g., "DuplexStreamingCall" → "DuplexStreamingCall$") when message name collides
}

// p formats a line with fmt.Sprintf and appends it to the output buffer at
// the current indent. Embedded '\n' and '\r' split the result into multiple
// output lines; '\n' inside a JSDoc line (one starting with " * ") re-adds
// the " * " continuation prefix, while '\r' starts a raw line without it.
func (g *generator) p(format string, args ...interface{}) {
	line := fmt.Sprintf(format, args...)
	hasLF := strings.Contains(line, "\n")
	hasCR := strings.Contains(line, "\r")
	if hasLF || hasCR {
		isJSDoc := strings.HasPrefix(line, " * ")
		// Process character by character to distinguish \n (JSDoc continuation)
		// from \r (raw line break without JSDoc prefix, matching TS printer behavior)
		var current strings.Builder
		g.b.WriteString(g.indent)
		for i := 0; i < len(line); i++ {
			ch := line[i]
			if ch == '\n' || ch == '\r' {
				g.b.WriteString(current.String())
				g.b.WriteString("\n")
				current.Reset()
				g.b.WriteString(g.indent)
				if ch == '\n' && isJSDoc {
					g.b.WriteString(" * ")
				}
			} else {
				current.WriteByte(ch)
			}
		}
		g.b.WriteString(current.String())
		g.b.WriteString("\n")
	} else {
		g.b.WriteString(g.indent)
		g.b.WriteString(line)
		g.b.WriteString("\n")
	}
}
// pNoIndent writes a formatted line to the output buffer without applying
// the current indent.
func (g *generator) pNoIndent(format string, args ...interface{}) {
	fmt.Fprintf(&g.b, format, args...)
	g.b.WriteString("\n")
}

// isFileDeprecated returns true if the entire file is marked as deprecated
func (g *generator) isFileDeprecated() bool {
	return g.file.Options != nil && g.file.GetOptions().GetDeprecated()
}

// isOptimizeCodeSize returns true if the file has option optimize_for = CODE_SIZE or LITE_RUNTIME
func (g *generator) isOptimizeCodeSize() bool {
	if g.file.Options == nil || g.file.Options.OptimizeFor == nil {
		return false
	}
	opt := g.file.GetOptions().GetOptimizeFor()
	return opt == descriptorpb.FileOptions_CODE_SIZE || opt == descriptorpb.FileOptions_LITE_RUNTIME
}

// escapeJSDocComment escapes sequences that would break JSDoc comments
func escapeJSDocComment(s string) string {
	// Escape */ sequences which would close the JSDoc comment prematurely
	return strings.ReplaceAll(s, "*/", "*\\/")
}

// customOption represents a key-value pair for custom options
type customOption struct {
	key   string
	value interface{}
}

// mapEntryValue holds one decoded map entry (key already stringified,
// value still in its decoded Go representation).
type mapEntryValue struct {
	key   string
	value interface{}
}

// extInfo describes one known extension field: its descriptor, the package
// it was declared in, and the enclosing message path for nested extensions.
type extInfo struct {
	ext       *descriptorpb.FieldDescriptorProto
	pkg       string
	msgPrefix string // parent message name(s) for nested extensions, e.g. "Extensions."
}

// buildExtensionMap builds a map of extension field number -> extension info for a given extendee type
func (g *generator) buildExtensionMap(extendeeName string) map[int32]extInfo {
	extensionMap := make(map[int32]extInfo)

	collectFromFile := func(f *descriptorpb.FileDescriptorProto) {
		pkg := ""
		if f.Package != nil {
			pkg = *f.Package
		}
		// Top-level extensions
		for _, ext := range f.Extension {
			if ext.GetExtendee() == extendeeName {
				extensionMap[ext.GetNumber()] = extInfo{ext: ext, pkg: pkg}
			}
		}
		// Extensions nested in messages (recursively)
		var scanMsg func(msg *descriptorpb.DescriptorProto, prefix string)
		scanMsg = func(msg *descriptorpb.DescriptorProto, prefix string) {
			msgPrefix := prefix + msg.GetName() + "."
			for _, ext := range msg.Extension {
				if ext.GetExtendee() == extendeeName {
					extensionMap[ext.GetNumber()] = extInfo{ext: ext, pkg: pkg, msgPrefix: msgPrefix}
				}
			}
			for _, nested := range msg.NestedType {
				scanMsg(nested, msgPrefix)
			}
		}
		for _, msg := range f.MessageType {
			scanMsg(msg, "")
		}
	}

	// Check current file and all imported files
	for _, f := range g.allFiles {
		collectFromFile(f)
	}

	return extensionMap
}

// resolveEnumValueName looks up an enum value name by its fully-qualified type name and numeric value.
// Falls back to the decimal string of number when the enum or value is not found.
func (g *generator) resolveEnumValueName(typeName string, number int32) string {
	for _, f := range g.allFiles {
		for _, enum := range f.EnumType {
			var fqn string
			if f.GetPackage() == "" {
				fqn = "." + enum.GetName()
			} else {
				fqn = "." + f.GetPackage() + "." + enum.GetName()
			}
			if fqn == typeName {
				for _, val := range enum.Value {
					if val.GetNumber() == number {
						return val.GetName()
					}
				}
			}
		}
		// Also check nested enums inside messages
		for _, msg := range f.MessageType {
			if name := g.findEnumInMessage(f, msg, typeName, number); name != "" {
				return name
			}
		}
	}
	return fmt.Sprintf("%d", number)
}

// findEnumInMessage searches msg (and its nested messages) for an enum with
// the given fully-qualified name and returns the matching value's name.
func (g *generator) findEnumInMessage(f *descriptorpb.FileDescriptorProto, msg *descriptorpb.DescriptorProto, typeName string, number int32) string {
	var prefix string
	if f.GetPackage() == "" {
		prefix = "." + msg.GetName()
	} else {
		prefix = "." + f.GetPackage() + "." + msg.GetName()
	}
	return g.findEnumInMessageWithPrefix(prefix, msg, typeName, number)
}

// findEnumInMessageWithPrefix is the recursive worker for findEnumInMessage;
// prefix is the fully-qualified name of msg.
func (g *generator) findEnumInMessageWithPrefix(prefix string, msg *descriptorpb.DescriptorProto, typeName string, number int32) string {
	for _, enum := range msg.EnumType {
		fqn := prefix + "." + enum.GetName()
		if fqn == typeName {
			for _, val := range enum.Value {
				if val.GetNumber() == number {
					return val.GetName()
				}
			}
		}
	}
	for _, nested := range msg.NestedType {
		nestedPrefix := prefix + "." + nested.GetName()
		if name := g.findEnumInMessageWithPrefix(nestedPrefix, nested, typeName, number); name != "" {
			return name
		}
	}
	return ""
}

// parseCustomOptions extracts custom extension values from raw unknown fields
// NOTE(review): the lengths returned by protowire.ConsumeVarint/ConsumeBytes
// below are not checked for < 0, and the fixed-size skips reslice without a
// length check; malformed option bytes could panic — consider guarding.
func (g *generator) parseCustomOptions(unknown []byte, extensionMap map[int32]extInfo) []customOption {
	var result []customOption

	for len(unknown) > 0 {
		num, typ, n := protowire.ConsumeTag(unknown)
		if n < 0 {
			break
		}
		unknown = unknown[n:]

		extInf, found := extensionMap[int32(num)]
		if !found {
			// Unknown extension number: skip the payload by wire type.
			switch typ {
			case protowire.VarintType:
				_, n := protowire.ConsumeVarint(unknown)
				unknown = unknown[n:]
			case protowire.Fixed64Type:
				unknown = unknown[8:]
			case protowire.BytesType:
				_, n := protowire.ConsumeBytes(unknown)
				unknown = unknown[n:]
			case protowire.Fixed32Type:
				unknown = unknown[4:]
			}
			continue
		}

		ext := extInf.ext
		pkg := extInf.pkg
		if pkg != "" {
			pkg += "."
		}
		extName := pkg + extInf.msgPrefix + ext.GetName()

		// Decode the value according to the extension's declared proto type.
		switch ext.GetType() {
		case descriptorpb.FieldDescriptorProto_TYPE_STRING:
			v, n := protowire.ConsumeBytes(unknown)
			result = append(result, customOption{key: extName, value: string(v)})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
			v, n := protowire.ConsumeVarint(unknown)
			result = append(result, customOption{key: extName, value: v != 0})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
			v, n := protowire.ConsumeVarint(unknown)
			enumName := g.resolveEnumValueName(ext.GetTypeName(), int32(v))
			result = append(result, customOption{key: extName, value: enumName})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_INT32,
			descriptorpb.FieldDescriptorProto_TYPE_UINT32:
			v, n := protowire.ConsumeVarint(unknown)
			result = append(result, customOption{key: extName, value: int(v)})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_INT64:
			v, n := protowire.ConsumeVarint(unknown)
			result = append(result, customOption{key: extName, value: fmt.Sprintf("%d", int64(v))})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
			v, n := protowire.ConsumeVarint(unknown)
			result = append(result, customOption{key: extName, value: fmt.Sprintf("%d", v)})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
			v, n := protowire.ConsumeVarint(unknown)
			result = append(result, customOption{key: extName, value: int(protowire.DecodeZigZag(v))})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SINT64:
			v, n := protowire.ConsumeVarint(unknown)
			result = append(result, customOption{key: extName, value: fmt.Sprintf("%d", protowire.DecodeZigZag(v))})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
			v, n := protowire.ConsumeFixed32(unknown)
			result = append(result, customOption{key: extName, value: int(int32(v))})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
			v, n := protowire.ConsumeFixed64(unknown)
			result = append(result, customOption{key: extName, value: fmt.Sprintf("%d", int64(v))})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
			v, n := protowire.ConsumeFixed32(unknown)
			result = append(result, customOption{key: extName, value: int(v)})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
			v, n := protowire.ConsumeFixed64(unknown)
			result = append(result, customOption{key: extName, value: fmt.Sprintf("%d", v)})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
			v, n := protowire.ConsumeFixed32(unknown)
			fval := float64(math.Float32frombits(v))
			// Non-finite floats are emitted as their JS literal names.
			if math.IsNaN(fval) {
				result = append(result, customOption{key: extName, value: "NaN"})
			} else if math.IsInf(fval, 1) {
				result = append(result, customOption{key: extName, value: "Infinity"})
			} else if math.IsInf(fval, -1) {
				result = append(result, customOption{key: extName, value: "-Infinity"})
			} else {
				result = append(result, customOption{key: extName, value: fval})
			}
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
			v, n := protowire.ConsumeFixed64(unknown)
			fval := math.Float64frombits(v)
			if math.IsNaN(fval) {
				result = append(result, customOption{key: extName, value: "NaN"})
			} else if math.IsInf(fval, 1) {
				result = append(result, customOption{key: extName, value: "Infinity"})
			} else if math.IsInf(fval, -1) {
				result = append(result, customOption{key: extName, value: "-Infinity"})
			} else {
				result = append(result, customOption{key: extName, value: fval})
			}
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_BYTES:
			v, n := protowire.ConsumeBytes(unknown)
			result = append(result, customOption{key: extName, value: base64.StdEncoding.EncodeToString(v)})
			unknown = unknown[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE:
			v, n := protowire.ConsumeBytes(unknown)
			msgDesc := g.findMessageType(ext.GetTypeName())
			if msgDesc != nil {
				nested := g.parseMessageValue(v, msgDesc)
				result = append(result, customOption{key: extName, value: nested})
			}
			unknown = unknown[n:]
		default:
			// Unsupported declared type: skip the payload by wire type.
			switch typ {
			case protowire.VarintType:
				_, n := protowire.ConsumeVarint(unknown)
				unknown = unknown[n:]
			case protowire.Fixed64Type:
				unknown = unknown[8:]
			case protowire.BytesType:
				_, n := protowire.ConsumeBytes(unknown)
				unknown = unknown[n:]
			case protowire.Fixed32Type:
				unknown = unknown[4:]
			}
		}
	}

	if len(result) == 0 {
		return nil
	}
	// Merge repeated fields with the same key into arrays
	result = mergeRepeatedOptions(result)
	return result
}

// mergeRepeatedOptions merges customOption entries with the same key into array values.
// e.g. [{key:"tags", value:"alpha"}, {key:"tags", value:"beta"}] → [{key:"tags", value:["alpha","beta"]}]
func mergeRepeatedOptions(opts []customOption) []customOption {
	var merged []customOption
	seen := make(map[string]int) // key → index in merged
	for _, opt := range opts {
		if me, ok := opt.value.(mapEntryValue); ok {
			// Map entry: merge into []customOption (object)
			entry := customOption{key: me.key, value: me.value}
			if idx, exists := seen[opt.key]; exists {
				existing := merged[idx].value
				if arr, ok := existing.([]customOption); ok {
					merged[idx].value = append(arr, entry)
				} else {
					// NOTE(review): a pre-existing non-map value under this key is
					// replaced here, not preserved — confirm this cannot occur in
					// practice (a field is either a map or not).
					merged[idx].value = []customOption{entry}
				}
			} else {
				seen[opt.key] = len(merged)
				merged = append(merged, customOption{key: opt.key, value: []customOption{entry}})
			}
		} else if idx, ok := seen[opt.key]; ok {
			// Already seen this key — convert to or append to array
			existing := merged[idx].value
			switch arr := existing.(type) {
			case []interface{}:
				merged[idx].value = append(arr, opt.value)
			default:
				merged[idx].value = []interface{}{existing, opt.value}
			}
		} else {
			seen[opt.key] = len(merged)
			merged = append(merged, opt)
		}
	}
	return merged
}
// parseMessageValue decodes a message's wire bytes into an ordered list of field name→value pairs
// NOTE(review): as in parseCustomOptions, Consume* lengths are not checked
// for < 0 before reslicing; malformed bytes could panic — consider guarding.
func (g *generator) parseMessageValue(data []byte, msgDesc *descriptorpb.DescriptorProto) []customOption {
	// Build field number → field descriptor map
	fieldMap := make(map[int32]*descriptorpb.FieldDescriptorProto)
	for _, f := range msgDesc.Field {
		fieldMap[f.GetNumber()] = f
	}

	var result []customOption
	for len(data) > 0 {
		num, typ, n := protowire.ConsumeTag(data)
		if n < 0 {
			break
		}
		data = data[n:]

		fd, found := fieldMap[int32(num)]
		if !found {
			// Skip unknown field
			switch typ {
			case protowire.VarintType:
				_, n = protowire.ConsumeVarint(data)
			case protowire.Fixed64Type:
				n = 8
			case protowire.BytesType:
				_, n = protowire.ConsumeBytes(data)
			case protowire.Fixed32Type:
				n = 4
			}
			data = data[n:]
			continue
		}

		fieldName := fd.GetJsonName()

		// Handle packed repeated encoding: wire type is BytesType but field is a scalar numeric type
		if typ == protowire.BytesType && fd.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
			switch fd.GetType() {
			case descriptorpb.FieldDescriptorProto_TYPE_INT32, descriptorpb.FieldDescriptorProto_TYPE_UINT32,
				descriptorpb.FieldDescriptorProto_TYPE_INT64, descriptorpb.FieldDescriptorProto_TYPE_UINT64,
				descriptorpb.FieldDescriptorProto_TYPE_SINT32, descriptorpb.FieldDescriptorProto_TYPE_SINT64,
				descriptorpb.FieldDescriptorProto_TYPE_BOOL, descriptorpb.FieldDescriptorProto_TYPE_ENUM:
				// Packed varint-encoded elements.
				packed, pn := protowire.ConsumeBytes(data)
				data = data[pn:]
				for len(packed) > 0 {
					v, vn := protowire.ConsumeVarint(packed)
					if vn < 0 {
						break
					}
					packed = packed[vn:]
					switch fd.GetType() {
					case descriptorpb.FieldDescriptorProto_TYPE_INT32:
						result = append(result, customOption{key: fieldName, value: int(int32(v))})
					case descriptorpb.FieldDescriptorProto_TYPE_UINT32:
						result = append(result, customOption{key: fieldName, value: int(v)})
					case descriptorpb.FieldDescriptorProto_TYPE_INT64:
						result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", int64(v))})
					case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
						result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", v)})
					case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
						result = append(result, customOption{key: fieldName, value: int(protowire.DecodeZigZag(v))})
					case descriptorpb.FieldDescriptorProto_TYPE_SINT64:
						result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", protowire.DecodeZigZag(v))})
					case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
						result = append(result, customOption{key: fieldName, value: v != 0})
					case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
						enumName := g.resolveEnumValueName(fd.GetTypeName(), int32(v))
						result = append(result, customOption{key: fieldName, value: enumName})
					}
				}
				continue
			case descriptorpb.FieldDescriptorProto_TYPE_FIXED32, descriptorpb.FieldDescriptorProto_TYPE_SFIXED32,
				descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
				// Packed 32-bit fixed-width elements.
				packed, pn := protowire.ConsumeBytes(data)
				data = data[pn:]
				for len(packed) > 0 {
					v, vn := protowire.ConsumeFixed32(packed)
					if vn < 0 {
						break
					}
					packed = packed[vn:]
					switch fd.GetType() {
					case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
						result = append(result, customOption{key: fieldName, value: int(v)})
					case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
						result = append(result, customOption{key: fieldName, value: int(int32(v))})
					case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
						fval := float64(math.Float32frombits(v))
						if math.IsNaN(fval) {
							result = append(result, customOption{key: fieldName, value: "NaN"})
						} else if math.IsInf(fval, 1) {
							result = append(result, customOption{key: fieldName, value: "Infinity"})
						} else if math.IsInf(fval, -1) {
							result = append(result, customOption{key: fieldName, value: "-Infinity"})
						} else {
							result = append(result, customOption{key: fieldName, value: fval})
						}
					}
				}
				continue
			case descriptorpb.FieldDescriptorProto_TYPE_FIXED64, descriptorpb.FieldDescriptorProto_TYPE_SFIXED64,
				descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
				// Packed 64-bit fixed-width elements.
				packed, pn := protowire.ConsumeBytes(data)
				data = data[pn:]
				for len(packed) > 0 {
					v, vn := protowire.ConsumeFixed64(packed)
					if vn < 0 {
						break
					}
					packed = packed[vn:]
					switch fd.GetType() {
					case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
						result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", v)})
					case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
						result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", int64(v))})
					case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
						fval := math.Float64frombits(v)
						if math.IsNaN(fval) {
							result = append(result, customOption{key: fieldName, value: "NaN"})
						} else if math.IsInf(fval, 1) {
							result = append(result, customOption{key: fieldName, value: "Infinity"})
						} else if math.IsInf(fval, -1) {
							result = append(result, customOption{key: fieldName, value: "-Infinity"})
						} else {
							result = append(result, customOption{key: fieldName, value: fval})
						}
					}
				}
				continue
			}
		}

		// Unpacked / singular field: decode by declared proto type.
		switch fd.GetType() {
		case descriptorpb.FieldDescriptorProto_TYPE_STRING:
			v, n := protowire.ConsumeBytes(data)
			result = append(result, customOption{key: fieldName, value: string(v)})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
			v, n := protowire.ConsumeVarint(data)
			result = append(result, customOption{key: fieldName, value: v != 0})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
			v, n := protowire.ConsumeVarint(data)
			enumName := g.resolveEnumValueName(fd.GetTypeName(), int32(v))
			result = append(result, customOption{key: fieldName, value: enumName})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_INT32,
			descriptorpb.FieldDescriptorProto_TYPE_UINT32:
			v, n := protowire.ConsumeVarint(data)
			result = append(result, customOption{key: fieldName, value: int(v)})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_INT64:
			v, n := protowire.ConsumeVarint(data)
			result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", int64(v))})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
			v, n := protowire.ConsumeVarint(data)
			result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", v)})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
			v, n := protowire.ConsumeVarint(data)
			result = append(result, customOption{key: fieldName, value: int(protowire.DecodeZigZag(v))})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SINT64:
			v, n := protowire.ConsumeVarint(data)
			result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", protowire.DecodeZigZag(v))})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
			v, n := protowire.ConsumeFixed32(data)
			result = append(result, customOption{key: fieldName, value: int(int32(v))})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
			v, n := protowire.ConsumeFixed64(data)
			result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", int64(v))})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
			v, n := protowire.ConsumeFixed32(data)
			result = append(result, customOption{key: fieldName, value: int(v)})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
			v, n := protowire.ConsumeFixed64(data)
			result = append(result, customOption{key: fieldName, value: fmt.Sprintf("%d", v)})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
			v, n := protowire.ConsumeFixed32(data)
			fval := float64(math.Float32frombits(v))
			if math.IsNaN(fval) {
				result = append(result, customOption{key: fieldName, value: "NaN"})
			} else if math.IsInf(fval, 1) {
				result = append(result, customOption{key: fieldName, value: "Infinity"})
			} else if math.IsInf(fval, -1) {
				result = append(result, customOption{key: fieldName, value: "-Infinity"})
			} else {
				result = append(result, customOption{key: fieldName, value: fval})
			}
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
			v, n := protowire.ConsumeFixed64(data)
			fval := math.Float64frombits(v)
			if math.IsNaN(fval) {
				result = append(result, customOption{key: fieldName, value: "NaN"})
			} else if math.IsInf(fval, 1) {
				result = append(result, customOption{key: fieldName, value: "Infinity"})
			} else if math.IsInf(fval, -1) {
				result = append(result, customOption{key: fieldName, value: "-Infinity"})
			} else {
				result = append(result, customOption{key: fieldName, value: fval})
			}
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_BYTES:
			v, n := protowire.ConsumeBytes(data)
			result = append(result, customOption{key: fieldName, value: base64.StdEncoding.EncodeToString(v)})
			data = data[n:]
		case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE:
			v, n := protowire.ConsumeBytes(data)
			nestedMsg := g.findMessageType(fd.GetTypeName())
			if nestedMsg != nil {
				if nestedMsg.Options != nil && nestedMsg.GetOptions().GetMapEntry() {
					// Map entry: parse key/value and store as mapEntryValue
					nested := g.parseMessageValue(v, nestedMsg)
					var mapKey string
					var mapVal interface{}
					// Determine if map key is numeric (needs quoting in JSON)
					var keyIsNumeric bool
					for _, f := range nestedMsg.Field {
						if f.GetNumber() == 1 { // key field
							switch f.GetType() {
							case descriptorpb.FieldDescriptorProto_TYPE_INT32,
								descriptorpb.FieldDescriptorProto_TYPE_INT64,
								descriptorpb.FieldDescriptorProto_TYPE_UINT32,
								descriptorpb.FieldDescriptorProto_TYPE_UINT64,
								descriptorpb.FieldDescriptorProto_TYPE_SINT32,
								descriptorpb.FieldDescriptorProto_TYPE_SINT64,
								descriptorpb.FieldDescriptorProto_TYPE_FIXED32,
								descriptorpb.FieldDescriptorProto_TYPE_FIXED64,
								descriptorpb.FieldDescriptorProto_TYPE_SFIXED32,
								descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
								keyIsNumeric = true
							}
						}
					}
					for _, e := range nested {
						if e.key == "key" {
							mapKey = fmt.Sprintf("%v", e.value)
						} else if e.key == "value" {
							mapVal = e.value
						}
					}
					if keyIsNumeric {
						mapKey = fmt.Sprintf("\"%s\"", mapKey)
					}
					result = append(result, customOption{key: fieldName, value: mapEntryValue{key: mapKey, value: mapVal}})
				} else {
					nested := g.parseMessageValue(v, nestedMsg)
					result = append(result, customOption{key: fieldName, value: nested})
				}
			}
			data = data[n:]
		default:
			// Unsupported declared type: skip the payload by wire type.
			switch typ {
			case protowire.VarintType:
				_, n = protowire.ConsumeVarint(data)
			case protowire.Fixed64Type:
				n = 8
			case protowire.BytesType:
				_, n = protowire.ConsumeBytes(data)
			case protowire.Fixed32Type:
				n = 4
			}
			data = data[n:]
		}
	}
	return mergeRepeatedOptions(result)
}

// getCustomMethodOptions decodes custom extensions on a method's options.
func (g *generator) getCustomMethodOptions(opts *descriptorpb.MethodOptions) []customOption {
	if opts == nil {
		return nil
	}
	extensionMap := g.buildExtensionMap(".google.protobuf.MethodOptions")
	return g.parseCustomOptions(opts.ProtoReflect().GetUnknown(), extensionMap)
}

// getCustomMessageOptions decodes custom extensions on a message's options.
func (g *generator) getCustomMessageOptions(opts *descriptorpb.MessageOptions) []customOption {
	if opts == nil {
		return nil
	}
	extensionMap := g.buildExtensionMap(".google.protobuf.MessageOptions")
	return g.parseCustomOptions(opts.ProtoReflect().GetUnknown(), extensionMap)
}

// getCustomFieldOptions decodes custom extensions on a field's options.
func (g *generator) getCustomFieldOptions(opts *descriptorpb.FieldOptions) []customOption {
	if opts == nil {
		return nil
	}
	extensionMap := g.buildExtensionMap(".google.protobuf.FieldOptions")
	return g.parseCustomOptions(opts.ProtoReflect().GetUnknown(), extensionMap)
}

// getCustomServiceOptions decodes custom extensions on a service's options.
func (g *generator) getCustomServiceOptions(opts *descriptorpb.ServiceOptions) []customOption {
	if opts == nil {
		return nil
	}
	extensionMap := g.buildExtensionMap(".google.protobuf.ServiceOptions")
	return g.parseCustomOptions(opts.ProtoReflect().GetUnknown(), extensionMap)
}

// formatCustomOptions formats custom options as a TypeScript object literal
func formatCustomOptions(opts []customOption) string {
	if len(opts) == 0 {
		return "{}"
	}

	var parts []string
	// Options are already in wire order (field number order)
	for _, opt := range opts {
		var valueStr string
		switch val := opt.value.(type) {
		case string:
			escaped := strings.ReplaceAll(val, `\`, `\\`)
			escaped = strings.ReplaceAll(escaped, `"`, `\"`)
			escaped = strings.ReplaceAll(escaped, "\n", `\n`)
			escaped = strings.ReplaceAll(escaped, "\r", `\r`)
			escaped = strings.ReplaceAll(escaped, "\t", `\t`)
			valueStr = fmt.Sprintf("\"%s\"", escaped)
		case bool:
			valueStr = fmt.Sprintf("%t", val)
		case int:
			valueStr = fmt.Sprintf("%d", 
val)
+        case float64:
+            valueStr = formatFloatJS(val)
+        case []customOption:
+            valueStr = formatCustomOptions(val)
+        case []interface{}:
+            valueStr = formatCustomOptionArray(val)
+        default:
+            valueStr = fmt.Sprintf("%v", val)
+        }
+        keyStr := opt.key
+        // Quote keys that are not valid bare TS identifiers: dotted extension
+        // names and keys starting with a digit.
+        if strings.Contains(opt.key, ".") || (len(opt.key) > 0 && opt.key[0] >= '0' && opt.key[0] <= '9') {
+            keyStr = fmt.Sprintf("\"%s\"", opt.key)
+        }
+        parts = append(parts, fmt.Sprintf("%s: %s", keyStr, valueStr))
+    }
+
+    return "{ " + strings.Join(parts, ", ") + " }"
+}
+
+// wrapperTypeNames is the set of google.protobuf wrapper message names.
+var wrapperTypeNames = map[string]bool{
+    "DoubleValue": true, "FloatValue": true,
+    "Int64Value": true, "UInt64Value": true,
+    "Int32Value": true, "UInt32Value": true,
+    "BoolValue": true, "StringValue": true, "BytesValue": true,
+}
+
+// isWrapperTypeName reports whether name is one of the google.protobuf
+// wrapper message names listed in wrapperTypeNames.
+func isWrapperTypeName(name string) bool {
+    return wrapperTypeNames[name]
+}
+
+// isArrayIndex returns true if s is a canonical JS array index (non-negative integer < 2^32-1).
+// These keys are enumerated first by Object.entries() in ascending numeric order.
+func isArrayIndex(s string) bool {
+    if len(s) == 0 || len(s) > 10 {
+        return false
+    }
+    if s == "0" {
+        return true
+    }
+    // A canonical index other than "0" never has a leading zero.
+    if s[0] < '1' || s[0] > '9' {
+        return false
+    }
+    for i := 1; i < len(s); i++ {
+        if s[i] < '0' || s[i] > '9' {
+            return false
+        }
+    }
+    v, err := strconv.ParseUint(s, 10, 64)
+    if err != nil {
+        return false
+    }
+    return v < (1<<32 - 1)
+}
+
+// formatFloatJS formats a float64 the way JavaScript's Number.prototype.toString() does:
+// scientific notation for |v| < 1e-6 or |v| >= 1e21, fixed-point otherwise.
+func formatFloatJS(v float64) string {
+    if v == 0 {
+        return "0"
+    }
+    // JavaScript uses fixed-point for 1e-6 <= |v| < 1e21
+    abs := v
+    if abs < 0 {
+        abs = -abs
+    }
+    if abs < 1e-6 || abs >= 1e21 {
+        // Use Go 'e' format then adjust to JS style:
+        // Go: 1e-20 → "1e-20", 1.23e-15 → "1.23e-15" — these match JS
+        // But Go uses lowercase 'e' which JS also does, and Go omits '+' for negative exp.
+ // For positive exponent, JS uses 'e+', Go 'e' format also does. + s := strconv.FormatFloat(v, 'e', -1, 64) + // Go's 'e' format uses e+00 / e-00 with at least 2 digits for exponent, + // but JS uses minimal digits. Remove leading zeros from exponent. + if idx := strings.Index(s, "e"); idx >= 0 { + expPart := s[idx+1:] // e.g. "+021" or "-020" or "+07" + sign := expPart[0] // '+' or '-' + digits := strings.TrimLeft(expPart[1:], "0") + if digits == "" { + digits = "0" + } + if sign == '-' { + s = s[:idx] + "e-" + digits + } else { + s = s[:idx] + "e+" + digits + } + } + return s + } + return strconv.FormatFloat(v, 'f', -1, 64) +} + +// formatCustomOptionArray formats a []interface{} as a TypeScript array literal +func formatCustomOptionArray(vals []interface{}) string { + var elems []string + for _, v := range vals { + switch val := v.(type) { + case string: + escaped := strings.ReplaceAll(val, `\`, `\\`) + escaped = strings.ReplaceAll(escaped, `"`, `\"`) + escaped = strings.ReplaceAll(escaped, "\n", `\n`) + escaped = strings.ReplaceAll(escaped, "\r", `\r`) + escaped = strings.ReplaceAll(escaped, "\t", `\t`) + elems = append(elems, fmt.Sprintf("\"%s\"", escaped)) + case bool: + elems = append(elems, fmt.Sprintf("%t", val)) + case int: + elems = append(elems, fmt.Sprintf("%d", val)) + case float64: + elems = append(elems, formatFloatJS(val)) + case []customOption: + elems = append(elems, formatCustomOptions(val)) + default: + elems = append(elems, fmt.Sprintf("%v", v)) + } + } + return "[" + strings.Join(elems, ", ") + "]" +} + +// getLeadingDetachedComments retrieves leading detached comments for a given path in SourceCodeInfo +// Leading detached comments are comments separated from the element by a blank line +func (g *generator) getLeadingDetachedComments(path []int32) []string { + if g.file.SourceCodeInfo == nil { + return nil + } + for _, loc := range g.file.SourceCodeInfo.Location { + if len(loc.Path) != len(path) { + continue + } + match := true + for i := 
range path { + if loc.Path[i] != path[i] { + match = false + break + } + } + if match && len(loc.LeadingDetachedComments) > 0 { + var result []string + for _, comment := range loc.LeadingDetachedComments { + // Process each detached comment + // Strip one leading space from each line (protobuf convention) + lines := strings.Split(comment, "\n") + for i, line := range lines { + if line == "" { + lines[i] = "" + } else if strings.HasPrefix(line, " ") { + lines[i] = line[1:] + } else { + lines[i] = line + } + } + result = append(result, strings.Join(lines, "\n")) + } + return result + } + } + return nil +} + +// getLeadingComments retrieves leading comments for a given path in SourceCodeInfo. +// Returns (comment, true) when a comment exists (even if whitespace-only), +// or ("", false) when no comment is present. +func (g *generator) getLeadingComments(path []int32) (string, bool) { + if g.file.SourceCodeInfo == nil { + return "", false + } + for _, loc := range g.file.SourceCodeInfo.Location { + if len(loc.Path) != len(path) { + continue + } + match := true + for i := range path { + if loc.Path[i] != path[i] { + match = false + break + } + } + if match && loc.LeadingComments != nil { + comment := *loc.LeadingComments + // A single empty comment line (just "//\n") has no content + if comment == "\n" { + return "", false + } + // Multiple empty comment lines (all newlines) — preserve as empty JSDoc lines + if strings.TrimRight(comment, "\n") == "" { + return comment[:len(comment)-1], true + } + // Count trailing blank lines before trimming + rawLines := strings.Split(comment, "\n") + trailingBlanks := 0 + if len(rawLines) > 0 && rawLines[len(rawLines)-1] == "" { + foundNonBlank := false + for i := len(rawLines) - 2; i >= 0; i-- { + if strings.TrimSpace(rawLines[i]) == "" { + trailingBlanks++ + } else { + foundNonBlank = true + break + } + } + if !foundNonBlank { + trailingBlanks = 0 // entire comment is blank/whitespace + } + } + + // Don't trim the start - we need to 
preserve leading empty lines + comment = strings.TrimRight(comment, " \t\n") + // Strip one leading space from each line (protobuf convention) + lines := strings.Split(comment, "\n") + for i, line := range lines { + line = strings.TrimRight(line, " \t") + if line == "" { + lines[i] = "" // Keep empty for blank comment lines + } else if strings.HasPrefix(line, " ") { + lines[i] = line[1:] + } else { + lines[i] = line + } + } + result := strings.Join(lines, "\n") + // Add trailing blank lines: extras preserved in text, last one as marker + if trailingBlanks > 0 { + for i := 0; i < trailingBlanks-1; i++ { + result += "\n" + } + result += "\n__HAS_TRAILING_BLANK__" + } + return result, true + } + } + return "", false +} + +// getTrailingComments retrieves trailing comments for a given path in SourceCodeInfo +func (g *generator) getTrailingComments(path []int32) string { + if g.file.SourceCodeInfo == nil { + return "" + } + for _, loc := range g.file.SourceCodeInfo.Location { + if len(loc.Path) != len(path) { + continue + } + match := true + for i := range path { + if loc.Path[i] != path[i] { + match = false + break + } + } + if match && loc.TrailingComments != nil { + comment := *loc.TrailingComments + comment = strings.TrimRight(comment, "\n") + // Strip one leading space from each line (protobuf convention) + lines := strings.Split(comment, "\n") + for i, line := range lines { + if strings.HasPrefix(line, " ") { + lines[i] = line[1:] + } + } + return strings.Join(lines, "\n") + } + } + return "" +} + +// getEnumTrailingComments retrieves trailing comments for an enum, preserving trailing blank info +func (g *generator) getEnumTrailingComments(path []int32) string { + if g.file.SourceCodeInfo == nil { + return "" + } + for _, loc := range g.file.SourceCodeInfo.Location { + if len(loc.Path) != len(path) { + continue + } + match := true + for i := range path { + if loc.Path[i] != path[i] { + match = false + break + } + } + if match && loc.TrailingComments != nil { + 
comment := *loc.TrailingComments + // Count trailing blank lines before trimming + rawLines := strings.Split(comment, "\n") + trailingBlanks := 0 + if len(rawLines) > 0 && rawLines[len(rawLines)-1] == "" { + foundNonBlank := false + for i := len(rawLines) - 2; i >= 0; i-- { + if strings.TrimSpace(rawLines[i]) == "" { + trailingBlanks++ + } else { + foundNonBlank = true + break + } + } + if !foundNonBlank { + trailingBlanks = 0 + } + } + comment = strings.TrimRight(comment, " \t\n") + lines := strings.Split(comment, "\n") + for i, line := range lines { + line = strings.TrimRight(line, " \t") + if line == "" { + lines[i] = "" + } else if strings.HasPrefix(line, " ") { + lines[i] = line[1:] + } else { + lines[i] = line + } + } + result := strings.Join(lines, "\n") + if trailingBlanks > 0 { + for i := 0; i < trailingBlanks-1; i++ { + result += "\n" + } + result += "\n__HAS_TRAILING_BLANK__" + } + return result + } + } + return "" +} + +// detectTypeNameCollisions scans all messages and enums to detect naming collisions +// and assigns numeric suffixes ($1, $2, etc.) where needed +func (g *generator) detectTypeNameCollisions() { + // Map from TypeScript name to list of full proto names that generate it + tsNameToProtoNames := make(map[string][]string) + + // Collect all type names (messages and enums) + for _, msg := range g.file.MessageType { + g.collectMessageTypeNames(msg, "", "", tsNameToProtoNames) + } + for _, enum := range g.file.EnumType { + g.collectEnumTypeNames(enum, "", "", tsNameToProtoNames) + } + + // Assign numeric suffixes for collisions + for _, protoNames := range tsNameToProtoNames { + if len(protoNames) > 1 { + // Collision detected! Assign numeric suffixes + // First occurrence gets 0 (no suffix), subsequent get 1, 2, 3, etc. + for i, protoName := range protoNames { + if i == 0 { + g.typeNameSuffixes[protoName] = 0 // No suffix + } else { + g.typeNameSuffixes[protoName] = i // $1, $2, etc. 
+ } + } + } + } +} + +// collectMessageTypeNames recursively collects all message type names +func (g *generator) collectMessageTypeNames(msg *descriptorpb.DescriptorProto, parentPrefix string, protoParentPrefix string, tsNameToProtoNames map[string][]string) { + // Skip map entry messages + if msg.Options != nil && msg.GetOptions().GetMapEntry() { + return + } + + baseName := msg.GetName() + escapedName := baseName + if parentPrefix == "" { + escapedName = escapeTypescriptKeyword(baseName) + } + + tsName := parentPrefix + escapedName + protoName := protoParentPrefix + baseName + + // Build full proto name for tracking + pkgPrefix := "" + if g.file.Package != nil && *g.file.Package != "" { + pkgPrefix = *g.file.Package + "." + } + fullProtoName := pkgPrefix + protoName + + // Add to map + tsNameToProtoNames[tsName] = append(tsNameToProtoNames[tsName], fullProtoName) + + // Recurse into nested messages - use unescaped baseName for prefix + // (matching generateMessageInterface which uses parentPrefix + baseName + "_") + nestedPrefix := parentPrefix + baseName + "_" + for _, nested := range msg.NestedType { + g.collectMessageTypeNames(nested, nestedPrefix, protoName + ".", tsNameToProtoNames) + } + + // Recurse into nested enums + for _, enum := range msg.EnumType { + g.collectEnumTypeNames(enum, nestedPrefix, protoName + ".", tsNameToProtoNames) + } +} + +// collectEnumTypeNames recursively collects all enum type names +func (g *generator) collectEnumTypeNames(enum *descriptorpb.EnumDescriptorProto, parentPrefix string, protoParentPrefix string, tsNameToProtoNames map[string][]string) { + baseName := enum.GetName() + escapedName := baseName + if parentPrefix == "" { + escapedName = escapeTypescriptKeyword(baseName) + } + + tsName := parentPrefix + escapedName + protoName := protoParentPrefix + baseName + + // Build full proto name for tracking + pkgPrefix := "" + if g.file.Package != nil && *g.file.Package != "" { + pkgPrefix = *g.file.Package + "." 
+ } + fullProtoName := pkgPrefix + protoName + + // Add to map + tsNameToProtoNames[tsName] = append(tsNameToProtoNames[tsName], fullProtoName) +} + + +func generateFile(file *descriptorpb.FileDescriptorProto, allFiles []*descriptorpb.FileDescriptorProto, params params, isImportedByService bool) string { + // Skip files that have no messages, enums, or services (e.g., files with only extension definitions) + if len(file.MessageType) == 0 && len(file.EnumType) == 0 && len(file.Service) == 0 { + return "" + } + + g := &generator{ + params: params, + file: file, + allFiles: allFiles, + isImportedByService: isImportedByService, + importedTypeNames: make(map[string]bool), + typeNameSuffixes: make(map[string]int), + localTypeNames: make(map[string]bool), + importAliases: make(map[string]string), + rawImportNames: make(map[string]string), + wireTypeRef: "WireType", + messageTypeRef: "MessageType", + serviceTypeRef: "ServiceType", + unknownFieldHandlerRef: "UnknownFieldHandler", + partialMessageRef: "PartialMessage", + binaryReadOptionsRef: "BinaryReadOptions", + binaryWriteOptionsRef: "BinaryWriteOptions", + iBinaryReaderRef: "IBinaryReader", + iBinaryWriterRef: "IBinaryWriter", + reflectionMergePartialRef: "reflectionMergePartial", + scalarTypeRef: "ScalarType", + longTypeRef: "LongType", + pbLongRef: "PbLong", + typeofJsonValueRef: "typeofJsonValue", + isJsonObjectRef: "isJsonObject", + jsonObjectRef: "JsonObject", + jsonValueRef: "JsonValue", + jsonReadOptionsRef: "JsonReadOptions", + jsonWriteOptionsRef: "jsonWriteOptions", + jsonWriteOptionsTypeRef: "JsonWriteOptions", + iMessageTypeRef: "IMessageType", + lowerCamelCaseRef: "lowerCamelCase", + stackInterceptRef: "stackIntercept", + rpcTransportRef: "RpcTransport", + serviceInfoRef: "ServiceInfo", + } + + // Detect type name collisions and assign numeric suffixes + g.detectTypeNameCollisions() + + // Header + g.pNoIndent("// @generated by protobuf-ts 2.11.1 with parameter long_type_string") + pkgComment := "" + syntax 
:= file.GetSyntax() + if syntax == "" { + syntax = "proto2" // Default to proto2 when syntax is not specified + } + if file.Package != nil && *file.Package != "" { + pkgComment = fmt.Sprintf(" (package \"%s\", syntax %s)", *file.Package, syntax) + } else { + pkgComment = fmt.Sprintf(" (syntax %s)", syntax) + } + g.pNoIndent("// @generated from protobuf file \"%s\"%s", file.GetName(), pkgComment) + g.pNoIndent("// tslint:disable") + // Add file-level deprecation comment if the entire file is deprecated + if g.isFileDeprecated() { + g.pNoIndent("// @deprecated") + } + + // Add file-level leading detached comments (license headers, etc.) + // These are typically attached to the syntax declaration (field 12) + if file.SourceCodeInfo != nil { + for _, loc := range file.SourceCodeInfo.Location { + // Check for syntax field with detached comments + if len(loc.Path) == 1 && loc.Path[0] == 12 && len(loc.LeadingDetachedComments) > 0 { + // Blank line before the license header + g.pNoIndent("//") + for blockIdx, detached := range loc.LeadingDetachedComments { + // Don't use TrimSpace - it removes trailing newlines which represent blank // lines + // Just check if the comment has any non-whitespace content + if strings.TrimRight(detached, "\n") != "" { + // Split by newline (keeping trailing empty strings for blank lines) + lines := strings.Split(detached, "\n") + // Check if last line is empty (trailing newline case) + hasTrailingNewline := len(lines) > 0 && lines[len(lines)-1] == "" + // Output all lines except the trailing empty one (we'll handle it separately) + endIdx := len(lines) + if hasTrailingNewline { + endIdx = len(lines) - 1 + } + for i := 0; i < endIdx; i++ { + line := lines[i] + if line == "" { + g.pNoIndent("//") + } else { + g.pNoIndent("//%s", line) + } + } + // If block has trailing newline, output it + if hasTrailingNewline { + g.pNoIndent("//") + } + // Add // separator between blocks (not after last block) + if blockIdx < 
len(loc.LeadingDetachedComments)-1 { + g.pNoIndent("//") + } + } + } + } + } + } + + // Add package-level leading detached comments (path [2]) + if file.SourceCodeInfo != nil { + for _, loc := range file.SourceCodeInfo.Location { + if len(loc.Path) == 1 && loc.Path[0] == 2 && len(loc.LeadingDetachedComments) > 0 { + g.pNoIndent("//") + for blockIdx, detached := range loc.LeadingDetachedComments { + if strings.TrimRight(detached, "\n") != "" { + lines := strings.Split(detached, "\n") + hasTrailingNewline := len(lines) > 0 && lines[len(lines)-1] == "" + endIdx := len(lines) + if hasTrailingNewline { + endIdx = len(lines) - 1 + } + for i := 0; i < endIdx; i++ { + line := lines[i] + if line == "" { + g.pNoIndent("//") + } else { + g.pNoIndent("//%s", line) + } + } + if hasTrailingNewline { + g.pNoIndent("//") + } + if blockIdx < len(loc.LeadingDetachedComments)-1 { + g.pNoIndent("//") + } + } + } + } + } + } + + // Collect imports needed + imports := g.collectImports(file) + + // Write imports + g.writeImports(imports) + + // Output file-level leading detached comments (from first message) + if len(file.MessageType) > 0 { + firstMsgPath := []int32{4, 0} + detachedComments := g.getLeadingDetachedComments(firstMsgPath) + for blockIdx, comment := range detachedComments { + // Trim trailing newline (it will be represented by // separator or blank line) + comment = strings.TrimRight(comment, "\n") + // Split by newline and output each line + for _, line := range strings.Split(comment, "\n") { + if line == "" { + g.pNoIndent("// ") + } else { + g.pNoIndent("// %s", line) + } + } + // Add empty line separator between blocks (not after last block) + if blockIdx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + // Blank line after all blocks + if len(detachedComments) > 0 { + g.pNoIndent("") + } + } + + // Generate message interfaces (with nested types/enums) + for msgIdx, msg := range file.MessageType { + g.generateMessageInterface(msg, "", "", []int32{4, int32(msgIdx)}) 
+ } + + // Generate top-level enums + for enumIdx, enum := range file.EnumType { + g.generateEnum(enum, "", "", []int32{5, int32(enumIdx)}) + } + + // Generate message implementation classes + for _, msg := range file.MessageType { + g.generateMessageClass(msg, "", "") + } + + // Generate services + for _, svc := range file.Service { + g.generateService(svc) + } + + return g.b.String() +} + +func (g *generator) collectUsedTypes() (map[string]bool, []string) { + usedInMessages := make(map[string]bool) + usedInServices := make(map[string]bool) + var messageFieldTypes []string + var serviceTypes []string + + // Scan all messages for field types + // Process in forward declaration order (same as proto file order) + // Then reverse the list to match TypeScript plugin's prepend behavior + var scanMessage func(*descriptorpb.DescriptorProto) + scanMessage = func(msg *descriptorpb.DescriptorProto) { + // Process fields in declaration order (not field number order) + // protobuf-ts processes fields in declaration order, then prepends imports + for _, field := range msg.Field { + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_ENUM { + typeName := field.GetTypeName() + // For map fields, register the value type (not the entry type) + // at the position of the map field itself + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { + entryMsg := g.findMessageType(typeName) + if entryMsg != nil && entryMsg.Options != nil && entryMsg.GetOptions().GetMapEntry() { + // Extract the value field (field number 2) type + for _, entryField := range entryMsg.Field { + if entryField.GetNumber() == 2 && + (entryField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE || + entryField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_ENUM) { + valType := entryField.GetTypeName() + if !usedInMessages[valType] { + usedInMessages[valType] = true + messageFieldTypes = 
append(messageFieldTypes, valType) + } + } + } + continue + } + } + if !usedInMessages[typeName] { + usedInMessages[typeName] = true + messageFieldTypes = append(messageFieldTypes, typeName) + } + } + } + for _, nested := range msg.NestedType { + // Skip map entry messages — their value types are handled inline above + if nested.Options != nil && nested.GetOptions().GetMapEntry() { + continue + } + scanMessage(nested) + } + } + + // Process messages in forward order + for i := 0; i < len(g.file.MessageType); i++ { + scanMessage(g.file.MessageType[i]) + } + + // Scan services for method input/output types (in forward method order for imports) + for _, service := range g.file.Service { + for i := 0; i < len(service.Method); i++ { + method := service.Method[i] + // Add output type first (matches protobuf-ts ordering within each method) + outputType := method.GetOutputType() + if outputType != "" && !usedInServices[outputType] { + usedInServices[outputType] = true + serviceTypes = append(serviceTypes, outputType) + } + // Add input type second + inputType := method.GetInputType() + if inputType != "" && !usedInServices[inputType] { + usedInServices[inputType] = true + serviceTypes = append(serviceTypes, inputType) + } + } + } + + // Reverse messageFieldTypes to match TypeScript plugin's prepend behavior + // TypeScript plugin adds imports at the top (prepends), so last encountered appears first + for i, j := 0, len(messageFieldTypes)-1; i < j; i, j = i+1, j-1 { + messageFieldTypes[i], messageFieldTypes[j] = messageFieldTypes[j], messageFieldTypes[i] + } + + // For service-only files, reverse per-method-pair order to match protobuf-ts's + // prepend semantics: later methods appear first, but within each method + // output stays above input. Collect per-method type pairs, reverse the pairs, + // then flatten. For files with messages, keep forward order (output, input). 
+ if len(g.file.MessageType) == 0 && len(serviceTypes) > 0 { + // Re-collect as per-method pairs so we can reverse method order + var methodPairs [][]string + usedInServices2 := make(map[string]bool) + for _, service := range g.file.Service { + for i := 0; i < len(service.Method); i++ { + method := service.Method[i] + var pair []string + outputType := method.GetOutputType() + if outputType != "" && !usedInServices2[outputType] { + usedInServices2[outputType] = true + pair = append(pair, outputType) + } + inputType := method.GetInputType() + if inputType != "" && !usedInServices2[inputType] { + usedInServices2[inputType] = true + pair = append(pair, inputType) + } + if len(pair) > 0 { + methodPairs = append(methodPairs, pair) + } + } + } + // Reverse method pair order (last method's types appear first) + for i, j := 0, len(methodPairs)-1; i < j; i, j = i+1, j-1 { + methodPairs[i], methodPairs[j] = methodPairs[j], methodPairs[i] + } + serviceTypes = nil + for _, pair := range methodPairs { + serviceTypes = append(serviceTypes, pair...) + } + } + + // Build final ordered list: + // 1. Service-only types (not used in message fields) - these go BEFORE ServiceType + // 2. 
Message field types (even if also used in services) - these go AFTER runtime imports + var orderedTypes []string + used := make(map[string]bool) + + // First add service-only types + for _, typeName := range serviceTypes { + if !usedInMessages[typeName] { + orderedTypes = append(orderedTypes, typeName) + used[typeName] = true + } + } + + // Then add message field types in reversed order (to match TypeScript prepend) + for _, typeName := range messageFieldTypes { + if !used[typeName] { + orderedTypes = append(orderedTypes, typeName) + used[typeName] = true + } + } + + return used, orderedTypes +} + +func (g *generator) collectImports(file *descriptorpb.FileDescriptorProto) map[string]bool { + imports := make(map[string]bool) + + // Always need runtime imports for messages + if len(file.MessageType) > 0 { + imports["@protobuf-ts/runtime"] = true + } + + // Check for dependencies (other proto files) + for _, dep := range file.Dependency { + if strings.Contains(dep, "google/protobuf/") { + // Well-known types + imports["./" + strings.TrimSuffix(filepath.Base(dep), ".proto")] = true + } else { + imports["./" + strings.TrimSuffix(filepath.Base(dep), ".proto")] = true + } + } + + return imports +} + +func (g *generator) writeImports(imports map[string]bool) { + // Collect local type names for collision detection + g.collectLocalTypeNames() + + // Collect used types - service-only types first, then message field types + usedTypes, orderedTypes := g.collectUsedTypes() + + // Build a map from dependency path to file for quick lookup + depFiles := make(map[string]*descriptorpb.FileDescriptorProto) + currentFileDir := filepath.Dir(g.file.GetName()) + + for _, dep := range g.file.Dependency { + depFile := g.findFileByName(dep) + if depFile != nil { + // Compute relative path from current file to dependency + depPath := strings.TrimSuffix(dep, ".proto") + relPath := g.getRelativeImportPath(currentFileDir, depPath) + depFiles[relPath] = depFile + } + } + // Include files 
transitively reachable via import public + for _, pubFile := range g.collectTransitivePublicDeps(g.file) { + depPath := strings.TrimSuffix(pubFile.GetName(), ".proto") + relPath := g.getRelativeImportPath(currentFileDir, depPath) + if _, exists := depFiles[relPath]; !exists { + depFiles[relPath] = pubFile + } + } + + // Pre-compute import aliases for types that collide + g.precomputeImportAliases(depFiles) + + // Helper to generate import statement for a type + generateImport := func(typeName string) string { + if !usedTypes[typeName] { + return "" + } + + // Find which dependency this type belongs to + typeNameStripped := strings.TrimPrefix(typeName, ".") + var matchedDepFile *descriptorpb.FileDescriptorProto + var matchedImportPath string + + // First, find all files matching the package + var candidateFiles []*struct { + file *descriptorpb.FileDescriptorProto + path string + } + for importPath, depFile := range depFiles { + depPkg := "" + if depFile.Package != nil { + depPkg = *depFile.Package + } + if depPkg == "" || strings.HasPrefix(typeNameStripped, depPkg+".") { + candidateFiles = append(candidateFiles, &struct { + file *descriptorpb.FileDescriptorProto + path string + }{depFile, importPath}) + } + } + + if len(candidateFiles) == 0 { + return "" + } + + // If multiple files have the same package, we need to find which one contains the type + if len(candidateFiles) == 1 { + matchedDepFile = candidateFiles[0].file + matchedImportPath = candidateFiles[0].path + } else { + // Check each candidate to find which one contains the type + for _, candidate := range candidateFiles { + depPkg := "" + if candidate.file.Package != nil { + depPkg = *candidate.file.Package + } + var remainder string + if depPkg != "" { + remainder = strings.TrimPrefix(typeNameStripped, depPkg+".") + } else { + remainder = typeNameStripped + } + parts := strings.Split(remainder, ".") + + if findTypeInDescriptors(candidate.file.MessageType, candidate.file.EnumType, parts) { + matchedDepFile 
= candidate.file + matchedImportPath = candidate.path + break + } + } + } + + if matchedDepFile == nil { + return "" + } + + // Extract the type from this dependency + depPkg := "" + if matchedDepFile.Package != nil { + depPkg = *matchedDepFile.Package + } + var remainder string + if depPkg != "" { + remainder = strings.TrimPrefix(typeNameStripped, depPkg+".") + } else { + remainder = typeNameStripped + } + parts := strings.Split(remainder, ".") + + var importStmt string + var importedName string + + if findTypeInDescriptors(matchedDepFile.MessageType, matchedDepFile.EnumType, parts) { + if len(parts) == 1 { + importedName = escapeTypescriptKeyword(parts[0]) + } else { + importedName = strings.Join(parts, "_") + } + importStmt = fmt.Sprintf("import { %s } from \"%s\";", importedName, matchedImportPath) + } + + // Use pre-computed alias if this type has a name collision + if importedName != "" { + if alias, ok := g.importAliases[typeName]; ok { + importStmt = fmt.Sprintf("import { %s as %s } from \"%s\";", importedName, alias, matchedImportPath) + g.importedTypeNames[alias] = true + } else { + g.importedTypeNames[importedName] = true + } + } + + return importStmt + } + + // Determine which types are service-only (imported before ServiceType) + // vs message-field types (imported after MessageType) + usedInMessages := make(map[string]bool) + var scanMessage func(*descriptorpb.DescriptorProto) + scanMessage = func(msg *descriptorpb.DescriptorProto) { + for _, field := range msg.Field { + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_ENUM { + usedInMessages[field.GetTypeName()] = true + } + } + for _, nested := range msg.NestedType { + scanMessage(nested) + } + } + for _, msg := range g.file.MessageType { + scanMessage(msg) + } + + // Phase 1: Import service-only external types (before ServiceType) + seenImports := make(map[string]bool) + for _, typeName := range orderedTypes { + // 
Skip if used in messages (will be imported later) + if usedInMessages[typeName] { + continue + } + // Skip if it's defined in the current file (not external) + if g.isLocalType(typeName) { + continue + } + importStmt := generateImport(typeName) + if importStmt != "" && !seenImports[importStmt] { + g.pNoIndent("%s", importStmt) + seenImports[importStmt] = true + } + } + + // Check if we need ServiceType import + needsServiceType := len(g.file.Service) > 0 + + // Check if service comes before messages in the file + // The WireType import position depends on source order in certain cases + serviceBeforeMessages := false + if needsServiceType && len(g.file.MessageType) > 0 { + // Service is field 6, MessageType is field 4 in FileDescriptorProto + // Check source code info to see which appears first + if g.file.SourceCodeInfo != nil { + firstServiceLine := int32(999999) + firstMessageLine := int32(999999) + + // First pass: find service and message line numbers + messageLines := make(map[int]int32) + + for _, loc := range g.file.SourceCodeInfo.Location { + // Service definition: path [6, index] + if len(loc.Path) >= 2 && loc.Path[0] == 6 && loc.Span != nil && len(loc.Span) > 0 { + if loc.Span[0] < firstServiceLine { + firstServiceLine = loc.Span[0] + } + } + // Message definition: path [4, index] + if len(loc.Path) == 2 && loc.Path[0] == 4 && loc.Span != nil && len(loc.Span) > 0 { + msgIdx := int(loc.Path[1]) + msgLine := loc.Span[0] + messageLines[msgIdx] = msgLine + if msgLine < firstMessageLine { + firstMessageLine = msgLine + } + } + } + + // Second pass: determine which messages are before the service + messagesBeforeService := make(map[int]bool) + for msgIdx, msgLine := range messageLines { + messagesBeforeService[msgIdx] = msgLine < firstServiceLine + } + + // WireType comes right after ServiceType if: + // 1. Service comes before the first message AND file has many messages (>10), OR + // 2. 
All messages before the service have zero actual fields (are truly empty) + if firstServiceLine < firstMessageLine && len(g.file.MessageType) > 10 { + serviceBeforeMessages = true + } else { + // Check if all messages before service are empty + allBeforeAreEmpty := true + countBefore := 0 + for msgIdx, beforeService := range messagesBeforeService { + if beforeService { + countBefore++ + if msgIdx < len(g.file.MessageType) { + msg := g.file.MessageType[msgIdx] + // Count actual fields (skip reserved, skip map entry messages) + hasActualFields := false + if msg.Options == nil || !msg.GetOptions().GetMapEntry() { + for _, field := range msg.Field { + // Skip GROUP type fields + if field.GetType() != descriptorpb.FieldDescriptorProto_TYPE_GROUP { + hasActualFields = true + break + } + } + } + if hasActualFields { + allBeforeAreEmpty = false + break + } + } + } + } + serviceBeforeMessages = allBeforeAreEmpty && countBefore > 0 + } + } + } + + // Check if this is google.protobuf.Timestamp, Duration, FieldMask, Struct, or Any for special imports + isTimestamp := false + isDuration := false + isFieldMask := false + isStruct := false + isAny := false + isWrapper := false // For Int32Value, StringValue, etc. 
+ wrapperNeedsScalarType := false // Only Int64Value/UInt64Value need ScalarType+LongType + isGoogleTypeDateTime := false + if g.file.Package != nil && *g.file.Package == "google.type" { + for _, msg := range g.file.MessageType { + if msg.GetName() == "DateTime" { + isGoogleTypeDateTime = true + } + } + } + if g.file.Package != nil && *g.file.Package == "google.protobuf" { + for _, msg := range g.file.MessageType { + name := msg.GetName() + if name == "Timestamp" { + isTimestamp = true + } else if name == "Duration" { + isDuration = true + } else if name == "FieldMask" { + isFieldMask = true + } else if name == "Struct" || name == "Value" || name == "ListValue" { + isStruct = true + } else if name == "Any" { + isAny = true + } else if isWrapperTypeName(name) { + isWrapper = true + if name == "Int64Value" || name == "UInt64Value" { + wrapperNeedsScalarType = true + } + } + } + } + + // Import ServiceType if needed (before Phase 2 imports) + if needsServiceType { + if g.serviceTypeRef == "ServiceType$" { + g.pNoIndent("import { ServiceType as ServiceType$ } from \"@protobuf-ts/runtime-rpc\";") + } else { + g.pNoIndent("import { ServiceType } from \"@protobuf-ts/runtime-rpc\";") + } + } + + // Phase 2: Standard runtime imports if we have messages + if len(g.file.MessageType) > 0 { + // Check if any message (including nested) has actual fields (not just GROUP fields) + hasAnyFields := false + var checkMessageForFields func(*descriptorpb.DescriptorProto) bool + checkMessageForFields = func(msg *descriptorpb.DescriptorProto) bool { + // Skip map entry messages + if msg.Options != nil && msg.GetOptions().GetMapEntry() { + return false + } + // Check direct fields + for _, field := range msg.Field { + if field.GetType() != descriptorpb.FieldDescriptorProto_TYPE_GROUP { + return true + } + } + // Check nested messages + for _, nested := range msg.NestedType { + if checkMessageForFields(nested) { + return true + } + } + return false + } + + for _, msg := range 
g.file.MessageType { + if checkMessageForFields(msg) { + hasAnyFields = true + break + } + } + + // Determine if WireType comes early: + // 1. File has service AND service comes before messages + // 2. File has NO service BUT is imported by a service file in the same batch + // 3. File has NO service AND first message is empty (no actual fields) + wireTypeEarly := false + wireTypeVeryLate := false // After UnknownFieldHandler + // Check if first message is empty (no actual fields) + firstMessageEmpty := false + if len(g.file.MessageType) > 0 { + firstMsg := g.file.MessageType[0] + actualFieldCount := 0 + for _, field := range firstMsg.Field { + if field.GetType() != descriptorpb.FieldDescriptorProto_TYPE_GROUP { + actualFieldCount++ + } + } + firstMessageEmpty = actualFieldCount == 0 + } + if needsServiceType { + wireTypeEarly = serviceBeforeMessages || firstMessageEmpty + } else { + wireTypeEarly = firstMessageEmpty + } + + // WireType goes after UnknownFieldHandler ("very late") when the first message's + // InternalBinaryRead registers WireType (via scalarRepeated for repeated numeric/enum + // fields). This happens when the first message with fields has at least one repeated + // scalar/enum field that is not string/bytes. 
+ firstMsgHasRepeatedNumeric := false + if len(g.file.MessageType) > 0 { + firstMsg := g.file.MessageType[0] + for _, field := range firstMsg.Field { + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP { + continue + } + if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { + ft := field.GetType() + if ft != descriptorpb.FieldDescriptorProto_TYPE_STRING && + ft != descriptorpb.FieldDescriptorProto_TYPE_BYTES && + ft != descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { + firstMsgHasRepeatedNumeric = true + break + } + } + } + } + if firstMsgHasRepeatedNumeric { + wireTypeVeryLate = true + wireTypeEarly = false + } + + // Skip method-related imports when optimize_for = CODE_SIZE + if !g.isOptimizeCodeSize() { + // ScalarType/LongType for wrappers - at top when no collision, deferred when either is aliased + if wrapperNeedsScalarType && g.scalarTypeRef == "ScalarType" && g.longTypeRef == "LongType" { + g.pNoIndent("import { ScalarType } from \"@protobuf-ts/runtime\";") + g.pNoIndent("import { LongType } from \"@protobuf-ts/runtime\";") + } + if hasAnyFields && wireTypeEarly { + g.pNoIndent("import { WireType%s } from \"@protobuf-ts/runtime\";", g.wireTypeImportAlias()) + } + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.binaryWriteOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.iBinaryWriterImport()) + if hasAnyFields && !wireTypeEarly && !wireTypeVeryLate { + g.pNoIndent("import { WireType%s } from \"@protobuf-ts/runtime\";", g.wireTypeImportAlias()) + } + // For Any, BinaryReadOptions comes later with JSON imports + if !isAny { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.binaryReadOptionsImport()) } + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.iBinaryReaderImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.unknownFieldHandlerImport()) + if hasAnyFields && wireTypeVeryLate { + g.pNoIndent("import { 
WireType%s } from \"@protobuf-ts/runtime\";", g.wireTypeImportAlias()) + } + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.partialMessageImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.reflectionMergePartialImport()) + } + // ScalarType for wrappers (after reflectionMergePartial, only when aliased) + if wrapperNeedsScalarType && (g.scalarTypeRef == "ScalarType$" || g.longTypeRef == "LongType$") { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.scalarTypeImport()) + } + + // Add JSON imports for Timestamp + if isTimestamp { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.typeofJsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonReadOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsTypeImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.pbLongImport()) + } + + // Add JSON imports for Duration + if isDuration { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.typeofJsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonReadOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsTypeImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.pbLongImport()) + } + + // Add PbLong import for google.type.DateTime + if isGoogleTypeDateTime { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.pbLongImport())} + + // Add JSON imports for FieldMask + if isFieldMask { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.typeofJsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonValueImport()) + g.pNoIndent("import { %s } from 
\"@protobuf-ts/runtime\";", g.lowerCamelCaseImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonReadOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsTypeImport()) + } + + // Add JSON imports for Struct + if isStruct { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.isJsonObjectImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.typeofJsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonReadOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsTypeImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonObjectImport()) } + + // Add JSON imports for wrapper types + if isWrapper { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonReadOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsTypeImport()) + } + // LongType for wrappers (after wrapper JSON imports, only when either ScalarType or LongType is aliased) + if wrapperNeedsScalarType && (g.scalarTypeRef == "ScalarType$" || g.longTypeRef == "LongType$") { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.longTypeImport()) + } + + // Add JSON imports for Any + if isAny { + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.isJsonObjectImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.typeofJsonValueImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonValueImport()) + g.pNoIndent("import { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonReadOptionsImport()) + 
g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.jsonWriteOptionsTypeImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.binaryReadOptionsImport()) + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime\";", g.iMessageTypeImport()) + } + if g.messageTypeRef == "MessageType$" { + g.pNoIndent("import { MessageType as MessageType$ } from \"@protobuf-ts/runtime\";") + } else { + g.pNoIndent("import { MessageType } from \"@protobuf-ts/runtime\";") + } + } + + // Phase 3: Import message field types and types used in both services and messages + for _, typeName := range orderedTypes { + // Skip if already imported (service-only) + importStmt := generateImport(typeName) + if importStmt == "" || seenImports[importStmt] { + continue + } + // Skip local types + if g.isLocalType(typeName) { + continue + } + g.pNoIndent("%s", importStmt) + seenImports[importStmt] = true + } +} + +func (g *generator) isLocalType(typeName string) bool { + // Check if the type is defined in the current file + typeNameStripped := strings.TrimPrefix(typeName, ".") + currentPkg := "" + if g.file.Package != nil { + currentPkg = *g.file.Package + } + + // If it doesn't start with current package, it's not local + if !strings.HasPrefix(typeNameStripped, currentPkg+".") { + return false + } + + // Extract just the type name without package + localName := strings.TrimPrefix(typeNameStripped, currentPkg+".") + parts := strings.Split(localName, ".") + + // Check if it's a top-level message or enum + for _, msg := range g.file.MessageType { + if msg.GetName() == parts[0] { + return true + } + } + for _, enum := range g.file.EnumType { + if enum.GetName() == parts[0] { + return true + } + } + + return false +} + +// collectLocalTypeNames populates g.localTypeNames with all TS names +// that are defined locally in this file (messages, enums, including nested). +// This is used to detect import name collisions. 
+func (g *generator) collectLocalTypeNames() { + var collectMsg func(msg *descriptorpb.DescriptorProto, tsPrefix string, protoPrefix string) + collectMsg = func(msg *descriptorpb.DescriptorProto, tsPrefix string, protoPrefix string) { + baseName := msg.GetName() + tsName := tsPrefix + baseName + if tsPrefix == "" { + tsName = escapeTypescriptKeyword(baseName) + } + protoName := protoPrefix + baseName + // Check for typeNameSuffixes + fullProtoName := "" + if g.file.Package != nil && *g.file.Package != "" { + fullProtoName = *g.file.Package + "." + protoName + } else { + fullProtoName = protoName + } + if suffix, exists := g.typeNameSuffixes[fullProtoName]; exists && suffix > 0 { + tsName = tsName + fmt.Sprintf("$%d", suffix) + } + g.localTypeNames[tsName] = true + nestedTsPrefix := tsPrefix + baseName + "_" + nestedProtoPrefix := protoName + "." + for _, nested := range msg.NestedType { + collectMsg(nested, nestedTsPrefix, nestedProtoPrefix) + } + for _, enum := range msg.EnumType { + enumName := tsPrefix + baseName + "_" + enum.GetName() + g.localTypeNames[enumName] = true + } + } + for _, msg := range g.file.MessageType { + collectMsg(msg, "", "") + } + for _, enum := range g.file.EnumType { + tsName := escapeTypescriptKeyword(enum.GetName()) + g.localTypeNames[tsName] = true + } + // Detect runtime WireType collision + if g.localTypeNames["WireType"] { + g.wireTypeRef = "WireType$" + } + // Detect runtime MessageType collision + if g.localTypeNames["MessageType"] { + g.messageTypeRef = "MessageType$" + } + // Detect runtime-rpc ServiceType collision + if g.localTypeNames["ServiceType"] { + g.serviceTypeRef = "ServiceType$" + } + // Detect runtime UnknownFieldHandler collision + if g.localTypeNames["UnknownFieldHandler"] { + g.unknownFieldHandlerRef = "UnknownFieldHandler$" + } + // Detect runtime PartialMessage collision + if g.localTypeNames["PartialMessage"] { + g.partialMessageRef = "PartialMessage$" + } + // Detect runtime BinaryReadOptions collision + if 
g.localTypeNames["BinaryReadOptions"] { + g.binaryReadOptionsRef = "BinaryReadOptions$" + } + // Detect runtime BinaryWriteOptions collision + if g.localTypeNames["BinaryWriteOptions"] { + g.binaryWriteOptionsRef = "BinaryWriteOptions$" + } + // Detect runtime IBinaryReader collision + if g.localTypeNames["IBinaryReader"] { + g.iBinaryReaderRef = "IBinaryReader$" + } + // Detect runtime IBinaryWriter collision + if g.localTypeNames["IBinaryWriter"] { + g.iBinaryWriterRef = "IBinaryWriter$" + } + // Detect runtime reflectionMergePartial collision + if g.localTypeNames["reflectionMergePartial"] { + g.reflectionMergePartialRef = "reflectionMergePartial$" + } + // Detect runtime ScalarType collision + if g.localTypeNames["ScalarType"] { + g.scalarTypeRef = "ScalarType$" + } + // Detect runtime LongType collision + if g.localTypeNames["LongType"] { + g.longTypeRef = "LongType$" + } + // Detect runtime PbLong collision + if g.localTypeNames["PbLong"] { + g.pbLongRef = "PbLong$" + } + // Detect runtime typeofJsonValue collision + if g.localTypeNames["typeofJsonValue"] { + g.typeofJsonValueRef = "typeofJsonValue$" + } + // Detect runtime JsonValue collision + if g.localTypeNames["JsonValue"] { + g.jsonValueRef = "JsonValue$" + } + // Detect runtime isJsonObject collision + if g.localTypeNames["isJsonObject"] { + g.isJsonObjectRef = "isJsonObject$" + } + // Detect runtime JsonObject collision + if g.localTypeNames["JsonObject"] { + g.jsonObjectRef = "JsonObject$" + } + // Detect runtime jsonWriteOptions collision + if g.localTypeNames["jsonWriteOptions"] { + g.jsonWriteOptionsRef = "jsonWriteOptions$" + } + // Detect runtime JsonWriteOptions type collision + if g.localTypeNames["JsonWriteOptions"] { + g.jsonWriteOptionsTypeRef = "JsonWriteOptions$" + } + // Detect runtime IMessageType collision + if g.localTypeNames["IMessageType"] { + g.iMessageTypeRef = "IMessageType$" + } + // Detect runtime JsonReadOptions collision + if g.localTypeNames["JsonReadOptions"] { + 
g.jsonReadOptionsRef = "JsonReadOptions$" + } + // Detect runtime lowerCamelCase collision + if g.localTypeNames["lowerCamelCase"] { + g.lowerCamelCaseRef = "lowerCamelCase$" + } +} + +// computeImportedTSName computes the TS name that would be used when importing +// a proto type from a dependency file. This is the "raw" name without alias resolution. +func (g *generator) computeImportedTSName(typeName string, depFiles map[string]*descriptorpb.FileDescriptorProto) string { + typeNameStripped := strings.TrimPrefix(typeName, ".") + for _, depFile := range depFiles { + depPkg := "" + if depFile.Package != nil { + depPkg = *depFile.Package + } + if depPkg == "" || strings.HasPrefix(typeNameStripped, depPkg+".") { + var remainder string + if depPkg != "" { + remainder = strings.TrimPrefix(typeNameStripped, depPkg+".") + } else { + remainder = typeNameStripped + } + parts := strings.Split(remainder, ".") + if findTypeInDescriptors(depFile.MessageType, depFile.EnumType, parts) { + if len(parts) == 1 { + return escapeTypescriptKeyword(parts[0]) + } + return strings.Join(parts, "_") + } + } + } + return "" +} + +// precomputeImportAliases detects import name collisions and pre-populates +// g.importAliases. Types are scanned in protobuf-ts registration order +// (input first, output second per method), which determines which type +// keeps the original name and which gets the '$' suffix alias. 
+func (g *generator) precomputeImportAliases(depFiles map[string]*descriptorpb.FileDescriptorProto) { + type typeInfo struct { + protoName string + tsName string + } + + var regOrder []typeInfo + seen := make(map[string]bool) + + // Service types: input first, output second per method (protobuf-ts registration order) + for _, service := range g.file.Service { + for _, method := range service.Method { + inputType := method.GetInputType() + if inputType != "" && !seen[inputType] && !g.isLocalType(inputType) { + seen[inputType] = true + tsName := g.computeImportedTSName(inputType, depFiles) + if tsName != "" { + regOrder = append(regOrder, typeInfo{inputType, tsName}) + } + } + outputType := method.GetOutputType() + if outputType != "" && !seen[outputType] && !g.isLocalType(outputType) { + seen[outputType] = true + tsName := g.computeImportedTSName(outputType, depFiles) + if tsName != "" { + regOrder = append(regOrder, typeInfo{outputType, tsName}) + } + } + } + } + + // Message field types in forward field number order + var scanMsg func(*descriptorpb.DescriptorProto) + scanMsg = func(msg *descriptorpb.DescriptorProto) { + for _, field := range msg.Field { + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_ENUM { + typeName := field.GetTypeName() + if !seen[typeName] && !g.isLocalType(typeName) { + seen[typeName] = true + tsName := g.computeImportedTSName(typeName, depFiles) + if tsName != "" { + regOrder = append(regOrder, typeInfo{typeName, tsName}) + } + } + } + } + for _, nested := range msg.NestedType { + scanMsg(nested) + } + } + for _, msg := range g.file.MessageType { + scanMsg(msg) + } + + // Detect collisions: first type to claim a name wins + claimed := make(map[string]string) // tsName → first proto type that claimed it + for _, info := range regOrder { + if g.localTypeNames[info.tsName] { + // Collision with local type — alias this imported type + taken := 
make(map[string]bool) + for k := range g.localTypeNames { + taken[k] = true + } + for _, v := range g.importAliases { + taken[v] = true + } + alias := info.tsName + "$" + for counter := 2; taken[alias]; counter++ { + alias = info.tsName + "$" + strconv.Itoa(counter) + } + g.importAliases[info.protoName] = alias + g.rawImportNames[info.protoName] = info.tsName + } else if existing, ok := claimed[info.tsName]; ok && existing != info.protoName { + // Collision with another imported type + taken := make(map[string]bool) + for k := range g.localTypeNames { + taken[k] = true + } + for _, v := range claimed { + taken[v] = true + } + for _, v := range g.importAliases { + taken[v] = true + } + alias := info.tsName + "$" + for counter := 2; taken[alias]; counter++ { + alias = info.tsName + "$" + strconv.Itoa(counter) + } + g.importAliases[info.protoName] = alias + g.rawImportNames[info.protoName] = info.tsName + } else if !ok { + claimed[info.tsName] = info.protoName + } + } +} + +// formatTypeImport returns the import clause for a proto type, handling aliases. +// For aliased types: "OrigName as Alias" +// For normal types: "TypeName" +func (g *generator) formatTypeImport(protoTypeName string) string { + if alias, ok := g.importAliases[protoTypeName]; ok { + rawName := g.rawImportNames[protoTypeName] + return rawName + " as " + alias + } + return g.stripPackage(protoTypeName) +} + +func (g *generator) getRelativeImportPath(fromDir, toPath string) string { + // If fromDir is empty (file at root), use simple ./ path + if fromDir == "" || fromDir == "." 
{ + return "./" + toPath + } + + // Handle same directory + if fromDir == filepath.Dir(toPath) { + return "./" + filepath.Base(toPath) + } + + // Handle parent directory navigation + fromParts := []string{} + if fromDir != "" { + fromParts = strings.Split(fromDir, "/") + } + toParts := strings.Split(toPath, "/") + + // Find common prefix length + commonLen := 0 + minLen := len(fromParts) + if len(toParts) < minLen { + minLen = len(toParts) + } + for i := 0; i < minLen; i++ { + if fromParts[i] == toParts[i] { + commonLen++ + } else { + break + } + } + + // Build relative path + upCount := len(fromParts) - commonLen + var result []string + for i := 0; i < upCount; i++ { + result = append(result, "..") + } + for i := commonLen; i < len(toParts); i++ { + result = append(result, toParts[i]) + } + + if len(result) == 0 { + return "./" + filepath.Base(toPath) + } + + // Don't use ./ prefix when going up directories + if upCount > 0 { + return strings.Join(result, "/") + } + + return "./" + strings.Join(result, "/") +} + +func (g *generator) getImportPathForType(fullTypeName string) string { + // fullTypeName starts with . 
(e.g., .lib.Void, .quirks.v1.TypesRequest) + typeNameStripped := strings.TrimPrefix(fullTypeName, ".") + + // Helper to check if a type is defined in a file + typeInFile := func(file *descriptorpb.FileDescriptorProto, typeName string) bool { + pkg := "" + if file.Package != nil { + pkg = *file.Package + } + + // Type must be in this file's package + if pkg != "" && !strings.HasPrefix(typeName, pkg+".") { + return false + } + + // Strip package to get the type parts + var parts []string + if pkg == "" { + parts = strings.Split(typeName, ".") + } else { + parts = strings.Split(strings.TrimPrefix(typeName, pkg+"."), ".") + } + + // Check top-level messages + for _, msg := range file.MessageType { + if msg.GetName() == parts[0] { + if len(parts) == 1 { + return true + } + // Check nested types + return g.typeInMessage(msg, parts[1:]) + } + } + + // Check top-level enums + for _, enum := range file.EnumType { + if enum.GetName() == parts[0] && len(parts) == 1 { + return true + } + } + + return false + } + + // Check dependencies first + currentFileDir := filepath.Dir(g.file.GetName()) + for _, dep := range g.file.Dependency { + depFile := g.findFileByName(dep) + if depFile != nil && typeInFile(depFile, typeNameStripped) { + depPath := strings.TrimSuffix(dep, ".proto") + return g.getRelativeImportPath(currentFileDir, depPath) + } + } + + // Check files transitively reachable via import public + for _, pubFile := range g.collectTransitivePublicDeps(g.file) { + if typeInFile(pubFile, typeNameStripped) { + depPath := strings.TrimSuffix(pubFile.GetName(), ".proto") + return g.getRelativeImportPath(currentFileDir, depPath) + } + } + + // Check current file + if typeInFile(g.file, typeNameStripped) { + return "./" + strings.TrimSuffix(filepath.Base(g.file.GetName()), ".proto") + } + + // Default to current file (should not happen) + return "./" + strings.TrimSuffix(filepath.Base(g.file.GetName()), ".proto") +} + +// typeInMessage checks if a nested type path exists in a 
message +func (g *generator) typeInMessage(msg *descriptorpb.DescriptorProto, parts []string) bool { + if len(parts) == 0 { + return false + } + + // Check nested messages + for _, nested := range msg.NestedType { + if nested.GetName() == parts[0] { + if len(parts) == 1 { + return true + } + return g.typeInMessage(nested, parts[1:]) + } + } + + // Check nested enums + for _, enum := range msg.EnumType { + if enum.GetName() == parts[0] && len(parts) == 1 { + return true + } + } + + return false +} + +func (g *generator) findFileByName(name string) *descriptorpb.FileDescriptorProto { + for _, f := range g.allFiles { + if f.GetName() == name { + return f + } + } + return nil +} + +// collectTransitivePublicDeps returns all files transitively reachable via +// `import public` from the given file's direct dependencies. +func (g *generator) collectTransitivePublicDeps(file *descriptorpb.FileDescriptorProto) []*descriptorpb.FileDescriptorProto { + seen := make(map[string]bool) + var result []*descriptorpb.FileDescriptorProto + for _, dep := range file.Dependency { + seen[dep] = true + } + var walk func(f *descriptorpb.FileDescriptorProto) + walk = func(f *descriptorpb.FileDescriptorProto) { + for _, idx := range f.PublicDependency { + if int(idx) < len(f.Dependency) { + pubDep := f.Dependency[idx] + if !seen[pubDep] { + seen[pubDep] = true + pubFile := g.findFileByName(pubDep) + if pubFile != nil { + result = append(result, pubFile) + walk(pubFile) + } + } + } + } + } + for _, dep := range file.Dependency { + depFile := g.findFileByName(dep) + if depFile != nil { + walk(depFile) + } + } + return result +} + +func (g *generator) generateMessageInterface(msg *descriptorpb.DescriptorProto, parentPrefix string, protoParentPrefix string, msgPath []int32) { + // Skip map entry messages + if msg.Options != nil && msg.GetOptions().GetMapEntry() { + return + } + + baseName := msg.GetName() + // Only escape top-level types (nested types don't need escaping) + escapedName := 
baseName + if parentPrefix == "" { + escapedName = escapeTypescriptKeyword(baseName) + } + fullName := parentPrefix + escapedName + // For @generated comment, use original name not escaped + protoName := protoParentPrefix + baseName + + // Check if this type has a collision suffix + pkgPrefix := "" + if g.file.Package != nil && *g.file.Package != "" { + pkgPrefix = *g.file.Package + "." + } + fullProtoName := pkgPrefix + protoName + if suffix, exists := g.typeNameSuffixes[fullProtoName]; exists && suffix > 0 { + fullName = fullName + fmt.Sprintf("$%d", suffix) + } + + // Output message-level detached comments (comments between messages) + // Skip for first message - those are output as file-level comments after imports + isFirstMessage := len(msgPath) == 2 && msgPath[0] == 4 && msgPath[1] == 0 + if len(msgPath) > 0 && !isFirstMessage { + detachedComments := g.getLeadingDetachedComments(msgPath) + if len(detachedComments) > 0 { + // Output detached comments as // style BEFORE message JSDoc + for idx, detached := range detachedComments { + // Trim trailing newline (it will be represented by blank line or separator) + detached = strings.TrimRight(detached, "\n") + // Split by newline and output each line + for _, line := range strings.Split(detached, "\n") { + if line == "" { + // For message-level: blank lines within blocks are "// " (with space) + g.pNoIndent("// ") + } else { + g.pNoIndent("// %s", line) + } + } + // Add separator after detached comment block (except for last block) + // For message-level: separator is a blank line (not "//") + if idx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + // Add blank line after all detached comments, before JSDoc + g.pNoIndent("") + } + } + + // Message interface first + g.pNoIndent("/**") + + // Add leading and trailing comments if available (msgPath should point to this message) + if len(msgPath) > 0 { + leadingComments, hasLeading := g.getLeadingComments(msgPath) + trailingComments := 
g.getEnumTrailingComments(msgPath) + + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(leadingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + // Add separator blank line(s) + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + + if trailingComments != "" { + hasTrailingBlank := strings.HasSuffix(trailingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + trailingComments = strings.TrimSuffix(trailingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(trailingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + } + + // Add @deprecated if message has deprecated option OR file is deprecated + if (msg.Options != nil && msg.GetOptions().GetDeprecated()) || g.isFileDeprecated() { + g.pNoIndent(" * @deprecated") + } + + g.pNoIndent(" * @generated from protobuf message %s%s", pkgPrefix, protoName) + g.pNoIndent(" */") + g.pNoIndent("export interface %s {", fullName) + + // Track which oneofs have been generated + generatedOneofs := make(map[int32]bool) + + // Track if we've generated the first field (for detached comment handling) + firstFieldGenerated := false + + // Generate fields in field number order + // When we encounter a field that's part of a oneof, generate the entire oneof at that point + for fieldIdx, field := range msg.Field { + // Skip GROUP type fields - they're deprecated and handled as nested messages + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP { + continue + } 
+ + var fieldPath []int32 + if len(msgPath) > 0 { + fieldPath = append(msgPath, 2, int32(fieldIdx)) + } + + if field.OneofIndex != nil { + // This field is part of a oneof + oneofIdx := field.GetOneofIndex() + oneofProtoName := msg.OneofDecl[oneofIdx].GetName() + + // Check if this is a proto3 optional (synthetic oneof) + isProto3Optional := field.Proto3Optional != nil && *field.Proto3Optional + + if isProto3Optional { + // Proto3 optional field - treat as regular optional field + g.generateField(field, fullName, fieldPath, firstFieldGenerated) + firstFieldGenerated = true + } else { + // Real oneof - only generate once (when we encounter its first field) + if !generatedOneofs[oneofIdx] { + generatedOneofs[oneofIdx] = true + + // Collect all fields for this oneof (skip group fields) + var oneofFields []*descriptorpb.FieldDescriptorProto + for _, f := range msg.Field { + if f.OneofIndex != nil && f.GetOneofIndex() == oneofIdx && f.GetType() != descriptorpb.FieldDescriptorProto_TYPE_GROUP { + oneofFields = append(oneofFields, f) + } + } + + // Convert oneof name to camelCase + oneofCamelName := g.toCamelCase(oneofProtoName) + + // Escape reserved property names + if oneofCamelName == "__proto__" || oneofCamelName == "toString" || oneofCamelName == "oneofKind" { + oneofCamelName = oneofCamelName + "$" + } + + g.generateOneofField(oneofCamelName, oneofProtoName, oneofFields, msg, msgPath, oneofIdx) + firstFieldGenerated = true + } + } + } else { + // Regular field + g.generateField(field, fullName, fieldPath, firstFieldGenerated) + firstFieldGenerated = true + } + } + + g.pNoIndent("}") + + // Generate nested message interfaces first + for nestedIdx, nested := range msg.NestedType { + nestedPath := append(msgPath, 3, int32(nestedIdx)) + // Build TypeScript prefix by appending baseName with underscore + // Build proto prefix by appending baseName with dot + g.generateMessageInterface(nested, parentPrefix + baseName + "_", protoName + ".", nestedPath) + } + + // Generate 
nested enums after nested messages + for enumIdx, nested := range msg.EnumType { + // Build path for nested enum: msgPath + field 4 (enum_type) + index + var enumPath []int32 + if len(msgPath) > 0 { + enumPath = append([]int32{}, msgPath...) + enumPath = append(enumPath, 4, int32(enumIdx)) + } + // Build TypeScript prefix by appending baseName with underscore + // Build proto prefix by appending baseName with dot + g.generateEnum(nested, parentPrefix + baseName + "_", protoName + ".", enumPath) + } +} + +func (g *generator) generateMessageClass(msg *descriptorpb.DescriptorProto, parentPrefix string, protoParentPrefix string) { + // Skip map entry messages + if msg.Options != nil && msg.GetOptions().GetMapEntry() { + return + } + + baseName := msg.GetName() + // Only escape top-level types (nested types don't need escaping) + escapedName := baseName + if parentPrefix == "" { + escapedName = escapeTypescriptKeyword(baseName) + } + fullName := parentPrefix + escapedName + protoName := protoParentPrefix + baseName + + // Check if this type has a collision suffix + pkgPrefix := "" + if g.file.Package != nil && *g.file.Package != "" { + pkgPrefix = *g.file.Package + "." 
// generateField emits one interface property for a regular (non-oneof) field:
// detached comments, a JSDoc block (leading comments, @deprecated,
// @generated annotation), then the property line itself, including map,
// repeated, and optional handling. msgName is unused here beyond context;
// isNotFirstField is currently not read — TODO confirm whether it is vestigial.
func (g *generator) generateField(field *descriptorpb.FieldDescriptorProto, msgName string, fieldPath []int32, isNotFirstField bool) {
	g.indent = " "

	// Add leading detached comments (always as // style before JSDoc)
	if len(fieldPath) > 0 {
		detachedComments := g.getLeadingDetachedComments(fieldPath)
		if len(detachedComments) > 0 {
			// Output detached comments as // style BEFORE JSDoc
			for idx, detached := range detachedComments {
				// Trim trailing newline (it will be represented by blank line or separator)
				detached = strings.TrimRight(detached, "\n")
				// Split by newline and output each line
				for _, line := range strings.Split(detached, "\n") {
					if line == "" {
						g.p("// ")
					} else {
						g.p("// %s", line)
					}
				}
				// Add blank line separator after detached comment block (except for last block)
				if idx < len(detachedComments)-1 {
					g.pNoIndent("")
				}
			}
			// Add blank line after all detached comments, before JSDoc
			g.pNoIndent("")
		}
	}

	g.p("/**")

	// Add leading comments if fieldPath is provided
	hasLeadingComments := false
	hasTrailingBlankInComment := false
	if len(fieldPath) > 0 {
		leadingComments, hasLeading := g.getLeadingComments(fieldPath)
		// Check if comment had trailing blank line (sentinel appended by getLeadingComments)
		if strings.HasSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") {
			hasTrailingBlankInComment = true
			leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__")
		}

		if hasLeading {
			hasLeadingComments = true
			for _, line := range strings.Split(leadingComments, "\n") {
				if line == "" {
					g.p(" *")
				} else {
					g.p(" * %s", escapeJSDocComment(line))
				}
			}
		}
	}

	// Add blank lines before @generated
	// If comment had trailing blank, add that blank line
	if hasTrailingBlankInComment {
		g.p(" *")
	}
	// Add standard blank line before @generated (if we had any comments)
	if hasLeadingComments {
		g.p(" *")
	}

	// Build the @generated comment line
	protoType := g.getProtoType(field)
	fieldName := field.GetName()
	fieldNumber := field.GetNumber()

	optionsAnnotation := g.formatFieldOptionsAnnotation(field)

	// Check if field is deprecated OR file is deprecated
	fieldIsDeprecated := field.Options != nil && field.GetOptions().GetDeprecated()
	// Add @deprecated tag for both field-level and file-level deprecation
	if fieldIsDeprecated || g.isFileDeprecated() {
		g.p(" * @deprecated")
	}

	g.p(" * @generated from protobuf field: %s %s = %d%s", protoType, fieldName, fieldNumber, optionsAnnotation)
	g.p(" */")

	// From here on fieldName is the TypeScript property name, not the proto name.
	fieldName = g.propertyName(field)

	// Get trailing comments if fieldPath is provided; first line rides on the
	// property line, any further lines become standalone // comments below it.
	trailingComment := ""
	var trailingCommentExtraLines []string
	if len(fieldPath) > 0 {
		tc := g.getTrailingComments(fieldPath)
		if tc != "" {
			lines := strings.Split(tc, "\n")
			trailingComment = " // " + lines[0]
			for _, extra := range lines[1:] {
				trailingCommentExtraLines = append(trailingCommentExtraLines, extra)
			}
		}
	}

	// Check if it's a repeated field
	if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
		// Check if it's a map field (repeated synthetic map-entry message)
		msgType := g.findMessageType(field.GetTypeName())
		if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() {
			// Map field - multiline format
			keyField := msgType.Field[0]
			valueField := msgType.Field[1]
			keyType := g.getTypescriptTypeForMapKey(keyField)
			valueType := g.getBaseTypescriptType(valueField)
			g.p("%s: {", fieldName)
			g.indent = " "
			g.p("[key: %s]: %s;", keyType, valueType)
			g.indent = " "
			g.p("};%s", trailingComment)
		} else {
			// Regular repeated field
			baseType := g.getBaseTypescriptType(field)
			g.p("%s: %s[];%s", fieldName, baseType, trailingComment)
		}
	} else {
		// Singular field
		fieldType := g.getBaseTypescriptType(field)
		optional := ""
		// Mark as optional if:
		// 1. Proto2 optional (syntax is proto2 AND label is OPTIONAL)
		// 2. Proto3 message (messages are always optional)
		// 3. Proto3 explicit optional scalar (proto3_optional = true)
		isProto2 := g.file.GetSyntax() == "proto2" || g.file.GetSyntax() == ""
		if field.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
			if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REQUIRED {
				// Proto2 required message fields are still optional in TS (no zero value)
				if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
					optional = "?"
				}
			} else if isProto2 && field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL {
				// Proto2 optional scalar or message
				optional = "?"
			} else if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
				// Proto3 message (implicitly optional)
				optional = "?"
			} else if field.Proto3Optional != nil && *field.Proto3Optional {
				// Proto3 explicit optional scalar
				optional = "?"
			}
		}
		g.p("%s%s: %s;%s", fieldName, optional, fieldType, trailingComment)
	}
	// Output extra trailing comment lines (multiline trailing comments)
	for _, extra := range trailingCommentExtraLines {
		g.p("// %s", extra)
	}

	g.indent = ""
}
// generateOneofField emits a discriminated-union property for a real proto
// oneof: one `{ oneofKind: "name"; name: T; }` alternative per member field,
// joined with `|`, plus a final `{ oneofKind: undefined; }` alternative.
// oneofCamelName is the already-escaped TS property name; oneofProtoName is
// the original snake_case proto name used in the @generated annotation.
func (g *generator) generateOneofField(oneofCamelName string, oneofProtoName string, fields []*descriptorpb.FieldDescriptorProto, msg *descriptorpb.DescriptorProto, msgPath []int32, oneofIndex int32) {
	g.indent = " "

	// Get oneof leading comment (path field 8 = oneof_decl in DescriptorProto)
	oneofPath := append(append([]int32{}, msgPath...), 8, oneofIndex)
	oneofLeadingComments, hasOneofLeading := g.getLeadingComments(oneofPath)

	// Add leading detached comments (as // style before JSDoc)
	detachedComments := g.getLeadingDetachedComments(oneofPath)
	if len(detachedComments) > 0 {
		for idx, detached := range detachedComments {
			detached = strings.TrimRight(detached, "\n")
			for _, line := range strings.Split(detached, "\n") {
				if line == "" {
					g.p("// ")
				} else {
					g.p("// %s", line)
				}
			}
			if idx < len(detachedComments)-1 {
				g.pNoIndent("")
			}
		}
		g.pNoIndent("")
	}

	// Generate oneof JSDoc
	g.p("/**")

	// Add leading comments if present
	if hasOneofLeading {
		// __HAS_TRAILING_BLANK__ is a sentinel appended by getLeadingComments
		hasTrailingBlank := strings.HasSuffix(oneofLeadingComments, "__HAS_TRAILING_BLANK__")
		if hasTrailingBlank {
			oneofLeadingComments = strings.TrimSuffix(oneofLeadingComments, "\n__HAS_TRAILING_BLANK__")
		}
		for _, line := range strings.Split(oneofLeadingComments, "\n") {
			if line == "" {
				g.p(" *")
			} else {
				g.p(" * %s", escapeJSDocComment(line))
			}
		}
		if hasTrailingBlank {
			g.p(" *")
			g.p(" *")
		} else {
			g.p(" *")
		}
	}

	// Oneof trailing comment goes into the oneof JSDoc (before @generated)
	oneofTrailingComment := g.getTrailingComments(oneofPath)
	if oneofTrailingComment != "" {
		for _, line := range strings.Split(oneofTrailingComment, "\n") {
			if line == "" {
				g.p(" *")
			} else {
				g.p(" * %s", escapeJSDocComment(line))
			}
		}
		g.p(" *")
	}
	// Add @deprecated if file is deprecated
	if g.isFileDeprecated() {
		g.p(" * @deprecated")
	}
	g.p(" * @generated from protobuf oneof: %s", oneofProtoName)
	g.p(" */")
	g.p("%s: {", oneofCamelName)

	// Generate each alternative of the discriminated union
	for i, field := range fields {
		g.indent = " "
		fieldJsonName := g.propertyName(field)
		g.p("oneofKind: \"%s\";", fieldJsonName)

		// Get field index in message (needed to build the comment path;
		// fields was filtered, so positions in it don't match msg.Field)
		var fieldIndex int32
		for idx, f := range msg.Field {
			if f.GetNumber() == field.GetNumber() {
				fieldIndex = int32(idx)
				break
			}
		}

		// Get field leading comment (path field 2 = field in DescriptorProto)
		fieldPath := append(append([]int32{}, msgPath...), 2, fieldIndex)
		fieldLeadingComments, hasFieldLeading := g.getLeadingComments(fieldPath)

		// Add detached comments for non-first oneof member fields as // style
		if i > 0 {
			fieldDetached := g.getLeadingDetachedComments(fieldPath)
			if len(fieldDetached) > 0 {
				for dIdx, detached := range fieldDetached {
					detached = strings.TrimRight(detached, "\n")
					for _, line := range strings.Split(detached, "\n") {
						if line == "" {
							g.p("// ")
						} else {
							g.p("// %s", line)
						}
					}
					if dIdx < len(fieldDetached)-1 {
						g.pNoIndent("")
					}
				}
				g.pNoIndent("")
			}
		}

		// Generate field JSDoc
		g.p("/**")
		if hasFieldLeading {
			hasTrailingBlank := strings.HasSuffix(fieldLeadingComments, "__HAS_TRAILING_BLANK__")
			if hasTrailingBlank {
				fieldLeadingComments = strings.TrimSuffix(fieldLeadingComments, "\n__HAS_TRAILING_BLANK__")
			}
			for _, line := range strings.Split(fieldLeadingComments, "\n") {
				if line == "" {
					g.p(" *")
				} else {
					g.p(" * %s", escapeJSDocComment(line))
				}
			}
			if hasTrailingBlank {
				g.p(" *")
				g.p(" *")
			} else {
				g.p(" *")
			}
		}
		optionsAnnotation := g.formatFieldOptionsAnnotation(field)
		// Check if field is deprecated (field-level or file-level)
		fieldIsDeprecated := field.Options != nil && field.GetOptions().GetDeprecated()
		if fieldIsDeprecated || g.isFileDeprecated() {
			g.p(" * @deprecated")
		}
		g.p(" * @generated from protobuf field: %s %s = %d%s", g.getProtoType(field), field.GetName(), field.GetNumber(), optionsAnnotation)
		g.p(" */")
		fieldType := g.getTypescriptType(field)
		// First trailing-comment line rides on the property; extras go below.
		fieldTrailingComment := g.getTrailingComments(fieldPath)
		if fieldTrailingComment != "" {
			lines := strings.Split(fieldTrailingComment, "\n")
			g.p("%s: %s; // %s", fieldJsonName, fieldType, lines[0])
			for _, extra := range lines[1:] {
				g.p("// %s", extra)
			}
		} else {
			g.p("%s: %s;", fieldJsonName, fieldType)
		}
		g.indent = " "
		// Close this alternative and open the next (except after the last one)
		if i < len(fields)-1 {
			g.p("} | {")
		}
	}

	// Add undefined alternative (oneof not set)
	g.p("} | {")
	g.indent = " "
	g.p("oneofKind: undefined;")
	g.indent = " "
	g.p("};")
	g.indent = ""
}
from protobuf field: %s %s = %d%s", g.getProtoType(field), field.GetName(), field.GetNumber(), optionsAnnotation) + g.p(" */") + fieldType := g.getTypescriptType(field) + fieldTrailingComment := g.getTrailingComments(fieldPath) + if fieldTrailingComment != "" { + lines := strings.Split(fieldTrailingComment, "\n") + g.p("%s: %s; // %s", fieldJsonName, fieldType, lines[0]) + for _, extra := range lines[1:] { + g.p("// %s", extra) + } + } else { + g.p("%s: %s;", fieldJsonName, fieldType) + } + g.indent = " " + if i < len(fields)-1 { + g.p("} | {") + } + } + + // Add undefined alternative + g.p("} | {") + g.indent = " " + g.p("oneofKind: undefined;") + g.indent = " " + g.p("};") + g.indent = "" +} + +// propertyName returns the TypeScript property name for a field +// This does camelCase conversion where all letters after underscores are capitalized +// Reserved object properties (__proto__, toString) and the oneofKind discriminator get $ suffix +func (g *generator) propertyName(field *descriptorpb.FieldDescriptorProto) string { + name := field.GetName() + camelName := g.toCamelCase(name) + + // Escape reserved object properties and oneofKind discriminator + if camelName == "__proto__" || camelName == "toString" || camelName == "oneofKind" { + return camelName + "$" + } + + return camelName +} + +// needsLocalName returns true if the field's TypeScript property name differs +// from the default camelCase conversion (i.e., it was escaped) +func (g *generator) needsLocalName(field *descriptorpb.FieldDescriptorProto) bool { + name := field.GetName() + camelName := g.toCamelCase(name) + return camelName == "__proto__" || camelName == "toString" || camelName == "oneofKind" +} + +// toCamelCase converts a snake_case name to camelCase +func (g *generator) toCamelCase(name string) string { + // Convert snake_case to camelCase: capitalize all letters after underscores + parts := strings.Split(name, "_") + startsWithUnderscore := len(name) > 0 && name[0] == '_' + + for i := 1; i 
< len(parts); i++ { + if len(parts[i]) > 0 { + parts[i] = strings.ToUpper(parts[i][:1]) + parts[i][1:] + } + } + result := strings.Join(parts, "") + + // Special handling: if a lowercase letter follows a digit, capitalize it + // Example: "int32s" becomes "int32S" in "fInt32S" + runes := []rune(result) + for i := 1; i < len(runes); i++ { + if runes[i] >= 'a' && runes[i] <= 'z' && runes[i-1] >= '0' && runes[i-1] <= '9' { + runes[i] = runes[i] - 'a' + 'A' + } + } + result = string(runes) + + // If name started with underscore, capitalize the first letter + // Otherwise, lowercase the first letter + if len(result) > 0 { + if startsWithUnderscore { + result = strings.ToUpper(result[:1]) + result[1:] + } else { + result = strings.ToLower(result[:1]) + result[1:] + } + } + return result +} + +// jsonName returns the jsonName for use in reflection metadata +// This uses protoc's JsonName which follows JSON naming conventions +func (g *generator) jsonName(field *descriptorpb.FieldDescriptorProto) string { + if field.JsonName != nil { + // Use the proto-provided JsonName as-is + return *field.JsonName + } + // Fallback: convert snake_case to camelCase (should not happen with protoc) + return g.propertyName(field) +} + +// protocGeneratedJsonName returns what protoc would auto-generate as the jsonName +// This follows protoc's rules: remove underscores, capitalize letter after underscore +func (g *generator) protocGeneratedJsonName(fieldName string) string { + var result strings.Builder + capitalizeNext := false + + for _, ch := range fieldName { + if ch == '_' { + capitalizeNext = true + continue + } + + // Capitalize the next letter (but not digit) after underscore + if capitalizeNext && ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')) { + if ch >= 'a' && ch <= 'z' { + result.WriteRune(ch - 'a' + 'A') + } else { + result.WriteRune(ch) + } + capitalizeNext = false + } else { + result.WriteRune(ch) + capitalizeNext = false + } + } + + return result.String() +} + +func 
(g *generator) getProtoType(field *descriptorpb.FieldDescriptorProto) string { + // Check if it's a map field + if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED && + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { + msgType := g.findMessageType(field.GetTypeName()) + if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() { + // It's a map field + keyField := msgType.Field[0] + valueField := msgType.Field[1] + keyType := g.getProtoTypeSimple(keyField) + valueType := g.getProtoTypeSimple(valueField) + return fmt.Sprintf("map<%s, %s>", keyType, valueType) + } + } + + label := "" + if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { + label = "repeated " + } else if field.Proto3Optional != nil && *field.Proto3Optional { + // Proto3 explicit optional + label = "optional " + } else if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL { + // Only show "optional" for proto2 optional fields (not oneof members) + isProto2 := g.file.GetSyntax() == "proto2" || g.file.GetSyntax() == "" + if isProto2 && field.OneofIndex == nil { + label = "optional " + } + } else if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REQUIRED { + label = "required " + } + + typeName := "" + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: + typeName = g.getProtoTypeName(field.GetTypeName()) + case descriptorpb.FieldDescriptorProto_TYPE_ENUM: + typeName = g.getProtoTypeName(field.GetTypeName()) + default: + typeName = strings.ToLower(field.GetType().String()[5:]) // Remove TYPE_ prefix + } + + return label + typeName +} + +func (g *generator) getProtoTypeSimple(field *descriptorpb.FieldDescriptorProto) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: + return g.getProtoTypeName(field.GetTypeName()) + case descriptorpb.FieldDescriptorProto_TYPE_ENUM: + return g.getProtoTypeName(field.GetTypeName()) + default: 
+ return strings.ToLower(field.GetType().String()[5:]) // Remove TYPE_ prefix + } +} + +func (g *generator) getProtoTypeName(typeName string) string { + // Remove leading dot + typeName = strings.TrimPrefix(typeName, ".") + // Keep package prefix and convert nested types + return strings.ReplaceAll(typeName, ".", ".") +} + +func (g *generator) stripPackage(typeName string) string { + // Check if this type has an import alias (collision-resolved name) + if alias, ok := g.importAliases[typeName]; ok { + return alias + } + // Also check with leading dot stripped + dotPrefixed := "." + strings.TrimPrefix(typeName, ".") + if alias, ok := g.importAliases[dotPrefixed]; ok { + return alias + } + + // Remove leading dot + typeName = strings.TrimPrefix(typeName, ".") + + // Check if this is from the EXACT same package (not a sub-package) + if g.file.Package != nil && *g.file.Package != "" { + prefix := *g.file.Package + "." + if strings.HasPrefix(typeName, prefix) { + // Could be same package or sub-package + // Extract what comes after the package prefix + remainder := strings.TrimPrefix(typeName, prefix) + + // To distinguish between same-package types and sub-packages: + // - Check if the type is defined in this file + // - If it's defined here, it's a same-package type (possibly nested) + // - If not, it's a sub-package + parts := strings.Split(remainder, ".") + if len(parts) > 0 { + // Check if the first part is a top-level message/enum in this file + isInThisFile := false + firstPart := parts[0] + for _, msg := range g.file.MessageType { + if msg.GetName() == firstPart { + isInThisFile = true + break + } + } + if !isInThisFile { + for _, enum := range g.file.EnumType { + if enum.GetName() == firstPart { + isInThisFile = true + break + } + } + } + + if isInThisFile { + // It's a type defined in this file (possibly nested) + // Replace dots with underscores for nested types + result := strings.ReplaceAll(remainder, ".", "_") + // For top-level types, apply keyword 
escaping + if !strings.Contains(remainder, ".") { + result = escapeTypescriptKeyword(result) + } + + // Check if this type has a collision suffix + pkgPrefix := "" + if g.file.Package != nil && *g.file.Package != "" { + pkgPrefix = *g.file.Package + "." + } + fullProtoName := pkgPrefix + remainder + if suffix, exists := g.typeNameSuffixes[fullProtoName]; exists && suffix > 0 { + result = result + fmt.Sprintf("$%d", suffix) + } + + return result + } + } + // Otherwise it's a sub-package, fall through to handle as external type + } + } + + // Different package - need to strip package but keep message.nested structure + // e.g., api.v1.HealthCheckResponse.Status -> HealthCheckResponse_Status + // or auth.UserProfile -> UserProfile (if imported) + + // Find the source file for this type to get its actual package + remainder := "" + if srcPkg := g.findPackageForType(typeName); srcPkg != "" { + remainder = strings.TrimPrefix(typeName, srcPkg+".") + } else { + // No package (empty package) — the entire typeName is the type path + remainder = typeName + } + + if remainder == "" { + return typeName + } + + parts := strings.Split(remainder, ".") + if len(parts) > 1 { + return strings.Join(parts, "_") + } + + return escapeTypescriptKeyword(parts[0]) +} + +// findPackageForType returns the package name for a fully-qualified type name +// by searching all known files. Returns "" if the type has no package. 
+func (g *generator) findPackageForType(typeName string) string { + typeName = strings.TrimPrefix(typeName, ".") + + checkFile := func(file *descriptorpb.FileDescriptorProto) bool { + pkg := file.GetPackage() + var remainder string + if pkg != "" { + if !strings.HasPrefix(typeName, pkg+".") { + return false + } + remainder = strings.TrimPrefix(typeName, pkg+".") + } else { + remainder = typeName + } + parts := strings.Split(remainder, ".") + for _, msg := range file.MessageType { + if msg.GetName() == parts[0] { + return true + } + } + for _, enum := range file.EnumType { + if enum.GetName() == parts[0] { + return true + } + } + return false + } + + // Check current file + if checkFile(g.file) { + return g.file.GetPackage() + } + // Check all files + for _, f := range g.allFiles { + if checkFile(f) { + return f.GetPackage() + } + } + return "" +} + +// findTypeInDescriptors checks whether the dot-separated parts path +// (e.g. ["Outer","Middle","Inner","Deep"]) exists as a nested message or +// enum inside the given top-level lists. Intermediate parts must match +// messages; the final part may match a message or an enum. 
+func findTypeInDescriptors(messages []*descriptorpb.DescriptorProto, enums []*descriptorpb.EnumDescriptorProto, parts []string) bool { + if len(parts) == 0 { + return false + } + if len(parts) == 1 { + for _, msg := range messages { + if msg.GetName() == parts[0] { + return true + } + } + for _, enum := range enums { + if enum.GetName() == parts[0] { + return true + } + } + return false + } + for _, msg := range messages { + if msg.GetName() == parts[0] { + return findTypeInDescriptors(msg.NestedType, msg.EnumType, parts[1:]) + } + } + return false +} + +func (g *generator) getTypescriptType(field *descriptorpb.FieldDescriptorProto) string { + baseType := g.getBaseTypescriptType(field) + + if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { + // Check if it's a map + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { + msgType := g.findMessageType(field.GetTypeName()) + if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() { + // It's a map entry + keyField := msgType.Field[0] + valueField := msgType.Field[1] + keyType := g.getBaseTypescriptType(keyField) + valueType := g.getBaseTypescriptType(valueField) + return fmt.Sprintf("{\n [key: %s]: %s;\n }", keyType, valueType) + } + } + return baseType + "[]" + } + + return baseType +} + +func (g *generator) getTypescriptTypeForMapKey(field *descriptorpb.FieldDescriptorProto) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_STRING: + return "string" + case descriptorpb.FieldDescriptorProto_TYPE_INT32, + descriptorpb.FieldDescriptorProto_TYPE_UINT32, + descriptorpb.FieldDescriptorProto_TYPE_SINT32, + descriptorpb.FieldDescriptorProto_TYPE_FIXED32, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED32: + return "number" + case descriptorpb.FieldDescriptorProto_TYPE_INT64, + descriptorpb.FieldDescriptorProto_TYPE_UINT64, + descriptorpb.FieldDescriptorProto_TYPE_SINT64, + descriptorpb.FieldDescriptorProto_TYPE_FIXED64, + 
descriptorpb.FieldDescriptorProto_TYPE_SFIXED64: + // 64-bit integers as map keys use the same type as regular fields + return "string" + case descriptorpb.FieldDescriptorProto_TYPE_BOOL: + // Boolean map keys are converted to strings in JavaScript/TypeScript + // because object keys are always strings + return "string" + default: + return "string" + } +} + +func (g *generator) getReaderMethodForMapKey(field *descriptorpb.FieldDescriptorProto) string { + // Map keys are always strings in JavaScript/TypeScript objects + // Boolean keys need .toString() conversion + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_BOOL { + return "reader.bool().toString()" + } + // Other key types use the standard reader method + return g.getReaderMethod(field) +} + +func (g *generator) getBaseTypescriptType(field *descriptorpb.FieldDescriptorProto) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE, + descriptorpb.FieldDescriptorProto_TYPE_FLOAT, + descriptorpb.FieldDescriptorProto_TYPE_INT32, + descriptorpb.FieldDescriptorProto_TYPE_UINT32, + descriptorpb.FieldDescriptorProto_TYPE_SINT32, + descriptorpb.FieldDescriptorProto_TYPE_FIXED32, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED32: + return "number" + case descriptorpb.FieldDescriptorProto_TYPE_INT64, + descriptorpb.FieldDescriptorProto_TYPE_UINT64, + descriptorpb.FieldDescriptorProto_TYPE_SINT64, + descriptorpb.FieldDescriptorProto_TYPE_FIXED64, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED64: + // Check for jstype option + if field.Options != nil && field.GetOptions().Jstype != nil { + if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER { + return "number" + } + if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL { + return "bigint" + } + // JS_STRING falls through to use longType + } + return "string" + case descriptorpb.FieldDescriptorProto_TYPE_BOOL: + return "boolean" + case descriptorpb.FieldDescriptorProto_TYPE_STRING: + 
return "string" + case descriptorpb.FieldDescriptorProto_TYPE_BYTES: + return "Uint8Array" + case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: + return g.stripPackage(field.GetTypeName()) + case descriptorpb.FieldDescriptorProto_TYPE_ENUM: + return g.stripPackage(field.GetTypeName()) + default: + return "any" + } +} + +func isJsTypeNormal(field *descriptorpb.FieldDescriptorProto) bool { + return field.Options != nil && field.GetOptions().Jstype != nil && + field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL +} + +func is64BitIntType(field *descriptorpb.FieldDescriptorProto) bool { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_INT64, + descriptorpb.FieldDescriptorProto_TYPE_UINT64, + descriptorpb.FieldDescriptorProto_TYPE_SINT64, + descriptorpb.FieldDescriptorProto_TYPE_FIXED64, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED64: + return true + } + return false +} + +func (g *generator) findMessageType(typeName string) *descriptorpb.DescriptorProto { + typeName = strings.TrimPrefix(typeName, ".") + + // Search in current file + currentPkg := "" + if g.file.Package != nil && *g.file.Package != "" { + currentPkg = *g.file.Package + } + for _, msg := range g.file.MessageType { + if found := g.findMessageTypeInMessage(msg, typeName, currentPkg); found != nil { + return found + } + } + + // Search in dependencies + for _, dep := range g.file.Dependency { + depFile := g.findFileByName(dep) + if depFile != nil { + depPkg := "" + if depFile.Package != nil && *depFile.Package != "" { + depPkg = *depFile.Package + } + for _, msg := range depFile.MessageType { + if found := g.findMessageTypeInMessage(msg, typeName, depPkg); found != nil { + return found + } + } + } + } + + return nil +} + +func (g *generator) findEnumType(typeName string) *descriptorpb.EnumDescriptorProto { + typeName = strings.TrimPrefix(typeName, ".") + + // Search in current file top-level enums + for _, enum := range g.file.EnumType { + fullName := "" + if 
g.file.Package != nil && *g.file.Package != "" { + fullName = *g.file.Package + "." + } + fullName += enum.GetName() + if typeName == fullName { + return enum + } + } + + // Search in current file nested enums + currentPkg := "" + if g.file.Package != nil && *g.file.Package != "" { + currentPkg = *g.file.Package + } + for _, msg := range g.file.MessageType { + if found := g.findEnumTypeInMessage(msg, typeName, currentPkg); found != nil { + return found + } + } + + // Search in dependencies (direct + transitive public) + searchFiles := make([]*descriptorpb.FileDescriptorProto, 0) + for _, dep := range g.file.Dependency { + depFile := g.findFileByName(dep) + if depFile != nil { + searchFiles = append(searchFiles, depFile) + } + } + searchFiles = append(searchFiles, g.collectTransitivePublicDeps(g.file)...) + for _, depFile := range searchFiles { + depPkg := "" + if depFile.Package != nil && *depFile.Package != "" { + depPkg = *depFile.Package + } + + for _, enum := range depFile.EnumType { + fullName := "" + if depPkg != "" { + fullName = depPkg + "." + } + fullName += enum.GetName() + if typeName == fullName { + return enum + } + } + for _, msg := range depFile.MessageType { + prefix := depPkg + if found := g.findEnumTypeInMessage(msg, typeName, prefix); found != nil { + return found + } + } + } + + return nil +} + +func (g *generator) findEnumTypeInMessage(msg *descriptorpb.DescriptorProto, typeName string, prefix string) *descriptorpb.EnumDescriptorProto { + msgFullName := prefix + if msgFullName != "" { + msgFullName += "." + } + msgFullName += msg.GetName() + + // Check nested enums + for _, enum := range msg.EnumType { + fullName := msgFullName + "." 
enum.GetName()
		if typeName == fullName {
			return enum
		}
	}

	// Search nested messages
	for _, nested := range msg.NestedType {
		if found := g.findEnumTypeInMessage(nested, typeName, msgFullName); found != nil {
			return found
		}
	}

	return nil
}

// findMessageTypeInMessage recursively searches msg and all of its nested
// messages for a message whose dotted full name (prefix + "." + message names)
// equals typeName. Returns nil when no match is found.
func (g *generator) findMessageTypeInMessage(msg *descriptorpb.DescriptorProto, typeName string, prefix string) *descriptorpb.DescriptorProto {
	fullName := prefix
	if fullName != "" {
		fullName += "."
	}
	fullName += msg.GetName()

	// Check if current message matches
	if typeName == fullName {
		return msg
	}

	// Search nested types
	for _, nested := range msg.NestedType {
		if found := g.findMessageTypeInMessage(nested, typeName, fullName); found != nil {
			return found
		}
	}

	return nil
}

// generateFieldDescriptor generates a single field descriptor in the MessageType constructor
// oneofName is the proto snake_case name - it will be converted to camelCase for the descriptor.
// comma is appended verbatim after the emitted descriptor ("," for all but the last field).
// The emitted object literal mirrors protobuf-ts field info: kind (scalar/enum/message/map),
// plus optional localName, jsonName, repeat, opt, oneof, T/K/V, L, and custom options.
func (g *generator) generateFieldDescriptor(field *descriptorpb.FieldDescriptorProto, oneofName string, comma string) {
	kind := "scalar"
	t := g.getScalarTypeEnum(field)
	extraFields := ""

	// Convert oneof name to camelCase for use in field descriptor
	oneofCamelName := ""
	if oneofName != "" {
		oneofCamelName = g.toCamelCase(oneofName)
		// Escape reserved property names
		if oneofCamelName == "__proto__" || oneofCamelName == "toString" || oneofCamelName == "oneofKind" {
			oneofCamelName = oneofCamelName + "$"
		}
	}

	// Determine field kind and extra fields
	if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
		msgType := g.findMessageType(field.GetTypeName())
		if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() {
			// Map field: the synthetic map-entry message has exactly two fields,
			// key (Field[0]) and value (Field[1]).
			kind = "map"
			keyField := msgType.Field[0]
			valueField := msgType.Field[1]
			keyT := g.getScalarTypeEnum(keyField)
			keyTypeName := g.getScalarTypeName(keyField)
			if valueField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
				extraFields = fmt.Sprintf(", K: %s /*ScalarType.%s*/, V: { kind: \"message\", T: () => %s }", keyT, keyTypeName, g.stripPackage(valueField.GetTypeName()))
			} else if valueField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_ENUM {
				valueTypeName := g.stripPackage(valueField.GetTypeName())
				valueFullTypeName := g.getProtoTypeName(valueField.GetTypeName())
				enumType := g.findEnumType(valueField.GetTypeName())
				enumPrefix := ""
				if enumType != nil {
					enumPrefix = g.detectEnumPrefix(enumType)
				}
				// Enum T tuple optionally carries a shared value-name prefix as its third element.
				if enumPrefix != "" {
					extraFields = fmt.Sprintf(", K: %s /*ScalarType.%s*/, V: { kind: \"enum\", T: () => [\"%s\", %s, \"%s\"] }", keyT, keyTypeName, valueFullTypeName, valueTypeName, enumPrefix)
				} else {
					extraFields = fmt.Sprintf(", K: %s /*ScalarType.%s*/, V: { kind: \"enum\", T: () => [\"%s\", %s] }", keyT, keyTypeName, valueFullTypeName, valueTypeName)
				}
			} else {
				valueT := g.getScalarTypeEnum(valueField)
				valueTypeName := g.getScalarTypeName(valueField)
				extraFields = fmt.Sprintf(", K: %s /*ScalarType.%s*/, V: { kind: \"scalar\", T: %s /*ScalarType.%s*/ }", keyT, keyTypeName, valueT, valueTypeName)
			}
		} else {
			// Message field
			kind = "message"
			if oneofCamelName != "" {
				extraFields = fmt.Sprintf(", oneof: \"%s\", T: () => %s", oneofCamelName, g.stripPackage(field.GetTypeName()))
			} else {
				extraFields = fmt.Sprintf(", T: () => %s", g.stripPackage(field.GetTypeName()))
			}
		}
	} else if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_ENUM {
		// Enum field
		kind = "enum"
		typeName := g.stripPackage(field.GetTypeName())
		fullTypeName := g.getProtoTypeName(field.GetTypeName())

		// Get enum to detect prefix
		enumType := g.findEnumType(field.GetTypeName())
		enumPrefix := ""
		if enumType != nil {
			enumPrefix = g.detectEnumPrefix(enumType)
		}

		// Build T parameter
		var tParam string
		if enumPrefix != "" {
			tParam = fmt.Sprintf("[\"%s\", %s, \"%s\"]", fullTypeName, typeName, enumPrefix)
		} else {
			tParam = fmt.Sprintf("[\"%s\", %s]", fullTypeName, typeName)
		}

		if oneofCamelName != "" {
			extraFields = fmt.Sprintf(", oneof: \"%s\", T: () => %s", oneofCamelName, tParam)
		} else {
			extraFields = fmt.Sprintf(", T: () => %s", tParam)
		}
	} else {
		// Scalar field
		if oneofCamelName != "" {
			extraFields = fmt.Sprintf(", oneof: \"%s\"", oneofCamelName)
		}
	}

	// Add repeat field for repeated fields (not maps)
	repeat := ""
	if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED && kind != "map" {
		if g.isFieldPacked(field) {
			repeat = ", repeat: 1 /*RepeatType.PACKED*/"
		} else {
			repeat = ", repeat: 2 /*RepeatType.UNPACKED*/"
		}
	}

	// Add localName when property name was escaped for reserved object properties
	localNameField := ""
	if g.needsLocalName(field) {
		propertyName := g.propertyName(field)
		localNameField = fmt.Sprintf(", localName: \"%s\"", propertyName)
	}

	// Add jsonName when it differs from the TypeScript property name (before escaping)
	jsonNameField := ""
	if field.JsonName != nil {
		// Compare against unescaped camelCase name
		camelName := g.toCamelCase(field.GetName())
		actualJsonName := *field.JsonName
		// Include jsonName if it differs from the unescaped camelCase name.
		// The value is embedded in a TS string literal, so escape backslash,
		// quote, and control characters (backslash first to avoid double-escaping).
		if camelName != actualJsonName {
			escaped := strings.ReplaceAll(actualJsonName, `\`, `\\`)
			escaped = strings.ReplaceAll(escaped, `"`, `\"`)
			escaped = strings.ReplaceAll(escaped, "\n", `\n`)
			escaped = strings.ReplaceAll(escaped, "\r", `\r`)
			escaped = strings.ReplaceAll(escaped, "\t", `\t`)
			jsonNameField = fmt.Sprintf(", jsonName: \"%s\"", escaped)
		}
	}

	// Mark as optional for proto3 optional scalars/enums or proto2 optional scalars
	opt := ""
	isProto2 := g.file.GetSyntax() == "proto2" || g.file.GetSyntax() == ""
	if field.Proto3Optional != nil && *field.Proto3Optional {
		// Proto3 explicit optional - scalars and enums get opt flag, messages don't (they're already optional)
		if field.GetType() != descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
			opt = ", opt: true"
		}
	} else if isProto2 && field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL &&
		field.GetType() != descriptorpb.FieldDescriptorProto_TYPE_MESSAGE &&
		field.OneofIndex == nil {
		// Proto2 optional scalars get opt flag (not messages or oneof members)
		opt = ", opt: true"
	}

	// Check for jstype option to add L parameter
	longTypeParam := ""
	if field.Options != nil && field.GetOptions().Jstype != nil && is64BitIntType(field) {
		if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
			longTypeParam = ", L: 2 /*LongType.NUMBER*/"
		} else if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
			longTypeParam = ", L: 0 /*LongType.BIGINT*/"
		}
	}

	// Custom field options
	customFieldOptsStr := ""
	customFieldOpts := g.getCustomFieldOptions(field.Options)
	if len(customFieldOpts) > 0 {
		customFieldOptsStr = ", options: " + formatCustomOptions(customFieldOpts)
	}

	// Generate the field descriptor. Three layouts, differing only in where
	// T and the oneof annotation sit relative to repeat/opt.
	if kind == "scalar" && oneofName == "" {
		// Regular scalar field needs T parameter
		typeName := g.getScalarTypeName(field)
		g.p("{ no: %d, name: \"%s\", kind: \"%s\"%s%s%s%s, T: %s /*ScalarType.%s*/%s%s }%s",
			field.GetNumber(), field.GetName(), kind, localNameField, jsonNameField, repeat, opt, t, typeName, longTypeParam, customFieldOptsStr, comma)
	} else if kind == "scalar" && oneofName != "" {
		// Scalar oneof field - jsonName comes BEFORE oneof, oneof comes BEFORE T
		typeName := g.getScalarTypeName(field)
		g.p("{ no: %d, name: \"%s\", kind: \"%s\"%s%s%s, T: %s /*ScalarType.%s*/%s%s }%s",
			field.GetNumber(), field.GetName(), kind, localNameField, jsonNameField, extraFields, t, typeName, longTypeParam, customFieldOptsStr, comma)
	} else {
		// Message, enum, or map field
		g.p("{ no: %d, name: \"%s\", kind: \"%s\"%s%s%s%s%s%s }%s",
			field.GetNumber(), field.GetName(), kind, localNameField, jsonNameField, repeat, opt, extraFields, customFieldOptsStr, comma)
	}
}

// generateMessageTypeClass emits the `class <fullName>$Type extends MessageType<...>`
// reflection class for one message: the constructor with field descriptors,
// well-known-type helper methods, and (unless optimize_for = CODE_SIZE)
// create/internalBinaryRead/internalBinaryWrite, followed by the exported
// singleton constant. protoName uses dots as separators for nested messages.
func (g *generator) generateMessageTypeClass(msg *descriptorpb.DescriptorProto, fullName string, protoName string) {
	className := fullName + "$Type"

	g.pNoIndent("// @generated message type with reflection information, may provide speed optimized methods")
	g.pNoIndent("class %s extends %s<%s> {", className, g.messageTypeRef, fullName)
	g.indent = " "

	// Constructor
	pkgPrefix := ""
	if g.file.Package != nil && *g.file.Package != "" {
		pkgPrefix = *g.file.Package + "."
	}
	// protoName already uses dots as separators
	typeName := pkgPrefix + protoName

	g.p("constructor() {")
	g.indent = " "

	// Classify fields by type and sort by field number
	type fieldInfo struct {
		field            *descriptorpb.FieldDescriptorProto
		isProto3Optional bool
		oneofName        string // Proto snake_case oneof name (for real oneofs only)
	}

	var allFields []fieldInfo
	for _, field := range msg.Field {
		// Skip GROUP type fields - they're deprecated and handled as nested messages
		if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP {
			continue
		}

		info := fieldInfo{field: field}

		// Check if this field is part of a oneof
		if field.OneofIndex != nil {
			oneofIdx := field.GetOneofIndex()
			if oneofIdx < int32(len(msg.OneofDecl)) {
				oneofName := msg.OneofDecl[oneofIdx].GetName()
				isProto3Optional := field.Proto3Optional != nil && *field.Proto3Optional

				// Proto3 "optional" is a synthetic single-member oneof; treat it as a
				// plain optional field rather than a real oneof.
				if isProto3Optional {
					info.isProto3Optional = true
				} else {
					info.oneofName = oneofName
				}
			}
		}

		allFields = append(allFields, info)
	}

	// Keep fields in proto file order (don't sort)
	// The order in msg.Field is the order they appear in the .proto file

	// Get custom message options
	customMsgOpts := g.getCustomMessageOptions(msg.Options)
	customMsgOptsStr := ""
	if len(customMsgOpts) > 0 {
		customMsgOptsStr = ", " + formatCustomOptions(customMsgOpts)
	}

	// If no fields, use compact format
	if len(allFields) == 0 {
		g.p("super(\"%s\", []%s);", typeName, customMsgOptsStr)
	} else {
		g.p("super(\"%s\", [", typeName)

		// Generate field descriptors in field number order
		g.indent = " "
		for i, info := range allFields {
			field := info.field
			comma := ","
			if i == len(allFields)-1 {
				comma = ""
			}

			// Generate field descriptor
			g.generateFieldDescriptor(field, info.oneofName, comma)
		}

		g.indent = " "
		g.p("]%s);", customMsgOptsStr)
	}
	g.indent = " "
	g.p("}")

	// Check if this is a well-known type that needs special handling
	isTimestamp := g.file.Package != nil && *g.file.Package == "google.protobuf" && fullName == "Timestamp"
	isDuration := g.file.Package != nil && *g.file.Package == "google.protobuf" && fullName == "Duration"
	isFieldMask := g.file.Package != nil && *g.file.Package == "google.protobuf" && fullName == "FieldMask"
	isStruct := g.file.Package != nil && *g.file.Package == "google.protobuf" && (fullName == "Struct" || fullName == "Value" || fullName == "ListValue")
	isAny := g.file.Package != nil && *g.file.Package == "google.protobuf" && fullName == "Any"
	isWrapper := g.file.Package != nil && *g.file.Package == "google.protobuf" && isWrapperTypeName(fullName)
	isGoogleTypeDate := g.file.Package != nil && *g.file.Package == "google.type" && fullName == "Date"
	isGoogleTypeColor := g.file.Package != nil && *g.file.Package == "google.type" && fullName == "Color"
	isGoogleTypeDateTime := g.file.Package != nil && *g.file.Package == "google.type" && fullName == "DateTime"
	isGoogleTypeTimeOfDay := g.file.Package != nil && *g.file.Package == "google.type" && fullName == "TimeOfDay"

	// Add special methods for well-known types BEFORE standard methods
	if isTimestamp {
		g.generateTimestampMethods()
	} else if isDuration {
		g.generateDurationMethods()
	} else if isFieldMask {
		g.generateFieldMaskMethods()
	} else if isStruct {
		g.generateStructMethods(fullName)
	} else if isWrapper {
		g.generateWrapperMethods(fullName)
	} else if isAny {
		g.generateAnyMethods()
	} else if isGoogleTypeDate {
		g.generateGoogleTypeDateMethods()
	} else if isGoogleTypeColor {
		g.generateGoogleTypeColorMethods()
	} else if isGoogleTypeDateTime {
		g.generateGoogleTypeDateTimeMethods()
	} else if isGoogleTypeTimeOfDay {
		g.generateGoogleTypeTimeOfDayMethods()
	}

	// Skip create, internalBinaryRead, internalBinaryWrite when optimize_for = CODE_SIZE
	if !g.isOptimizeCodeSize() {
		// create method
		g.p("create(value?: %s<%s>): %s {", g.partialMessageRef, fullName, fullName)
		g.indent = " "
		g.p("const message = globalThis.Object.create((this.messagePrototype!));")

		// Initialize fields and oneofs in field number order
		// Build a list of all initialization items (fields and oneofs) with their field numbers
		type initItem struct {
			fieldNumber int32
			isOneof     bool
			oneofIdx    int32
			oneofName   string
			fieldName   string
			defaultVal  string
		}

		var initItems []initItem
		oneofSeen := make(map[int32]bool)

		for _, field := range msg.Field {
			// Skip GROUP type fields - they're deprecated and handled as nested messages
			if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP {
				continue
			}

			fieldNum := field.GetNumber()

			if field.OneofIndex != nil {
				oneofIdx := field.GetOneofIndex()
				if oneofIdx < int32(len(msg.OneofDecl)) {
					oneofName := msg.OneofDecl[oneofIdx].GetName()
					isProto3Optional := field.Proto3Optional != nil && *field.Proto3Optional

					if !isProto3Optional {
						// Real oneof - add initialization for it (only once)
						if !oneofSeen[oneofIdx] {
							oneofSeen[oneofIdx] = true
							initItems = append(initItems, initItem{
								fieldNumber: fieldNum,
								isOneof:     true,
								oneofIdx:    oneofIdx,
								oneofName:   oneofName,
							})
						}
						continue
					}
					// Proto3 optional - treat as regular field, fall through
				}
			}

			// Regular field or proto3 optional; an empty defaultVal means
			// "no initializer emitted" (e.g. optional message fields).
			fieldName := g.propertyName(field)
			defaultVal := g.getDefaultValue(field)
			if defaultVal != "" {
				initItems = append(initItems, initItem{
					fieldNumber: fieldNum,
					isOneof:     false,
					fieldName:   fieldName,
					defaultVal:  defaultVal,
				})
			}
		}

		// Deduplicate fields with the same property name (e.g. x123y and x_123_y both → x123Y)
		// Last-write-wins: keep the LAST occurrence (matches JS Object.entries behavior)
		fieldNameSeen := make(map[string]bool)
		dedupItems := make([]initItem, 0, len(initItems))
		for i := len(initItems) - 1; i >= 0; i-- {
			item := initItems[i]
			if item.isOneof || !fieldNameSeen[item.fieldName] {
				if !item.isOneof {
					fieldNameSeen[item.fieldName] = true
				}
				dedupItems = append(dedupItems, item)
			}
		}
		// Reverse to restore original order
		for i, j := 0, len(dedupItems)-1; i < j; i, j = i+1, j-1 {
			dedupItems[i], dedupItems[j] = dedupItems[j], dedupItems[i]
		}
		initItems = dedupItems

		// Reorder to match JavaScript Object.entries() enumeration:
		// integer-like keys first (ascending numeric), then string keys (insertion order)
		var intItems []initItem
		var strItems []initItem
		for _, item := range initItems {
			key := item.fieldName
			if item.isOneof {
				oneofCamelName := g.toCamelCase(item.oneofName)
				if oneofCamelName == "__proto__" || oneofCamelName == "toString" || oneofCamelName == "oneofKind" {
					oneofCamelName = oneofCamelName + "$"
				}
				key = oneofCamelName
			}
			if isArrayIndex(key) {
				intItems = append(intItems, item)
			} else {
				strItems = append(strItems, item)
			}
		}
		// NOTE(review): comparator keys skip the "$" escaping applied above; harmless
		// today since escaped names ("__proto__" etc.) are never array indices.
		// ParseUint errors are deliberately ignored — keys here passed isArrayIndex.
		sort.Slice(intItems, func(i, j int) bool {
			ki := intItems[i].fieldName
			if intItems[i].isOneof {
				ki = g.toCamelCase(intItems[i].oneofName)
			}
			kj := intItems[j].fieldName
			if intItems[j].isOneof {
				kj = g.toCamelCase(intItems[j].oneofName)
			}
			a, _ := strconv.ParseUint(ki, 10, 64)
			b, _ := strconv.ParseUint(kj, 10, 64)
			return a < b
		})
		initItems = append(intItems, strItems...)

		// Generate initializations
		for _, item := range initItems {
			if item.isOneof {
				// Initialize oneof
				oneofCamelName := g.toCamelCase(item.oneofName)
				// Escape reserved property names
				if oneofCamelName == "__proto__" || oneofCamelName == "toString" || oneofCamelName == "oneofKind" {
					oneofCamelName = oneofCamelName + "$"
				}
				g.p("message.%s = { oneofKind: undefined };", oneofCamelName)
			} else {
				// Initialize regular field
				g.p("message.%s = %s;", item.fieldName, item.defaultVal)
			}
		}

		g.p("if (value !== undefined)")
		g.indent = " "
		g.p("%s<%s>(this, message, value);", g.reflectionMergePartialRef, fullName)
		g.indent = " "
		g.p("return message;")
		g.indent = " "
		g.p("}")

		// internalBinaryRead method
		g.p("internalBinaryRead(reader: %s, length: number, options: %s, target?: %s): %s {", g.iBinaryReaderRef, g.binaryReadOptionsRef, fullName, fullName)
		g.indent = " "
		g.p("let message = target ?? this.create(), end = reader.pos + length;")
		g.p("while (reader.pos < end) {")
		g.indent = " "
		g.p("let [fieldNo, wireType] = reader.tag();")
		g.p("switch (fieldNo) {")

		// Read each field
		for _, field := range msg.Field {
			// Skip GROUP type fields - they're deprecated and handled as nested messages
			if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP {
				continue
			}

			g.indent = " "
			fieldName := g.propertyName(field)

			// Build the options annotation
			optionsAnnotation := g.formatFieldOptionsAnnotation(field)

			// Show field number if there are options
			fieldNumberInComment := ""
			if optionsAnnotation != "" {
				fieldNumberInComment = fmt.Sprintf(" = %d", field.GetNumber())
			}

			g.p("case /* %s %s%s%s */ %d:", g.getProtoType(field), field.GetName(), fieldNumberInComment, optionsAnnotation, field.GetNumber())
			g.indent = " "

			// Check if this is a real oneof (not proto3 optional)
			isRealOneof := false
			var oneofCamelName string
			if field.OneofIndex != nil {
				oneofIdx := field.GetOneofIndex()
				oneofName := msg.OneofDecl[oneofIdx].GetName()
				isProto3Optional := field.Proto3Optional != nil && *field.Proto3Optional

				if !isProto3Optional {
					isRealOneof = true
					oneofCamelName = g.toCamelCase(oneofName)
					// Escape reserved property names
					if oneofCamelName == "__proto__" || oneofCamelName == "toString" || oneofCamelName == "oneofKind" {
						oneofCamelName = oneofCamelName + "$"
					}
				}
			}

			if isRealOneof {
				// Real oneof field
				fieldJsonName := g.propertyName(field)
				if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
					// For message types, support merging
					g.p("message.%s = {", oneofCamelName)
					g.indent = " "
					g.p("oneofKind: \"%s\",", fieldJsonName)
					g.p("%s: %s", fieldJsonName, g.getReaderMethodWithMerge(field, fmt.Sprintf("(message.%s as any).%s", oneofCamelName, fieldJsonName)))
					g.indent = " "
					g.p("};")
				} else {
					g.p("message.%s = {", oneofCamelName)
					g.indent = " "
					g.p("oneofKind: \"%s\",", fieldJsonName)
					g.p("%s: %s", fieldJsonName, g.getReaderMethod(field))
					g.indent = " "
					g.p("};")
				}
			} else if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
				msgType := g.findMessageType(field.GetTypeName())
				if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() {
					// Map field
					_ = msgType.Field[1] // valueField used in map generation
					g.p("this.binaryReadMap%d(message.%s, reader, options);", field.GetNumber(), fieldName)
				} else if g.isPackedType(field) {
					// Packed repeated fields can come as either packed or unpacked
					g.p("if (wireType === %s.LengthDelimited)", g.wireTypeRef)
					g.indent = " "
					g.p("for (let e = reader.int32() + reader.pos; reader.pos < e;)")
					g.indent = " "
					g.p("message.%s.push(%s);", fieldName, g.getReaderMethodSimple(field))
					g.indent = " "
					g.p("else")
					g.indent = " "
					g.p("message.%s.push(%s);", fieldName, g.getReaderMethod(field))
					g.indent = " "
				} else {
					g.p("message.%s.push(%s);", fieldName, g.getReaderMethod(field))
				}
			} else {
				if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
					// For message fields, pass existing message for merging
					fieldName := g.propertyName(field)
					g.p("message.%s = %s;", fieldName, g.getReaderMethodWithMerge(field, "message."+fieldName))
				} else {
					g.p("message.%s = %s;", fieldName, g.getReaderMethod(field))
				}
			}

			g.indent = " "
			g.p("break;")
		}

		g.indent = " "
		g.p("default:")
		g.indent = " "
		g.p("let u = options.readUnknownField;")
		g.p("if (u === \"throw\")")
		g.indent = " "
		g.p("throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);")
		g.indent = " "
		g.p("let d = reader.skip(wireType);")
		g.p("if (u !== false)")
		g.indent = " "
		g.p("(u === true ? %s.onRead : u)(this.typeName, message, fieldNo, wireType, d);", g.unknownFieldHandlerRef)
		g.indent = " "
		g.p("}")
		g.indent = " "
		g.p("}")
		g.p("return message;")
		g.indent = " "
		g.p("}")

		// Add map read helpers if needed
		pkgPrefix = ""
		if g.file.Package != nil && *g.file.Package != "" {
			pkgPrefix = *g.file.Package + "."
		}
		protoTypeName := pkgPrefix + protoName

		for _, field := range msg.Field {
			if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
				msgType := g.findMessageType(field.GetTypeName())
				if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() {
					keyField := msgType.Field[0]
					valueField := msgType.Field[1]

					fieldName := g.propertyName(field)
					g.p("private binaryReadMap%d(map: %s[\"%s\"], reader: %s, options: %s): void {",
						field.GetNumber(),
						fullName,
						fieldName,
						g.iBinaryReaderRef,
						g.binaryReadOptionsRef)
					g.indent = " "
					g.p("let len = reader.uint32(), end = reader.pos + len, key: keyof %s[\"%s\"] | undefined, val: %s[\"%s\"][any] | undefined;",
						fullName, fieldName, fullName, fieldName)
					g.p("while (reader.pos < end) {")
					g.indent = " "
					g.p("let [fieldNo, wireType] = reader.tag();")
					g.p("switch (fieldNo) {")
					g.indent = " "
					g.p("case 1:")
					g.indent = " "
					g.p("key = %s;", g.getReaderMethodForMapKey(keyField))
					g.indent = " "
					g.p("break;")
					g.indent = " "
					g.p("case 2:")
					g.indent = " "
					g.p("val = %s;", g.getReaderMethod(valueField))
					g.indent = " "
					g.p("break;")
					g.indent = " "
					g.p("default: throw new globalThis.Error(\"unknown map entry field for %s.%s\");", protoTypeName, field.GetName())
					g.indent = " "
					g.p("}")
					g.indent = " "
					g.p("}")

					// Generate proper default assignment
					keyDefault := g.getMapKeyDefault(keyField)
					valueDefault := g.getMapValueDefault(valueField)
					g.p("map[key ?? %s] = val ?? %s;", keyDefault, valueDefault)
					g.indent = " "
					g.p("}")
				}
			}
		}

		// internalBinaryWrite method
		g.p("internalBinaryWrite(message: %s, writer: %s, options: %s): %s {", fullName, g.iBinaryWriterRef, g.binaryWriteOptionsRef, g.iBinaryWriterRef)
		g.indent = " "

		// Sort fields by field number for write method (for efficiency)
		sortedFields := make([]*descriptorpb.FieldDescriptorProto, len(msg.Field))
		copy(sortedFields, msg.Field)
		// Using a simple bubble sort to avoid importing sort package
		// NOTE(review): the rationale above is stale — sort.Slice is already used
		// in the create() generation earlier in this function. Kept as-is because
		// this selection-style sort's tie behavior differs from sort.Slice and the
		// emitted output must stay byte-stable.
		for i := 0; i < len(sortedFields); i++ {
			for j := i + 1; j < len(sortedFields); j++ {
				if sortedFields[i].GetNumber() > sortedFields[j].GetNumber() {
					sortedFields[i], sortedFields[j] = sortedFields[j], sortedFields[i]
				}
			}
		}

		for _, field := range sortedFields {
			// Skip GROUP type fields - they're deprecated and handled as nested messages
			if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP {
				continue
			}

			fieldName := g.propertyName(field)

			optionsAnnotation := g.formatFieldOptionsAnnotation(field)

			g.p("/* %s %s = %d%s; */", g.getProtoType(field), field.GetName(), field.GetNumber(), optionsAnnotation)

			// Check if this is a real oneof (not proto3 optional)
			isRealOneof := false
			var oneofCamelName string
			if field.OneofIndex != nil {
				oneofIdx := field.GetOneofIndex()
				oneofName := msg.OneofDecl[oneofIdx].GetName()
				isProto3Optional := field.Proto3Optional != nil && *field.Proto3Optional

				if !isProto3Optional {
					isRealOneof = true
					oneofCamelName = g.toCamelCase(oneofName)
					// Escape reserved property names
					if oneofCamelName == "__proto__" || oneofCamelName == "toString" || oneofCamelName == "oneofKind" {
						oneofCamelName = oneofCamelName + "$"
					}
				}
			}

			if isRealOneof {
				// Real oneof field
				fieldJsonName := g.propertyName(field)
				g.p("if (message.%s.oneofKind === \"%s\")", oneofCamelName, fieldJsonName)
				g.indent = " "
				g.p("%s", g.getWriterMethod(field, "message."+oneofCamelName+"."+fieldJsonName))
				g.indent = " "
			} else if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
				msgType := g.findMessageType(field.GetTypeName())
				if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() {
					// Map field
					keyField := msgType.Field[0]
					valueField := msgType.Field[1]

					// Check if key is numeric
					isNumericKey := keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_INT32 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_INT64 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_UINT32 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_UINT64 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SINT32 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SINT64 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_FIXED32 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_FIXED64 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SFIXED32 ||
						keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SFIXED64

					isBooleanKey := keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_BOOL

					if valueField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
						// Message value - needs special handling
						if isNumericKey {
							keyVar := "k"
							valueAccessor := "message." + fieldName + "[k]"
							if keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_INT32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_UINT32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SINT32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_FIXED32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SFIXED32 {
								keyVar = "parseInt(k)"
								valueAccessor = "message." + fieldName + "[k as any]"
							}
							keyWriter := g.getMapKeyWriter(keyField, keyVar)
							g.p("for (let k of globalThis.Object.keys(message.%s)) {", fieldName)
							g.indent = " "
							g.p("writer.tag(%d, %s.LengthDelimited).fork()%s;", field.GetNumber(), g.wireTypeRef, keyWriter)
							g.p("writer.tag(2, %s.LengthDelimited).fork();", g.wireTypeRef)
							g.p("%s.internalBinaryWrite(%s, writer, options);", g.stripPackage(valueField.GetTypeName()), valueAccessor)
							g.p("writer.join().join();")
							g.indent = " "
							g.p("}")
						} else if isBooleanKey {
							g.p("for (let k of globalThis.Object.keys(message.%s)) {", fieldName)
							g.indent = " "
							g.p("writer.tag(%d, %s.LengthDelimited).fork().tag(1, %s.Varint).bool(k === \"true\");", field.GetNumber(), g.wireTypeRef, g.wireTypeRef)
							g.p("writer.tag(2, %s.LengthDelimited).fork();", g.wireTypeRef)
							g.p("%s.internalBinaryWrite(message.%s[k], writer, options);", g.stripPackage(valueField.GetTypeName()), fieldName)
							g.p("writer.join().join();")
							g.indent = " "
							g.p("}")
						} else {
							g.p("for (let k of globalThis.Object.keys(message.%s)) {", fieldName)
							g.indent = " "
							g.p("writer.tag(%d, %s.LengthDelimited).fork().tag(1, %s.LengthDelimited).string(k);", field.GetNumber(), g.wireTypeRef, g.wireTypeRef)
							g.p("writer.tag(2, %s.LengthDelimited).fork();", g.wireTypeRef)
							g.p("%s.internalBinaryWrite(message.%s[k], writer, options);", g.stripPackage(valueField.GetTypeName()), fieldName)
							g.p("writer.join().join();")
							g.indent = " "
							g.p("}")
						}
					} else {
						// Scalar value
						g.p("for (let k of globalThis.Object.keys(message.%s))", fieldName)
						g.indent = " "
						if isNumericKey {
							// For 64-bit types and signed types that use string keys, use k directly
							// For 32-bit types that use number keys, use parseInt(k)
							keyVar := "k"
							valueAccessor := "message." + fieldName + "[k]"
							if keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_INT32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_UINT32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SINT32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_FIXED32 ||
								keyField.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SFIXED32 {
								keyVar = "parseInt(k)"
								valueAccessor = "message." + fieldName + "[k as any]"
							}
							keyWriter := g.getMapKeyWriter(keyField, keyVar)
							valueWriter := g.getMapValueWriter(valueField, valueAccessor)
							g.p("writer.tag(%d, %s.LengthDelimited).fork()%s%s.join();",
								field.GetNumber(), g.wireTypeRef, keyWriter, valueWriter)
						} else if isBooleanKey {
							valueWriter := g.getMapValueWriter(valueField, "message."+fieldName+"[k]")
							g.p("writer.tag(%d, %s.LengthDelimited).fork().tag(1, %s.Varint).bool(k === \"true\")%s.join();",
								field.GetNumber(), g.wireTypeRef, g.wireTypeRef, valueWriter)
						} else {
							valueWriter := g.getMapValueWriter(valueField, "message."+fieldName+"[k]")
							g.p("writer.tag(%d, %s.LengthDelimited).fork().tag(1, %s.LengthDelimited).string(k)%s.join();",
								field.GetNumber(), g.wireTypeRef, g.wireTypeRef, valueWriter)
						}
						g.indent = " "
					}
				} else if g.isFieldPacked(field) {
					// Write packed repeated fields
					g.p("if (message.%s.length) {", fieldName)
					g.indent = " "
					g.p("writer.tag(%d, %s.LengthDelimited).fork();", field.GetNumber(), g.wireTypeRef)
					g.p("for (let i = 0; i < message.%s.length; i++)", fieldName)
					g.indent = " "
					method := g.getWriterMethodName(field)
					g.p("writer.%s(message.%s[i]);", method, fieldName)
					g.indent = " "
					g.p("writer.join();")
					g.indent = " "
					g.p("}")
				} else {
					g.p("for (let i = 0; i < message.%s.length; i++)", fieldName)
					g.indent = " "
					g.p("%s", g.getWriterMethod(field, "message."+fieldName+"[i]"))
					g.indent = " "
				}
			} else {
				// Singular field: only write when the value differs from its default
				// (empty condition means "always write", e.g. required fields).
				condition := g.getWriteCondition(field, fieldName)
				if condition != "" {
					g.p("if (%s)", condition)
					g.indent = " "
				}
				g.p("%s", g.getWriterMethod(field, "message."+fieldName))
				if condition != "" {
					g.indent = " "
				}
			}
		}

		g.p("let u = options.writeUnknownFields;")
		g.p("if (u !== false)")
		g.indent = " "
		// NOTE(review): write path emits loose `u == true` while the read path
		// emits strict `u === true` — presumably mirrors protobuf-ts output; confirm.
		g.p("(u == true ? %s.onWrite : u)(this.typeName, message, writer);", g.unknownFieldHandlerRef)
		g.indent = " "
		g.p("return writer;")
		g.indent = " "
		g.p("}")
	} // end !isOptimizeCodeSize

	g.indent = ""
	g.pNoIndent("}")

	// Export constant
	g.pNoIndent("/**")
	// Add @deprecated if message has deprecated option OR file is deprecated
	if (msg.Options != nil && msg.GetOptions().GetDeprecated()) || g.isFileDeprecated() {
		g.pNoIndent(" * @deprecated")
	}
	g.pNoIndent(" * @generated MessageType for protobuf message %s", typeName)
	g.pNoIndent(" */")
	g.pNoIndent("export const %s = new %s();", fullName, className)
}

// getScalarTypeEnum returns the numeric protobuf-ts ScalarType enum value
// (as a string, for direct embedding in generated TS) for a scalar field type.
func (g *generator) getScalarTypeEnum(field *descriptorpb.FieldDescriptorProto) string {
	switch field.GetType() {
	case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
		return "1"
	case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
		return "2"
	case descriptorpb.FieldDescriptorProto_TYPE_INT64:
		return "3"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
		return "4"
	case descriptorpb.FieldDescriptorProto_TYPE_INT32:
		return "5"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
		return "6"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
		return "7"
	case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
		return "8"
	case descriptorpb.FieldDescriptorProto_TYPE_STRING:
		return "9"
	case descriptorpb.FieldDescriptorProto_TYPE_BYTES:
		return "12"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT32:
		return "13"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
		return "15"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
		return "16"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
		return "17"
	case
descriptorpb.FieldDescriptorProto_TYPE_SINT64: + return "18" + default: + return "9" // default to string + } +} + +func (g *generator) getScalarTypeName(field *descriptorpb.FieldDescriptorProto) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE: + return "DOUBLE" + case descriptorpb.FieldDescriptorProto_TYPE_FLOAT: + return "FLOAT" + case descriptorpb.FieldDescriptorProto_TYPE_INT64: + return "INT64" + case descriptorpb.FieldDescriptorProto_TYPE_UINT64: + return "UINT64" + case descriptorpb.FieldDescriptorProto_TYPE_INT32: + return "INT32" + case descriptorpb.FieldDescriptorProto_TYPE_FIXED64: + return "FIXED64" + case descriptorpb.FieldDescriptorProto_TYPE_FIXED32: + return "FIXED32" + case descriptorpb.FieldDescriptorProto_TYPE_BOOL: + return "BOOL" + case descriptorpb.FieldDescriptorProto_TYPE_STRING: + return "STRING" + case descriptorpb.FieldDescriptorProto_TYPE_BYTES: + return "BYTES" + case descriptorpb.FieldDescriptorProto_TYPE_UINT32: + return "UINT32" + case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32: + return "SFIXED32" + case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64: + return "SFIXED64" + case descriptorpb.FieldDescriptorProto_TYPE_SINT32: + return "SINT32" + case descriptorpb.FieldDescriptorProto_TYPE_SINT64: + return "SINT64" + default: + return "STRING" + } +} + +// formatDefaultValueAnnotation formats a default value for the @generated comment annotation +func (g *generator) formatDefaultValueAnnotation(field *descriptorpb.FieldDescriptorProto, defaultVal string) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_STRING, + descriptorpb.FieldDescriptorProto_TYPE_BYTES: + // Match protobuf-ts: only escape the first double-quote (JS String.replace replaces first match only) + escaped := strings.Replace(defaultVal, `"`, `\"`, 1) + return fmt.Sprintf("\"%s\"", escaped) + case descriptorpb.FieldDescriptorProto_TYPE_ENUM: + // Enum defaults show the enum value name (not the 
number) + return defaultVal + case descriptorpb.FieldDescriptorProto_TYPE_BOOL, + descriptorpb.FieldDescriptorProto_TYPE_DOUBLE, + descriptorpb.FieldDescriptorProto_TYPE_FLOAT, + descriptorpb.FieldDescriptorProto_TYPE_INT32, + descriptorpb.FieldDescriptorProto_TYPE_INT64, + descriptorpb.FieldDescriptorProto_TYPE_UINT32, + descriptorpb.FieldDescriptorProto_TYPE_UINT64, + descriptorpb.FieldDescriptorProto_TYPE_SINT32, + descriptorpb.FieldDescriptorProto_TYPE_SINT64, + descriptorpb.FieldDescriptorProto_TYPE_FIXED32, + descriptorpb.FieldDescriptorProto_TYPE_FIXED64, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED32, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED64: + // Numeric and boolean defaults are shown as-is + return defaultVal + default: + return defaultVal + } +} + +// formatFieldOptionsAnnotation builds a combined "[opt1, opt2, ...]" string for field comments. +// Order matches protobuf-ts: packed, default, json_name, jstype, deprecated. +func (g *generator) formatFieldOptionsAnnotation(field *descriptorpb.FieldDescriptorProto) string { + var options []string + + // 1. packed + if field.Options != nil && field.GetOptions().Packed != nil { + options = append(options, fmt.Sprintf("packed = %v", field.GetOptions().GetPacked())) + } + + // 2. default + if field.DefaultValue != nil { + formattedDefault := g.formatDefaultValueAnnotation(field, field.GetDefaultValue()) + options = append(options, fmt.Sprintf("default = %s", formattedDefault)) + } + + // 3. json_name + if field.JsonName != nil { + protocDefault := g.protocGeneratedJsonName(field.GetName()) + if protocDefault != *field.JsonName { + options = append(options, fmt.Sprintf("json_name = \"%s\"", *field.JsonName)) + } + } + + // 4. 
jstype + if field.Options != nil && field.GetOptions().Jstype != nil { + jstype := field.GetOptions().GetJstype() + if jstype == descriptorpb.FieldOptions_JS_STRING { + options = append(options, "jstype = JS_STRING") + } else if jstype == descriptorpb.FieldOptions_JS_NUMBER { + options = append(options, "jstype = JS_NUMBER") + } else if jstype == descriptorpb.FieldOptions_JS_NORMAL { + options = append(options, "jstype = JS_NORMAL") + } + } + + // 5. deprecated — protobuf-ts outputs [deprecated = true] whenever the option is explicitly set, + // even if the value is false (e.g., [deprecated = false] in proto still produces [deprecated = true]) + if field.Options != nil && field.GetOptions().Deprecated != nil { + options = append(options, "deprecated = true") + } + + if len(options) == 0 { + return "" + } + return " [" + strings.Join(options, ", ") + "]" +} + +func (g *generator) getDefaultValue(field *descriptorpb.FieldDescriptorProto) string { + if field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { + msgType := g.findMessageType(field.GetTypeName()) + if msgType != nil && msgType.Options != nil && msgType.GetOptions().GetMapEntry() { + return "{}" + } + return "[]" + } + + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { + return "" // optional messages don't get defaults + } + + // Proto2 optional scalars don't get defaults + isProto2 := g.file.GetSyntax() == "proto2" || g.file.GetSyntax() == "" + if isProto2 && field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL { + return "" + } + + // Proto3 explicit optional scalars don't get defaults + if field.Proto3Optional != nil && *field.Proto3Optional { + return "" + } + + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE, + descriptorpb.FieldDescriptorProto_TYPE_FLOAT, + descriptorpb.FieldDescriptorProto_TYPE_INT32, + descriptorpb.FieldDescriptorProto_TYPE_UINT32, + descriptorpb.FieldDescriptorProto_TYPE_SINT32, + 
		descriptorpb.FieldDescriptorProto_TYPE_FIXED32,
		descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
		return "0"
	case descriptorpb.FieldDescriptorProto_TYPE_INT64,
		descriptorpb.FieldDescriptorProto_TYPE_UINT64,
		descriptorpb.FieldDescriptorProto_TYPE_SINT64,
		descriptorpb.FieldDescriptorProto_TYPE_FIXED64,
		descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
		// 64-bit integers: the TS representation depends on the jstype option.
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "0" // JS_NUMBER uses number type
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "0n" // JS_NORMAL uses bigint type
			}
			// JS_STRING falls through to string default
		}
		return "\"0\""
	case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
		return "false"
	case descriptorpb.FieldDescriptorProto_TYPE_STRING:
		return "\"\""
	case descriptorpb.FieldDescriptorProto_TYPE_BYTES:
		return "new Uint8Array(0)"
	case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
		return "0"
	default:
		return ""
	}
}

// getReaderMethod returns the TypeScript expression that decodes one value of
// this field from the binary reader. 64-bit integer types honor the jstype
// option (number / bigint), defaulting to a string representation; message
// fields delegate to the message type's internalBinaryRead.
func (g *generator) getReaderMethod(field *descriptorpb.FieldDescriptorProto) string {
	switch field.GetType() {
	case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
		return "reader.double()"
	case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
		return "reader.float()"
	case descriptorpb.FieldDescriptorProto_TYPE_INT64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.int64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.int64().toBigInt()"
			}
		}
		return "reader.int64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.uint64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.uint64().toBigInt()"
			}
		}
		return "reader.uint64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_INT32:
		return "reader.int32()"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.fixed64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.fixed64().toBigInt()"
			}
		}
		return "reader.fixed64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
		return "reader.fixed32()"
	case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
		return "reader.bool()"
	case descriptorpb.FieldDescriptorProto_TYPE_STRING:
		return "reader.string()"
	case descriptorpb.FieldDescriptorProto_TYPE_BYTES:
		return "reader.bytes()"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT32:
		return "reader.uint32()"
	case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
		// Enums are transmitted as varint int32 on the wire
		return "reader.int32()"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
		return "reader.sfixed32()"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.sfixed64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.sfixed64().toBigInt()"
			}
		}
		return "reader.sfixed64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
		return "reader.sint32()"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT64:
		// Check for jstype option
		if
 field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.sint64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.sint64().toBigInt()"
			}
		}
		return "reader.sint64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE:
		typeName := g.stripPackage(field.GetTypeName())
		return fmt.Sprintf("%s.internalBinaryRead(reader, reader.uint32(), options)", typeName)
	default:
		return "reader.string()"
	}
}

// getReaderMethodWithMerge is like getReaderMethod, but for message-typed
// fields it passes the existing value (existingVar) into internalBinaryRead so
// repeated occurrences of the same field merge into one message instead of
// overwriting it. Non-message fields fall back to getReaderMethod.
func (g *generator) getReaderMethodWithMerge(field *descriptorpb.FieldDescriptorProto, existingVar string) string {
	if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE {
		typeName := g.stripPackage(field.GetTypeName())
		return fmt.Sprintf("%s.internalBinaryRead(reader, reader.uint32(), options, %s)", typeName, existingVar)
	}
	return g.getReaderMethod(field)
}

// getReaderMethodSimple returns the reader expression for scalar values inside
// a packed repeated field. It mirrors getReaderMethod for the packable scalar
// types only (no string/bytes/message cases, which cannot be packed).
func (g *generator) getReaderMethodSimple(field *descriptorpb.FieldDescriptorProto) string {
	// Simpler reader for packed repeated fields (no length prefix)
	switch field.GetType() {
	case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
		return "reader.double()"
	case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
		return "reader.float()"
	case descriptorpb.FieldDescriptorProto_TYPE_INT64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.int64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.int64().toBigInt()"
			}
		}
		return "reader.int64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.uint64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.uint64().toBigInt()"
			}
		}
		return "reader.uint64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_INT32:
		return "reader.int32()"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.fixed64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.fixed64().toBigInt()"
			}
		}
		return "reader.fixed64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
		return "reader.fixed32()"
	case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
		return "reader.bool()"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT32:
		return "reader.uint32()"
	case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
		// Enums are transmitted as varint int32 on the wire
		return "reader.int32()"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
		return "reader.sfixed32()"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.sfixed64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.sfixed64().toBigInt()"
			}
		}
		return "reader.sfixed64().toString()"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
		return "reader.sint32()"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT64:
		// Check for jstype option
		if field.Options != nil && field.GetOptions().Jstype != nil {
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NUMBER {
				return "reader.sint64().toNumber()"
			}
			if field.GetOptions().GetJstype() == descriptorpb.FieldOptions_JS_NORMAL {
				return "reader.sint64().toBigInt()"
			}
		}
		return "reader.sint64().toString()"
	default:
		return
 "reader.int32()"
	}
}

// getWriterMethod returns a complete TypeScript statement that writes one value
// of this field (tag + payload). Message fields fork the writer and delegate to
// the message type's internalBinaryWrite; scalars use the matching writer method.
func (g *generator) getWriterMethod(field *descriptorpb.FieldDescriptorProto, varName string) string {
	wireType := g.getWireType(field)

	switch field.GetType() {
	case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE:
		typeName := g.stripPackage(field.GetTypeName())
		return fmt.Sprintf("%s.internalBinaryWrite(%s, writer.tag(%d, %s).fork(), options).join();",
			typeName, varName, field.GetNumber(), wireType)
	default:
		method := g.getWriterMethodName(field)
		return fmt.Sprintf("writer.tag(%d, %s).%s(%s);", field.GetNumber(), wireType, method, varName)
	}
}

// getMapValueWriter returns the chained writer call for a map entry's value
// (always field number 2 of the synthetic map-entry message).
func (g *generator) getMapValueWriter(field *descriptorpb.FieldDescriptorProto, varName string) string {
	wireType := g.getWireType(field)
	methodName := g.getWriterMethodName(field)
	return fmt.Sprintf(".tag(2, %s).%s(%s)", wireType, methodName, varName)
}

// getMapKeyWriter returns the chained writer call for a map entry's key
// (always field number 1 of the synthetic map-entry message).
func (g *generator) getMapKeyWriter(field *descriptorpb.FieldDescriptorProto, varName string) string {
	wireType := g.getWireType(field)
	writerMethod := g.getWriterMethodName(field)
	return fmt.Sprintf(".tag(1, %s).%s(%s)", wireType, writerMethod, varName)
}

// wireTypeImportAlias returns the " as WireType$" alias suffix for the import
// clause when the WireType runtime name had to be renamed to avoid a collision.
func (g *generator) wireTypeImportAlias() string {
	if g.wireTypeRef == "WireType$" {
		return " as WireType$"
	}
	return ""
}

// The *Import helpers below all follow the same pattern: return the import
// clause for one @protobuf-ts runtime name, aliased with a trailing "$" when
// the generator recorded a collision with a local type name.

func (g *generator) unknownFieldHandlerImport() string {
	if g.unknownFieldHandlerRef == "UnknownFieldHandler$" {
		return "UnknownFieldHandler as UnknownFieldHandler$"
	}
	return "UnknownFieldHandler"
}

func (g *generator) partialMessageImport() string {
	if g.partialMessageRef == "PartialMessage$" {
		return "PartialMessage as PartialMessage$"
	}
	return "PartialMessage"
}

func (g *generator) binaryReadOptionsImport() string {
	if g.binaryReadOptionsRef == "BinaryReadOptions$" {
		return "BinaryReadOptions as BinaryReadOptions$"
	}
	return "BinaryReadOptions"
}

func (g *generator) binaryWriteOptionsImport() string {
	if g.binaryWriteOptionsRef == "BinaryWriteOptions$" {
		return "BinaryWriteOptions as BinaryWriteOptions$"
	}
	return "BinaryWriteOptions"
}

func (g *generator) iBinaryReaderImport() string {
	if g.iBinaryReaderRef == "IBinaryReader$" {
		return "IBinaryReader as IBinaryReader$"
	}
	return "IBinaryReader"
}

func (g *generator) iBinaryWriterImport() string {
	if g.iBinaryWriterRef == "IBinaryWriter$" {
		return "IBinaryWriter as IBinaryWriter$"
	}
	return "IBinaryWriter"
}

func (g *generator) reflectionMergePartialImport() string {
	if g.reflectionMergePartialRef == "reflectionMergePartial$" {
		return "reflectionMergePartial as reflectionMergePartial$"
	}
	return "reflectionMergePartial"
}

func (g *generator) scalarTypeImport() string {
	if g.scalarTypeRef == "ScalarType$" {
		return "ScalarType as ScalarType$"
	}
	return "ScalarType"
}

func (g *generator) longTypeImport() string {
	if g.longTypeRef == "LongType$" {
		return "LongType as LongType$"
	}
	return "LongType"
}

func (g *generator) pbLongImport() string {
	if g.pbLongRef == "PbLong$" {
		return "PbLong as PbLong$"
	}
	return "PbLong"
}

func (g *generator) typeofJsonValueImport() string {
	if g.typeofJsonValueRef == "typeofJsonValue$" {
		return "typeofJsonValue as typeofJsonValue$"
	}
	return "typeofJsonValue"
}

func (g *generator) isJsonObjectImport() string {
	if g.isJsonObjectRef == "isJsonObject$" {
		return "isJsonObject as isJsonObject$"
	}
	return "isJsonObject"
}

func (g *generator) jsonObjectImport() string {
	if g.jsonObjectRef == "JsonObject$" {
		return "JsonObject as JsonObject$"
	}
	return "JsonObject"
}

func (g *generator) jsonValueImport() string {
	if g.jsonValueRef == "JsonValue$" {
		return "JsonValue as JsonValue$"
	}
	return "JsonValue"
}

func (g *generator) jsonWriteOptionsImport() string {
	if g.jsonWriteOptionsRef == "jsonWriteOptions$" {
		return "jsonWriteOptions as jsonWriteOptions$"
	}
	return "jsonWriteOptions"
}

func (g *generator)
 jsonWriteOptionsTypeImport() string {
	if g.jsonWriteOptionsTypeRef == "JsonWriteOptions$" {
		return "JsonWriteOptions as JsonWriteOptions$"
	}
	return "JsonWriteOptions"
}

func (g *generator) iMessageTypeImport() string {
	if g.iMessageTypeRef == "IMessageType$" {
		return "IMessageType as IMessageType$"
	}
	return "IMessageType"
}

func (g *generator) jsonReadOptionsImport() string {
	if g.jsonReadOptionsRef == "JsonReadOptions$" {
		return "JsonReadOptions as JsonReadOptions$"
	}
	return "JsonReadOptions"
}

func (g *generator) lowerCamelCaseImport() string {
	if g.lowerCamelCaseRef == "lowerCamelCase$" {
		return "lowerCamelCase as lowerCamelCase$"
	}
	return "lowerCamelCase"
}

// methodCallTypeName maps a method's client/server streaming flags to the
// protobuf-ts runtime-rpc call type name used in the generated client.
func methodCallTypeName(method *descriptorpb.MethodDescriptorProto) string {
	cs := method.GetClientStreaming()
	ss := method.GetServerStreaming()
	if cs && ss {
		return "DuplexStreamingCall"
	} else if ss {
		return "ServerStreamingCall"
	} else if cs {
		return "ClientStreamingCall"
	}
	return "UnaryCall"
}

// callTypeImportClause returns the import clause for a runtime-rpc call type,
// adding an "as" alias when the generator assigned one in callTypeRefs.
func (g *generator) callTypeImportClause(name string) string {
	if ref, ok := g.callTypeRefs[name]; ok && ref != name {
		return name + " as " + ref
	}
	return name
}

// getWireType returns the protobuf wire-type expression (e.g. "WireType.Varint")
// for this field, using the possibly-aliased WireType reference.
func (g *generator) getWireType(field *descriptorpb.FieldDescriptorProto) string {
	wt := g.wireTypeRef
	switch field.GetType() {
	case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE,
		descriptorpb.FieldDescriptorProto_TYPE_FIXED64,
		descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
		return wt + ".Bit64"
	case descriptorpb.FieldDescriptorProto_TYPE_FLOAT,
		descriptorpb.FieldDescriptorProto_TYPE_FIXED32,
		descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
		return wt + ".Bit32"
	case descriptorpb.FieldDescriptorProto_TYPE_STRING,
		descriptorpb.FieldDescriptorProto_TYPE_BYTES,
		descriptorpb.FieldDescriptorProto_TYPE_MESSAGE:
		return wt + ".LengthDelimited"
	default:
		// All remaining scalar types (ints, bool, enum) are varint-encoded
		return wt + ".Varint"
	}
}

// getWriterMethodName returns the binary writer method name for a scalar
// field type (enums are written as int32 on the wire).
func (g *generator) getWriterMethodName(field *descriptorpb.FieldDescriptorProto) string {
	switch field.GetType() {
	case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE:
		return "double"
	case descriptorpb.FieldDescriptorProto_TYPE_FLOAT:
		return "float"
	case descriptorpb.FieldDescriptorProto_TYPE_INT64:
		return "int64"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT64:
		return "uint64"
	case descriptorpb.FieldDescriptorProto_TYPE_INT32:
		return "int32"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED64:
		return "fixed64"
	case descriptorpb.FieldDescriptorProto_TYPE_FIXED32:
		return "fixed32"
	case descriptorpb.FieldDescriptorProto_TYPE_BOOL:
		return "bool"
	case descriptorpb.FieldDescriptorProto_TYPE_STRING:
		return "string"
	case descriptorpb.FieldDescriptorProto_TYPE_BYTES:
		return "bytes"
	case descriptorpb.FieldDescriptorProto_TYPE_UINT32:
		return "uint32"
	case descriptorpb.FieldDescriptorProto_TYPE_ENUM:
		return "int32"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED32:
		return "sfixed32"
	case descriptorpb.FieldDescriptorProto_TYPE_SFIXED64:
		return "sfixed64"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT32:
		return "sint32"
	case descriptorpb.FieldDescriptorProto_TYPE_SINT64:
		return "sint64"
	default:
		return "string"
	}
}

// getWriteCondition returns the TypeScript condition that guards writing this
// field during serialization, or "" when the field is written unconditionally
// (e.g. repeated/map fields, which have no skippable default here).
func (g *generator) getWriteCondition(field *descriptorpb.FieldDescriptorProto, fieldName string) string {
	isProto2 := g.file.GetSyntax() == "proto2" || g.file.GetSyntax() == ""
	isProto3Optional := field.Proto3Optional != nil && *field.Proto3Optional

	// Optional message fields (proto2, proto3 implicit or explicit optional) use truthy check
	if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE &&
		field.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
		return fmt.Sprintf("message.%s", fieldName)
	}

	// Proto2 optional fields (non-message) need undefined check
	if isProto2 && field.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL {
		return fmt.Sprintf("message.%s !==
 undefined", fieldName)
	}
	// Proto3 optional SCALARS and ENUMS need undefined check
	if isProto3Optional {
		return fmt.Sprintf("message.%s !== undefined", fieldName)
	}

	if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_BYTES {
		return fmt.Sprintf("message.%s.length", fieldName)
	}

	defaultVal := g.getDefaultValue(field)
	if defaultVal == "" || defaultVal == "[]" || defaultVal == "{}" {
		return ""
	}
	// Proto3 implicit scalars: only write when the value differs from the default
	return fmt.Sprintf("message.%s !== %s", fieldName, defaultVal)
}

// generateEnum emits the TypeScript "export enum" declaration for a protobuf
// enum, including its JSDoc block (leading/detached/trailing comments,
// @deprecated markers, and the @generated annotation), a synthetic 0 value
// when the proto enum has none, common-prefix stripping of value names, and
// alias handling for values that share a number.
func (g *generator) generateEnum(enum *descriptorpb.EnumDescriptorProto, parentPrefix string, protoParentPrefix string, enumPath []int32) {
	baseName := enum.GetName()
	// Only escape top-level types (nested types don't need escaping)
	escapedName := baseName
	if parentPrefix == "" {
		escapedName = escapeTypescriptKeyword(baseName)
	}
	enumName := parentPrefix + escapedName
	protoName := protoParentPrefix + baseName

	// Check if this type has a collision suffix
	pkgPrefix := ""
	if g.file.Package != nil && *g.file.Package != "" {
		pkgPrefix = *g.file.Package + "."
+ } + fullProtoName := pkgPrefix + protoName + if suffix, exists := g.typeNameSuffixes[fullProtoName]; exists && suffix > 0 { + enumName = enumName + fmt.Sprintf("$%d", suffix) + } + + // Add leading detached comments before enum JSDoc + if len(enumPath) > 0 { + detachedComments := g.getLeadingDetachedComments(enumPath) + if len(detachedComments) > 0 { + for idx, detached := range detachedComments { + detached = strings.TrimRight(detached, "\n") + for _, line := range strings.Split(detached, "\n") { + if line == "" { + g.pNoIndent("// ") + } else { + g.pNoIndent("// %s", line) + } + } + if idx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + g.pNoIndent("") + } + } + + g.pNoIndent("/**") + + // Add leading and trailing comments if available + if len(enumPath) > 0 { + leadingComments, hasLeading := g.getLeadingComments(enumPath) + trailingComments := g.getEnumTrailingComments(enumPath) + + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(leadingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + // Add separator after leading comments + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + + if trailingComments != "" { + hasTrailingBlank := strings.HasSuffix(trailingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + trailingComments = strings.TrimSuffix(trailingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(trailingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + // Add separator after trailing comments + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + 
g.pNoIndent(" *") + } + } + + } + + // Add @deprecated if enum has deprecated option OR file is deprecated + if (enum.Options != nil && enum.GetOptions().GetDeprecated()) || g.isFileDeprecated() { + g.pNoIndent(" * @deprecated") + } + + // protoParentPrefix already has dots as separators + g.pNoIndent(" * @generated from protobuf enum %s%s", pkgPrefix, protoName) + g.pNoIndent(" */") + g.pNoIndent("export enum %s {", enumName) + + // Check if enum has a zero value + hasZero := false + for _, value := range enum.Value { + if value.GetNumber() == 0 { + hasZero = true + break + } + } + + // Add synthetic zero value if needed + if !hasZero { + g.indent = " " + g.p("/**") + g.p(" * @generated synthetic value - protobuf-ts requires all enums to have a 0 value") + g.p(" */") + g.p("UNSPECIFIED$ = 0,") + g.indent = "" + } + + // Detect common prefix + commonPrefix := g.detectEnumPrefix(enum) + + // Build map from number to first value name and index (for alias handling) + firstValueForNumber := make(map[int32]string) + firstValueIndexForNumber := make(map[int32]int) + for idx, value := range enum.Value { + num := value.GetNumber() + if _, exists := firstValueForNumber[num]; !exists { + firstValueForNumber[num] = value.GetName() + firstValueIndexForNumber[num] = idx + } + } + + for i, value := range enum.Value { + g.indent = " " + + // Build path to this enum value: [5 or 4, enumIndex, 2, valueIndex] + valuePath := append(enumPath, 2, int32(i)) + + // Check if this is an alias (not the first value with this number) + isAlias := value.GetName() != firstValueForNumber[value.GetNumber()] + + // For aliases, use the first value's comments + var leadingComments, trailingComments string + var hasLeading bool + if isAlias { + firstIdx := firstValueIndexForNumber[value.GetNumber()] + firstValuePath := append(enumPath, 2, int32(firstIdx)) + leadingComments, hasLeading = g.getLeadingComments(firstValuePath) + trailingComments = g.getTrailingComments(firstValuePath) + } else { + 
leadingComments, hasLeading = g.getLeadingComments(valuePath) + trailingComments = g.getTrailingComments(valuePath) + } + + g.p("/**") + + // Add leading comments if present + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + for _, line := range strings.Split(leadingComments, "\n") { + if line == "" { + g.p(" *") + } else { + g.p(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.p(" *") + g.p(" *") + } else { + g.p(" *") + } + } + + // Add trailing comments if present (before @generated line) + // For aliases, we use the first value's trailing comments (fetched above) + if trailingComments != "" { + for _, line := range strings.Split(trailingComments, "\n") { + line = strings.TrimRight(line, " \t") + if line == "" { + g.p(" *") + } else { + g.p(" * %s", escapeJSDocComment(line)) + } + } + g.p(" *") + } + + // Add @deprecated if value has deprecated option OR file is deprecated + // For aliases, use the first value's deprecated status (not the alias's) + var checkValue *descriptorpb.EnumValueDescriptorProto + if isAlias { + checkValue = enum.Value[firstValueIndexForNumber[value.GetNumber()]] + } else { + checkValue = value + } + valueIsDeprecated := checkValue.Options != nil && checkValue.GetOptions().GetDeprecated() + if valueIsDeprecated || g.isFileDeprecated() { + g.p(" * @deprecated") + } + + // Build the @generated line with deprecated annotation if applicable + deprecatedAnnotation := "" + if valueIsDeprecated { + deprecatedAnnotation = " [deprecated = true]" + } + + // For aliases (multiple values with same number), show the first value's name + nameToShow := firstValueForNumber[value.GetNumber()] + g.p(" * @generated from protobuf enum value: %s = %d%s;", nameToShow, value.GetNumber(), deprecatedAnnotation) + g.p(" */") + + // Strip common prefix + tsName := value.GetName() + 
		if commonPrefix != "" {
			tsName = strings.TrimPrefix(tsName, commonPrefix)
		}

		// No comma on last value
		comma := ","
		if i == len(enum.Value)-1 {
			comma = ""
		}
		g.p("%s = %d%s", tsName, value.GetNumber(), comma)
	}

	g.indent = ""
	g.pNoIndent("}")
}

// detectEnumPrefix returns the shared UPPER_SNAKE_CASE prefix (derived from the
// enum's name, trailing "_" included) if every value name starts with it and
// every stripped name remains a valid identifier; otherwise it returns "".
func (g *generator) detectEnumPrefix(enum *descriptorpb.EnumDescriptorProto) string {
	if len(enum.Value) == 0 {
		return ""
	}

	// Create possible prefix from enum name
	// Convert enum name to UPPER_SNAKE_CASE
	// For example, "MyEnum" => "MY_ENUM_", "const_enum" => "CONST_ENUM_"
	enumName := enum.GetName()

	// Match protobuf-ts algorithm:
	// 1. Prepend "_" before every uppercase letter
	// 2. Strip leading "_" if present
	// 3. Uppercase
	// 4. Append "_"
	var prefixBuilder strings.Builder
	for _, r := range enumName {
		if r >= 'A' && r <= 'Z' {
			prefixBuilder.WriteRune('_')
		}
		prefixBuilder.WriteRune(r)
	}
	intermediate := prefixBuilder.String()
	if len(intermediate) > 0 && intermediate[0] == '_' {
		intermediate = intermediate[1:]
	}
	enumPrefix := strings.ToUpper(intermediate) + "_"

	// Check if all enum values start with this prefix
	allHavePrefix := true
	for _, v := range enum.Value {
		if !strings.HasPrefix(v.GetName(), enumPrefix) {
			allHavePrefix = false
			break
		}
	}

	if !allHavePrefix {
		return ""
	}

	// Check if stripped names are valid (start with uppercase letter, at least 2 chars)
	for _, v := range enum.Value {
		stripped := strings.TrimPrefix(v.GetName(), enumPrefix)
		// Must have at least 2 characters and start with uppercase letter
		if len(stripped) < 2 || !(stripped[0] >= 'A' && stripped[0] <= 'Z') {
			return ""
		}
	}

	return enumPrefix
}

// generateClientFile renders the generated TypeScript client file for one proto
// file and returns it as a string. It sets up a fresh generator, then emits the
// header comments and import/collision bookkeeping (continues beyond this chunk).
func generateClientFile(file *descriptorpb.FileDescriptorProto, allFiles []*descriptorpb.FileDescriptorProto, params params) string {
	g := &generator{
		params:            params,
		file:              file,
		allFiles:          allFiles,
		importedTypeNames: make(map[string]bool),
		localTypeNames:
make(map[string]bool), + importAliases: make(map[string]string), + rawImportNames: make(map[string]string), + wireTypeRef: "WireType", + scalarTypeRef: "ScalarType", + } + + // Header + g.pNoIndent("// @generated by protobuf-ts 2.11.1 with parameter long_type_string") + pkgComment := "" + syntax := file.GetSyntax() + if syntax == "" { + syntax = "proto2" // Default to proto2 when syntax is not specified + } + if file.Package != nil && *file.Package != "" { + pkgComment = fmt.Sprintf(" (package \"%s\", syntax %s)", *file.Package, syntax) + } else { + pkgComment = fmt.Sprintf(" (syntax %s)", syntax) + } + g.pNoIndent("// @generated from protobuf file \"%s\"%s", file.GetName(), pkgComment) + g.pNoIndent("// tslint:disable") + // Add file-level deprecation comment if the entire file is deprecated + if g.isFileDeprecated() { + g.pNoIndent("// @deprecated") + } + + // Add file-level leading detached comments (license headers, etc.) + if file.SourceCodeInfo != nil { + for _, loc := range file.SourceCodeInfo.Location { + // Check for syntax field with detached comments + if len(loc.Path) == 1 && loc.Path[0] == 12 && len(loc.LeadingDetachedComments) > 0 { + // Blank line before the license header + g.pNoIndent("//") + for blockIdx, detached := range loc.LeadingDetachedComments { + if strings.TrimRight(detached, "\n") != "" { + lines := strings.Split(detached, "\n") + hasTrailingNewline := len(lines) > 0 && lines[len(lines)-1] == "" + endIdx := len(lines) + if hasTrailingNewline { + endIdx = len(lines) - 1 + } + for i := 0; i < endIdx; i++ { + line := lines[i] + if line == "" { + g.pNoIndent("//") + } else { + g.pNoIndent("//%s", line) + } + } + if hasTrailingNewline { + g.pNoIndent("//") + } + // Add // separator between blocks (not after last block) + if blockIdx < len(loc.LeadingDetachedComments)-1 { + g.pNoIndent("//") + } + } + } + } + } + } + + // Add package-level leading detached comments (path [2]) + if file.SourceCodeInfo != nil { + for _, loc := range 
file.SourceCodeInfo.Location { + if len(loc.Path) == 1 && loc.Path[0] == 2 && len(loc.LeadingDetachedComments) > 0 { + g.pNoIndent("//") + for blockIdx, detached := range loc.LeadingDetachedComments { + if strings.TrimRight(detached, "\n") != "" { + lines := strings.Split(detached, "\n") + hasTrailingNewline := len(lines) > 0 && lines[len(lines)-1] == "" + endIdx := len(lines) + if hasTrailingNewline { + endIdx = len(lines) - 1 + } + for i := 0; i < endIdx; i++ { + line := lines[i] + if line == "" { + g.pNoIndent("//") + } else { + g.pNoIndent("//%s", line) + } + } + if hasTrailingNewline { + g.pNoIndent("//") + } + if blockIdx < len(loc.LeadingDetachedComments)-1 { + g.pNoIndent("//") + } + } + } + } + } + } + + baseFileName := strings.TrimSuffix(filepath.Base(file.GetName()), ".proto") + + // Build depFiles and pre-compute import aliases for type name collision detection + depFiles := make(map[string]*descriptorpb.FileDescriptorProto) + currentFileDir := filepath.Dir(file.GetName()) + for _, dep := range file.Dependency { + depFile := g.findFileByName(dep) + if depFile != nil { + depPath := strings.TrimSuffix(dep, ".proto") + relPath := g.getRelativeImportPath(currentFileDir, depPath) + depFiles[relPath] = depFile + } + } + // Include files transitively reachable via import public + for _, pubFile := range g.collectTransitivePublicDeps(file) { + depPath := strings.TrimSuffix(pubFile.GetName(), ".proto") + relPath := g.getRelativeImportPath(currentFileDir, depPath) + if _, exists := depFiles[relPath]; !exists { + depFiles[relPath] = pubFile + } + } + g.precomputeImportAliases(depFiles) + + // Detect cross-file type name collisions in client imports. + // In the client file, local types (from current file) are also imported, + // so they can collide with external imports that have the same TS name. + // Registration order: method by method, input then output — first to claim a name wins. 
+ { + claimed := make(map[string]string) // raw tsName → first proto type + seenProto := make(map[string]bool) + for _, service := range file.Service { + for _, method := range service.Method { + for _, typeName := range []string{method.GetInputType(), method.GetOutputType()} { + if seenProto[typeName] { + continue + } + seenProto[typeName] = true + if _, alreadyAliased := g.importAliases[typeName]; alreadyAliased { + continue + } + tsName := g.stripPackage(typeName) + if existing, ok := claimed[tsName]; ok && existing != typeName { + taken := make(map[string]bool) + for _, v := range claimed { + taken[v] = true + } + for _, v := range g.importAliases { + taken[v] = true + } + alias := tsName + "$" + for counter := 2; taken[alias]; counter++ { + alias = tsName + "$" + strconv.Itoa(counter) + } + g.importAliases[typeName] = alias + g.rawImportNames[typeName] = tsName + } else if !ok { + claimed[tsName] = typeName + } + } + } + } + } + + // Build set of runtime-rpc names actually imported based on methods + usedCallTypes := make(map[string]bool) + hasAnyMethod := false + for _, service := range file.Service { + for _, method := range service.Method { + hasAnyMethod = true + cs := method.GetClientStreaming() + ss := method.GetServerStreaming() + if cs && ss { + usedCallTypes["DuplexStreamingCall"] = true + } else if ss { + usedCallTypes["ServerStreamingCall"] = true + } else if cs { + usedCallTypes["ClientStreamingCall"] = true + } else { + usedCallTypes["UnaryCall"] = true + } + } + } + if hasAnyMethod { + usedCallTypes["RpcOptions"] = true + usedCallTypes["stackIntercept"] = true + } + // Actual streaming/unary call type names (as opposed to other runtime-rpc names like RpcOptions) + actualCallTypeNames := map[string]bool{ + "UnaryCall": true, "ServerStreamingCall": true, + "ClientStreamingCall": true, "DuplexStreamingCall": true, + } + // Check for runtime-rpc name collisions in client file. 
+ // Proto types imported from ./test may collide with runtime-rpc imports + // only when that runtime-rpc name is actually imported. + // For call type names (UnaryCall, ServerStreamingCall, etc.), the collision + // resolution depends on protobuf-ts registration order: within each method, + // the call type is registered BEFORE input/output types. The first + // registration of a name wins; the second gets aliased with $. + g.callTypeRefs = map[string]string{ + "UnaryCall": "UnaryCall", "ServerStreamingCall": "ServerStreamingCall", + "ClientStreamingCall": "ClientStreamingCall", "DuplexStreamingCall": "DuplexStreamingCall", + } + runtimeClaimed := make(map[string]bool) // call type names registered so far + protoClaimed := make(map[string]bool) // proto types that claimed a call type name first + for _, service := range file.Service { + for _, method := range service.Method { + // Step 1: Register call type (registered before input/output in protobuf-ts) + callType := methodCallTypeName(method) + runtimeClaimed[callType] = true + + // Step 2: Check input/output types + for _, typeName := range []string{method.GetInputType(), method.GetOutputType()} { + if _, alreadyAliased := g.importAliases[typeName]; alreadyAliased { + continue + } + tsName := g.stripPackage(typeName) + if tsName == "stackIntercept" { + continue + } + if actualCallTypeNames[tsName] { + if protoClaimed[tsName] { + continue // already handled + } + if runtimeClaimed[tsName] && usedCallTypes[tsName] { + // Runtime was registered first → alias the proto type + g.importAliases[typeName] = tsName + "$" + g.rawImportNames[typeName] = tsName + } else if usedCallTypes[tsName] { + // Proto registered first → runtime will be aliased + protoClaimed[tsName] = true + g.callTypeRefs[tsName] = tsName + "$" + } + continue + } + if usedCallTypes[tsName] { + g.importAliases[typeName] = tsName + "$" + g.rawImportNames[typeName] = tsName + } + } + } + } + + // Check if any service name collides with runtime-rpc 
import names. + // When a service is named "RpcTransport" or "ServiceInfo", the runtime-rpc import is aliased. + // When a service is named after a call type (UnaryCall, ServerStreamingCall, etc.), + // the proto service import is aliased instead (call type stays unaliased for method signatures). + g.stackInterceptRef = "stackIntercept" + g.rpcTransportRef = "RpcTransport" + g.serviceInfoRef = "ServiceInfo" + g.serviceImportAliases = make(map[string]string) + for _, service := range file.Service { + svcName := escapeTypescriptKeyword(service.GetName()) + if service.GetName() == "RpcTransport" { + g.rpcTransportRef = "RpcTransport$" + } + if service.GetName() == "ServiceInfo" { + g.serviceInfoRef = "ServiceInfo$" + } + if usedCallTypes[svcName] { + g.serviceImportAliases[svcName] = svcName + "$" + } + } + // Also check if any proto message type used in service methods collides with + // RpcTransport, ServiceInfo, or stackIntercept runtime-rpc imports. + for _, service := range file.Service { + for _, method := range service.Method { + for _, typeName := range []string{method.GetInputType(), method.GetOutputType()} { + tsName := g.stripPackage(typeName) + if tsName == "RpcTransport" { + g.rpcTransportRef = "RpcTransport$" + } + if tsName == "ServiceInfo" { + g.serviceInfoRef = "ServiceInfo$" + } + if tsName == "stackIntercept" { + g.stackInterceptRef = "stackIntercept$" + } + } + } + } + + // Collect imports + seen := make(map[string]bool) + + // Find the first service with methods (the "primary" service whose types get special positioning) + primaryServiceIdx := 0 + if len(file.Service) > 0 { + for si := 0; si < len(file.Service); si++ { + if len(file.Service[si].Method) > 0 { + primaryServiceIdx = si + break + } + } + } + + // Collect all types used in primary service to avoid importing them early + service1Types := make(map[string]bool) + if len(file.Service) > 0 { + for _, method := range file.Service[primaryServiceIdx].Method { + 
service1Types[g.stripPackage(method.GetOutputType())] = true + service1Types[g.stripPackage(method.GetInputType())] = true + } + } + + // Find first service (forward order) with a unary method — determines where UnaryCall import goes + firstUnaryServiceIdx := -1 + for si := 0; si < len(file.Service); si++ { + for _, m := range file.Service[si].Method { + if !m.GetClientStreaming() && !m.GetServerStreaming() { + firstUnaryServiceIdx = si + break + } + } + if firstUnaryServiceIdx >= 0 { + break + } + } + + // For services 2..N (in reverse order), output Service + all method types + streaming call types + seenCallTypes := make(map[string]bool) + for svcIdx := len(file.Service) - 1; svcIdx >= 1; svcIdx-- { + service := file.Service[svcIdx] + escapedServiceName := escapeTypescriptKeyword(service.GetName()) + svcImportClause := escapedServiceName + if alias, ok := g.serviceImportAliases[escapedServiceName]; ok { + svcImportClause = escapedServiceName + " as " + alias + } + g.pNoIndent("import { %s } from \"./%s\";", svcImportClause, baseFileName) + + // When service 0 is empty, the first service with methods takes over its role + isFirstMethodService := primaryServiceIdx > 0 && svcIdx == primaryServiceIdx + + if isFirstMethodService && hasAnyMethod { + if g.stackInterceptRef == "stackIntercept$" { + g.pNoIndent("import { stackIntercept as stackIntercept$ } from \"@protobuf-ts/runtime-rpc\";") + } else { + g.pNoIndent("import { stackIntercept } from \"@protobuf-ts/runtime-rpc\";") + } + } + + // Pre-compute which method first uses each type (forward order) for this service + svcFirstMethodForType := map[string]int{} + for mi := 0; mi < len(service.Method); mi++ { + m := service.Method[mi] + rt := g.stripPackage(m.GetOutputType()) + rq := g.stripPackage(m.GetInputType()) + if _, ok := svcFirstMethodForType[rt]; !ok { + svcFirstMethodForType[rt] = mi + } + if _, ok := svcFirstMethodForType[rq]; !ok { + svcFirstMethodForType[rq] = mi + } + } + + for i := len(service.Method) 
- 1; i >= 0; i-- { + method := service.Method[i] + resType := g.stripPackage(method.GetOutputType()) + reqType := g.stripPackage(method.GetInputType()) + resTypeImport := g.formatTypeImport(method.GetOutputType()) + reqTypeImport := g.formatTypeImport(method.GetInputType()) + resTypePath := g.getImportPathForType(method.GetOutputType()) + reqTypePath := g.getImportPathForType(method.GetInputType()) + + if isFirstMethodService { + // Primary service ordering: types first, then call type (same as service 0 section) + if svcFirstMethodForType[resType] == i && !seen[resType] { + g.pNoIndent("import type { %s } from \"%s\";", resTypeImport, resTypePath) + seen[resType] = true + } + if svcFirstMethodForType[reqType] == i && !seen[reqType] { + g.pNoIndent("import type { %s } from \"%s\";", reqTypeImport, reqTypePath) + seen[reqType] = true + } + if method.GetClientStreaming() || method.GetServerStreaming() { + var callTypeImport string + if method.GetClientStreaming() && method.GetServerStreaming() { + callTypeImport = "DuplexStreamingCall" + } else if method.GetServerStreaming() { + callTypeImport = "ServerStreamingCall" + } else { + callTypeImport = "ClientStreamingCall" + } + if callTypeImport != "" && !seenCallTypes[callTypeImport] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause(callTypeImport)) + seenCallTypes[callTypeImport] = true + } + } else { + if !seenCallTypes["UnaryCall"] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause("UnaryCall")) + seenCallTypes["UnaryCall"] = true + } + } + } else { + // Types first (only at the method that first uses them), then call type + if svcFirstMethodForType[resType] == i && !seen[resType] && !service1Types[resType] { + g.pNoIndent("import type { %s } from \"%s\";", resTypeImport, resTypePath) + seen[resType] = true + } + if svcFirstMethodForType[reqType] == i && !seen[reqType] && !service1Types[reqType] { + g.pNoIndent("import type { 
%s } from \"%s\";", reqTypeImport, reqTypePath) + seen[reqType] = true + } + + if method.GetClientStreaming() || method.GetServerStreaming() { + var callTypeImport string + if method.GetClientStreaming() && method.GetServerStreaming() { + callTypeImport = "DuplexStreamingCall" + } else if method.GetServerStreaming() { + callTypeImport = "ServerStreamingCall" + } else { + callTypeImport = "ClientStreamingCall" + } + if callTypeImport != "" && !seenCallTypes[callTypeImport] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause(callTypeImport)) + seenCallTypes[callTypeImport] = true + } + } else { + // Unary method — emit UnaryCall import only if this is the first service with unary (forward order) + if firstUnaryServiceIdx == svcIdx && !seenCallTypes["UnaryCall"] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause("UnaryCall")) + seenCallTypes["UnaryCall"] = true + } + } + } + } + + if isFirstMethodService && hasAnyMethod { + g.pNoIndent("import type { RpcOptions } from \"@protobuf-ts/runtime-rpc\";") + } + } + + // RPC imports + if g.rpcTransportRef == "RpcTransport$" { + g.pNoIndent("import type { RpcTransport as RpcTransport$ } from \"@protobuf-ts/runtime-rpc\";") + } else { + g.pNoIndent("import type { RpcTransport } from \"@protobuf-ts/runtime-rpc\";") + } + if g.serviceInfoRef == "ServiceInfo$" { + g.pNoIndent("import type { ServiceInfo as ServiceInfo$ } from \"@protobuf-ts/runtime-rpc\";") + } else { + g.pNoIndent("import type { ServiceInfo } from \"@protobuf-ts/runtime-rpc\";") + } + + // First service + methods types with special ordering + if len(file.Service) > 0 { + service := file.Service[0] + escapedServiceName := escapeTypescriptKeyword(service.GetName()) + svcImportClause := escapedServiceName + if alias, ok := g.serviceImportAliases[escapedServiceName]; ok { + svcImportClause = escapedServiceName + " as " + alias + } + g.pNoIndent("import { %s } from \"./%s\";", 
svcImportClause, baseFileName) + + if primaryServiceIdx == 0 { + // Collect method 0 types for filtering + method0Types := make(map[string]bool) + if len(service.Method) > 0 { + method0 := service.Method[0] + method0Types[g.stripPackage(method0.GetOutputType())] = true + method0Types[g.stripPackage(method0.GetInputType())] = true + } + + // Import entry: either a type import or a streaming call type import + type importEntry struct { + typeName string // TS name (with alias if applicable, used for dedup) + typeImport string // Import clause (e.g., "Data" or "Data as Data$") + typePath string + callType string // non-empty for streaming call type imports ("duplex", "client", "server") + } + + var imports []importEntry + + // Pre-compute which method index first uses each type (forward order). + // This determines where the type import should appear in the N→1 prepend stack. + firstMethodForType := map[string]int{} + for i := 0; i < len(service.Method); i++ { + method := service.Method[i] + resType := g.stripPackage(method.GetOutputType()) + reqType := g.stripPackage(method.GetInputType()) + if _, ok := firstMethodForType[resType]; !ok { + firstMethodForType[resType] = i + } + if _, ok := firstMethodForType[reqType]; !ok { + firstMethodForType[reqType] = i + } + } + + // Collect all method imports in N→1 order (matching protobuf-ts prepend semantics). + // For each method, only add type imports for types that are FIRST used by this method. 
+ var deferredInputs []importEntry + + for i := len(service.Method) - 1; i >= 1; i-- { + method := service.Method[i] + + resType := g.stripPackage(method.GetOutputType()) + reqType := g.stripPackage(method.GetInputType()) + resTypeImport := g.formatTypeImport(method.GetOutputType()) + reqTypeImport := g.formatTypeImport(method.GetInputType()) + resTypePath := g.getImportPathForType(method.GetOutputType()) + reqTypePath := g.getImportPathForType(method.GetInputType()) + + isStreaming := method.GetClientStreaming() || method.GetServerStreaming() + + // Skip non-streaming methods if both types are in method 0 + if !isStreaming && method0Types[resType] && method0Types[reqType] { + continue + } + + if isStreaming { + // Only add types that are first used by this method + if firstMethodForType[resType] == i && !method0Types[resType] && !seen[resType] { + imports = append(imports, importEntry{typeName: resType, typeImport: resTypeImport, typePath: resTypePath}) + seen[resType] = true + } + if firstMethodForType[reqType] == i && !method0Types[reqType] && !seen[reqType] { + imports = append(imports, importEntry{typeName: reqType, typeImport: reqTypeImport, typePath: reqTypePath}) + seen[reqType] = true + } + + // Add call type marker + var callType string + if method.GetClientStreaming() && method.GetServerStreaming() { + callType = "duplex" + } else if method.GetServerStreaming() { + callType = "server" + } else if method.GetClientStreaming() { + callType = "client" + } + imports = append(imports, importEntry{callType: callType}) + } else { + // Non-streaming: collect types (includes types first used by this or lower methods) + // Output first + if !method0Types[resType] && !seen[resType] { + imports = append(imports, importEntry{typeName: resType, typeImport: resTypeImport, typePath: resTypePath}) + seen[resType] = true + + // Check if any deferred inputs match this output's path and emit them now + var remainingDeferred []importEntry + for _, deferred := range 
deferredInputs { + if deferred.typePath == resTypePath { + imports = append(imports, deferred) + } else { + remainingDeferred = append(remainingDeferred, deferred) + } + } + deferredInputs = remainingDeferred + } + + // Input: emit immediately if same path as output, otherwise defer + if !method0Types[reqType] && !seen[reqType] { + if reqType == resType || reqTypePath == resTypePath { + imports = append(imports, importEntry{typeName: reqType, typeImport: reqTypeImport, typePath: reqTypePath}) + seen[reqType] = true + } else { + deferredInputs = append(deferredInputs, importEntry{typeName: reqType, typeImport: reqTypeImport, typePath: reqTypePath}) + seen[reqType] = true + } + } + } + } + + // Append any remaining deferred inputs + imports = append(imports, deferredInputs...) + + // Determine method 0's call type (if streaming) so we don't duplicate it + method0CallType := "" + if len(service.Method) > 0 { + m0 := service.Method[0] + if m0.GetClientStreaming() || m0.GetServerStreaming() { + if m0.GetClientStreaming() && m0.GetServerStreaming() { + method0CallType = "duplex" + } else if m0.GetServerStreaming() { + method0CallType = "server" + } else { + method0CallType = "client" + } + } + } + + // When method 0 is streaming and there are unary methods later, + // UnaryCall is prepended last (appears above other streaming call types) + hasUnaryInService := false + if method0CallType != "" { + for _, m := range service.Method { + if !m.GetClientStreaming() && !m.GetServerStreaming() { + hasUnaryInService = true + break + } + } + if hasUnaryInService && !seenCallTypes["UnaryCall"] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause("UnaryCall")) + seenCallTypes["UnaryCall"] = true + } + } + + { + // Deduplicate streaming call types: only emit at last occurrence + // (which corresponds to first registration in protobuf-ts's forward/prepend model) + lastCallTypeIdx := map[string]int{} + for i, entry := range imports { + if 
entry.callType != "" && entry.callType != method0CallType { + lastCallTypeIdx[entry.callType] = i + } + } + + for i, entry := range imports { + if entry.callType != "" { + // Call type entry: only emit at last occurrence, skip method 0's call type + if entry.callType == method0CallType { + continue + } + if idx, ok := lastCallTypeIdx[entry.callType]; ok && idx != i { + continue + } + var callTypeImport string + switch entry.callType { + case "duplex": + callTypeImport = "DuplexStreamingCall" + case "client": + callTypeImport = "ClientStreamingCall" + case "server": + callTypeImport = "ServerStreamingCall" + } + if callTypeImport != "" && !seenCallTypes[callTypeImport] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause(callTypeImport)) + seenCallTypes[callTypeImport] = true + } + } else { + // Type import entry + g.pNoIndent("import type { %s } from \"%s\";", entry.typeImport, entry.typePath) + } + } + } + } // primaryServiceIdx == 0 + } + + // 4. Check if we need stackIntercept (for any method - unary or streaming) + hasUnary := false + for _, service := range file.Service { + for _, method := range service.Method { + if !method.GetClientStreaming() && !method.GetServerStreaming() { + hasUnary = true + break + } + } + if hasUnary { + break + } + } + + // Compute method0IsStreaming for later use + method0IsStreaming := false + if len(file.Service) > 0 && len(file.Service[0].Method) > 0 { + m0 := file.Service[0].Method[0] + method0IsStreaming = m0.GetClientStreaming() || m0.GetServerStreaming() + } + + if hasAnyMethod && primaryServiceIdx == 0 { + if g.stackInterceptRef == "stackIntercept$" { + g.pNoIndent("import { stackIntercept as stackIntercept$ } from \"@protobuf-ts/runtime-rpc\";") + } else { + g.pNoIndent("import { stackIntercept } from \"@protobuf-ts/runtime-rpc\";") + } + } + + // 5. 
Emit method 0 types (output first, then input) + if len(file.Service) > 0 && len(file.Service[0].Method) > 0 { + method := file.Service[0].Method[0] + resType := g.stripPackage(method.GetOutputType()) + reqType := g.stripPackage(method.GetInputType()) + resTypeImport := g.formatTypeImport(method.GetOutputType()) + reqTypeImport := g.formatTypeImport(method.GetInputType()) + resTypePath := g.getImportPathForType(method.GetOutputType()) + reqTypePath := g.getImportPathForType(method.GetInputType()) + + // Import output type first + if !seen[resType] { + g.pNoIndent("import type { %s } from \"%s\";", resTypeImport, resTypePath) + seen[resType] = true + } + // Import input type second + if !seen[reqType] { + g.pNoIndent("import type { %s } from \"%s\";", reqTypeImport, reqTypePath) + seen[reqType] = true + } + + // If method 0 is streaming, emit its call type + if method.GetClientStreaming() || method.GetServerStreaming() { + var callTypeImport string + if method.GetClientStreaming() && method.GetServerStreaming() { + callTypeImport = "DuplexStreamingCall" + } else if method.GetServerStreaming() { + callTypeImport = "ServerStreamingCall" + } else if method.GetClientStreaming() { + callTypeImport = "ClientStreamingCall" + } + if callTypeImport != "" && !seenCallTypes[callTypeImport] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause(callTypeImport)) + seenCallTypes[callTypeImport] = true + } + } + } + + // Emit UnaryCall (if method 0 is unary) and RpcOptions + if len(file.Service) > 0 && primaryServiceIdx == 0 { + if hasUnary && !method0IsStreaming && !seenCallTypes["UnaryCall"] { + g.pNoIndent("import type { %s } from \"@protobuf-ts/runtime-rpc\";", g.callTypeImportClause("UnaryCall")) + seenCallTypes["UnaryCall"] = true + } + if hasAnyMethod { + g.pNoIndent("import type { RpcOptions } from \"@protobuf-ts/runtime-rpc\";") + } + } + + // Generate service clients + for _, service := range file.Service { + 
g.generateServiceClient(service) + } + + return g.b.String() +} + +func (g *generator) generateServiceClient(service *descriptorpb.ServiceDescriptorProto) { + baseName := service.GetName() + serviceName := escapeTypescriptKeyword(baseName) + serviceRef := serviceName + if alias, ok := g.serviceImportAliases[serviceName]; ok { + serviceRef = alias + } + clientName := "I" + serviceName + "Client" + + pkgPrefix := "" + if g.file.Package != nil && *g.file.Package != "" { + pkgPrefix = *g.file.Package + "." + } + + // Get service index for comments + svcIndex := -1 + for i, s := range g.file.Service { + if s.GetName() == baseName { + svcIndex = i + break + } + } + + // Interface - detached comments + if svcIndex >= 0 { + detachedComments := g.getLeadingDetachedComments([]int32{6, int32(svcIndex)}) + if len(detachedComments) > 0 { + for idx, detached := range detachedComments { + detached = strings.TrimRight(detached, "\n") + for _, line := range strings.Split(detached, "\n") { + if line == "" { + g.pNoIndent("// ") + } else { + g.pNoIndent("// %s", line) + } + } + if idx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + g.pNoIndent("") + } + } + + g.pNoIndent("/**") + + // Add service-level leading comments if available + if svcIndex >= 0 { + leadingComments, hasLeading := g.getLeadingComments([]int32{6, int32(svcIndex)}) + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(leadingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + + trailingComments := g.getEnumTrailingComments([]int32{6, int32(svcIndex)}) + if trailingComments != "" { + hasTrailingBlank := 
strings.HasSuffix(trailingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + trailingComments = strings.TrimSuffix(trailingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(trailingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + } + + // Add @deprecated if service has deprecated option OR file is deprecated + if (service.Options != nil && service.GetOptions().GetDeprecated()) || g.isFileDeprecated() { + g.pNoIndent(" * @deprecated") + } + + g.pNoIndent(" * @generated from protobuf service %s%s", pkgPrefix, baseName) + g.pNoIndent(" */") + g.pNoIndent("export interface %s {", clientName) + g.indent = " " + + for methodIdx, method := range service.Method { + reqType := g.stripPackage(method.GetInputType()) + resType := g.stripPackage(method.GetOutputType()) + methodName := escapeMethodName(g.toCamelCase(method.GetName())) + + methodPath := []int32{6, int32(svcIndex), 2, int32(methodIdx)} + detachedComments := g.getLeadingDetachedComments(methodPath) + if len(detachedComments) > 0 { + for idx, detached := range detachedComments { + detached = strings.TrimRight(detached, "\n") + for _, line := range strings.Split(detached, "\n") { + if line == "" { + g.p("// ") + } else { + g.p("// %s", line) + } + } + if idx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + g.pNoIndent("") + } + + g.p("/**") + + // Add method-level leading comments if available + leadingComments, hasLeading := g.getLeadingComments(methodPath) + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(leadingComments, "\n") + for _, line := range lines { + if line == "" { + g.p(" *") 
+ } else { + g.p(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.p(" *") + g.p(" *") + } else { + g.p(" *") + } + } + + trailingComments := g.getEnumTrailingComments(methodPath) + if trailingComments != "" { + hasTrailingBlank := strings.HasSuffix(trailingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + trailingComments = strings.TrimSuffix(trailingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(trailingComments, "\n") + for _, line := range lines { + if line == "" { + g.p(" *") + } else { + g.p(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.p(" *") + g.p(" *") + } else { + g.p(" *") + } + } + + // Add @deprecated if method has deprecated option OR file is deprecated + if (method.Options != nil && method.GetOptions().GetDeprecated()) || g.isFileDeprecated() { + g.p(" * @deprecated") + } + + g.p(" * @generated from protobuf rpc: %s", method.GetName()) + g.p(" */") + + // Determine call type and signature based on streaming + if method.GetClientStreaming() && method.GetServerStreaming() { + // Bidirectional streaming + g.p("%s(options?: RpcOptions): %s<%s, %s>;", methodName, g.callTypeRefs["DuplexStreamingCall"], reqType, resType) + } else if method.GetServerStreaming() { + // Server streaming + g.p("%s(input: %s, options?: RpcOptions): %s<%s, %s>;", methodName, reqType, g.callTypeRefs["ServerStreamingCall"], reqType, resType) + } else if method.GetClientStreaming() { + // Client streaming + g.p("%s(options?: RpcOptions): %s<%s, %s>;", methodName, g.callTypeRefs["ClientStreamingCall"], reqType, resType) + } else { + // Unary + g.p("%s(input: %s, options?: RpcOptions): %s<%s, %s>;", methodName, reqType, g.callTypeRefs["UnaryCall"], reqType, resType) + } + } + + g.indent = "" + g.pNoIndent("}") + + // Implementation - detached comments + if svcIndex >= 0 { + detachedComments := g.getLeadingDetachedComments([]int32{6, int32(svcIndex)}) + if len(detachedComments) > 0 { + for idx, detached := 
range detachedComments { + detached = strings.TrimRight(detached, "\n") + for _, line := range strings.Split(detached, "\n") { + if line == "" { + g.pNoIndent("// ") + } else { + g.pNoIndent("// %s", line) + } + } + if idx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + g.pNoIndent("") + } + } + + g.pNoIndent("/**") + + // Add service-level leading comments if available + if svcIndex >= 0 { + leadingComments, hasLeading := g.getLeadingComments([]int32{6, int32(svcIndex)}) + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(leadingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + } + + // Add service-level trailing comments if available + if svcIndex >= 0 { + trailingComments := g.getEnumTrailingComments([]int32{6, int32(svcIndex)}) + if trailingComments != "" { + hasTrailingBlank := strings.HasSuffix(trailingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + trailingComments = strings.TrimSuffix(trailingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(trailingComments, "\n") + for _, line := range lines { + if line == "" { + g.pNoIndent(" *") + } else { + g.pNoIndent(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.pNoIndent(" *") + g.pNoIndent(" *") + } else { + g.pNoIndent(" *") + } + } + } + + // Add @deprecated if service has deprecated option OR file is deprecated + if (service.Options != nil && service.GetOptions().GetDeprecated()) || g.isFileDeprecated() { + g.pNoIndent(" * @deprecated") + } + + g.pNoIndent(" * @generated from protobuf service %s%s", pkgPrefix, baseName) + g.pNoIndent(" */") + 
g.pNoIndent("export class %sClient implements %s, %s {", serviceName, clientName, g.serviceInfoRef) + g.indent = " " + g.p("typeName = %s.typeName;", serviceRef) + g.p("methods = %s.methods;", serviceRef) + g.p("options = %s.options;", serviceRef) + g.p("constructor(private readonly _transport: %s) {", g.rpcTransportRef) + g.p("}") + + for methodIdx, method := range service.Method { + reqType := g.stripPackage(method.GetInputType()) + resType := g.stripPackage(method.GetOutputType()) + methodName := escapeMethodName(g.toCamelCase(method.GetName())) + + methodPath := []int32{6, int32(svcIndex), 2, int32(methodIdx)} + detachedComments := g.getLeadingDetachedComments(methodPath) + if len(detachedComments) > 0 { + for idx, detached := range detachedComments { + detached = strings.TrimRight(detached, "\n") + for _, line := range strings.Split(detached, "\n") { + if line == "" { + g.p("// ") + } else { + g.p("// %s", line) + } + } + if idx < len(detachedComments)-1 { + g.pNoIndent("") + } + } + g.pNoIndent("") + } + + g.p("/**") + + // Add method-level leading comments if available + leadingComments, hasLeading := g.getLeadingComments(methodPath) + if hasLeading { + hasTrailingBlank := strings.HasSuffix(leadingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + leadingComments = strings.TrimSuffix(leadingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(leadingComments, "\n") + for _, line := range lines { + if line == "" { + g.p(" *") + } else { + g.p(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.p(" *") + g.p(" *") + } else { + g.p(" *") + } + } + + trailingComments := g.getEnumTrailingComments(methodPath) + if trailingComments != "" { + hasTrailingBlank := strings.HasSuffix(trailingComments, "__HAS_TRAILING_BLANK__") + if hasTrailingBlank { + trailingComments = strings.TrimSuffix(trailingComments, "\n__HAS_TRAILING_BLANK__") + } + + lines := strings.Split(trailingComments, "\n") + for _, line := range lines { + if 
line == "" { + g.p(" *") + } else { + g.p(" * %s", escapeJSDocComment(line)) + } + } + if hasTrailingBlank { + g.p(" *") + g.p(" *") + } else { + g.p(" *") + } + } + + // Add @deprecated if method has deprecated option OR file is deprecated + if (method.Options != nil && method.GetOptions().GetDeprecated()) || g.isFileDeprecated() { + g.p(" * @deprecated") + } + + g.p(" * @generated from protobuf rpc: %s", method.GetName()) + g.p(" */") + + // Determine call type and implementation based on streaming + if method.GetClientStreaming() && method.GetServerStreaming() { + // Bidirectional streaming + g.p("%s(options?: RpcOptions): %s<%s, %s> {", methodName, g.callTypeRefs["DuplexStreamingCall"], reqType, resType) + g.indent = " " + g.p("const method = this.methods[%d], opt = this._transport.mergeOptions(options);", g.findMethodIndex(service, method)) + g.p("return %s<%s, %s>(\"duplex\", this._transport, method, opt);", g.stackInterceptRef, reqType, resType) + g.indent = " " + g.p("}") + } else if method.GetServerStreaming() { + // Server streaming + g.p("%s(input: %s, options?: RpcOptions): %s<%s, %s> {", methodName, reqType, g.callTypeRefs["ServerStreamingCall"], reqType, resType) + g.indent = " " + g.p("const method = this.methods[%d], opt = this._transport.mergeOptions(options);", g.findMethodIndex(service, method)) + g.p("return %s<%s, %s>(\"serverStreaming\", this._transport, method, opt, input);", g.stackInterceptRef, reqType, resType) + g.indent = " " + g.p("}") + } else if method.GetClientStreaming() { + // Client streaming + g.p("%s(options?: RpcOptions): %s<%s, %s> {", methodName, g.callTypeRefs["ClientStreamingCall"], reqType, resType) + g.indent = " " + g.p("const method = this.methods[%d], opt = this._transport.mergeOptions(options);", g.findMethodIndex(service, method)) + g.p("return %s<%s, %s>(\"clientStreaming\", this._transport, method, opt);", g.stackInterceptRef, reqType, resType) + g.indent = " " + g.p("}") + } else { + // Unary + g.p("%s(input: %s, 
options?: RpcOptions): %s<%s, %s> {", methodName, reqType, g.callTypeRefs["UnaryCall"], reqType, resType) + g.indent = " " + g.p("const method = this.methods[%d], opt = this._transport.mergeOptions(options);", g.findMethodIndex(service, method)) + g.p("return %s<%s, %s>(\"unary\", this._transport, method, opt, input);", g.stackInterceptRef, reqType, resType) + g.indent = " " + g.p("}") + } + } + + g.indent = "" + g.pNoIndent("}") +} + +func (g *generator) findMethodIndex(service *descriptorpb.ServiceDescriptorProto, method *descriptorpb.MethodDescriptorProto) int { + for i, m := range service.Method { + if m == method { + return i + } + } + return 0 +} + +func (g *generator) lowerFirst(s string) string { + if len(s) == 0 { + return s + } + return strings.ToLower(s[:1]) + s[1:] +} + +func (g *generator) isPackedType(field *descriptorpb.FieldDescriptorProto) bool { + // Check if the type can be packed (numeric and bool types) + // This determines if we need to handle both packed and unpacked wire formats during deserialization + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE, + descriptorpb.FieldDescriptorProto_TYPE_FLOAT, + descriptorpb.FieldDescriptorProto_TYPE_INT64, + descriptorpb.FieldDescriptorProto_TYPE_UINT64, + descriptorpb.FieldDescriptorProto_TYPE_INT32, + descriptorpb.FieldDescriptorProto_TYPE_FIXED64, + descriptorpb.FieldDescriptorProto_TYPE_FIXED32, + descriptorpb.FieldDescriptorProto_TYPE_BOOL, + descriptorpb.FieldDescriptorProto_TYPE_UINT32, + descriptorpb.FieldDescriptorProto_TYPE_ENUM, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED32, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED64, + descriptorpb.FieldDescriptorProto_TYPE_SINT32, + descriptorpb.FieldDescriptorProto_TYPE_SINT64: + return true + default: + return false + } +} + +func (g *generator) isFieldPacked(field *descriptorpb.FieldDescriptorProto) bool { + // Determine if this field should be marked as packed in metadata + // This affects how it's serialized 
and the RepeatType in metadata + + // Only packable types can be packed + if !g.isPackedType(field) { + return false + } + + // If packed option is explicitly set, use it + if field.Options != nil && field.GetOptions().Packed != nil { + return field.GetOptions().GetPacked() + } + + // Default behavior depends on syntax: + // - proto3: packed by default + // - proto2: unpacked by default + isProto3 := g.file.GetSyntax() == "proto3" + return isProto3 +} + +func (g *generator) getMapKeyDefault(field *descriptorpb.FieldDescriptorProto) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_INT32, + descriptorpb.FieldDescriptorProto_TYPE_INT64, + descriptorpb.FieldDescriptorProto_TYPE_UINT32, + descriptorpb.FieldDescriptorProto_TYPE_UINT64, + descriptorpb.FieldDescriptorProto_TYPE_SINT32, + descriptorpb.FieldDescriptorProto_TYPE_SINT64, + descriptorpb.FieldDescriptorProto_TYPE_FIXED32, + descriptorpb.FieldDescriptorProto_TYPE_FIXED64, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED32, + descriptorpb.FieldDescriptorProto_TYPE_SFIXED64: + if field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_INT64 || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_UINT64 || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SINT64 || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_FIXED64 || + field.GetType() == descriptorpb.FieldDescriptorProto_TYPE_SFIXED64 { + return "\"0\"" + } + return "0" + case descriptorpb.FieldDescriptorProto_TYPE_BOOL: + // Boolean keys are stored as strings in TypeScript object keys + return "\"false\"" + case descriptorpb.FieldDescriptorProto_TYPE_STRING: + return "\"\"" + default: + return "\"\"" + } +} + +func (g *generator) getMapValueDefault(field *descriptorpb.FieldDescriptorProto) string { + switch field.GetType() { + case descriptorpb.FieldDescriptorProto_TYPE_DOUBLE, + descriptorpb.FieldDescriptorProto_TYPE_FLOAT, + descriptorpb.FieldDescriptorProto_TYPE_INT32, + 
// generateService emits the `export const <Service> = new ServiceType(...)`
// declaration for one protobuf service, including one method descriptor
// object per RPC (name, optional localName/idempotency/streaming flags,
// custom options, and input/output message types).
func (g *generator) generateService(svc *descriptorpb.ServiceDescriptorProto) {
	// Fully-qualified service name is "<package>.<Service>" when the file
	// declares a package, otherwise just the service name.
	pkgPrefix := ""
	if g.file.Package != nil && *g.file.Package != "" {
		pkgPrefix = *g.file.Package + "."
	}

	svcName := svc.GetName()
	escapedSvcName := escapeTypescriptKeyword(svcName)
	fullName := pkgPrefix + svcName

	g.pNoIndent("/**")
	// Add @deprecated if service has deprecated option OR file is deprecated
	if (svc.Options != nil && svc.GetOptions().GetDeprecated()) || g.isFileDeprecated() {
		g.pNoIndent(" * @deprecated")
	}
	g.pNoIndent(" * @generated ServiceType for protobuf service %s", fullName)
	g.pNoIndent(" */")

	if len(svc.Method) == 0 {
		// Service with no methods: emit an empty descriptor list, with the
		// custom-options argument only when there are custom options.
		customSvcOpts := g.getCustomServiceOptions(svc.Options)
		if len(customSvcOpts) > 0 {
			g.pNoIndent("export const %s = new %s(\"%s\", [], %s);", escapedSvcName, g.serviceTypeRef, fullName, formatCustomOptions(customSvcOpts))
		} else {
			g.pNoIndent("export const %s = new %s(\"%s\", []);", escapedSvcName, g.serviceTypeRef, fullName)
		}
	} else {
		g.pNoIndent("export const %s = new %s(\"%s\", [", escapedSvcName, g.serviceTypeRef, fullName)

		// Generate method descriptors
		g.indent = " "
		for i, method := range svc.Method {
			inputType := g.stripPackage(method.GetInputType())
			outputType := g.stripPackage(method.GetOutputType())
			// No trailing comma after the last descriptor in the array.
			comma := ","
			if i == len(svc.Method)-1 {
				comma = ""
			}

			// Check if method name needs escaping and add localName
			methodName := g.toCamelCase(method.GetName())
			escapedName := escapeMethodName(methodName)
			localNameField := ""
			if escapedName != methodName {
				localNameField = fmt.Sprintf("localName: \"%s\", ", escapedName)
			}

			// Add idempotency field if specified
			idempotencyField := ""
			if method.Options != nil {
				idempotencyLevel := method.GetOptions().GetIdempotencyLevel()
				switch idempotencyLevel {
				case descriptorpb.MethodOptions_NO_SIDE_EFFECTS:
					idempotencyField = "idempotency: \"NO_SIDE_EFFECTS\", "
				case descriptorpb.MethodOptions_IDEMPOTENT:
					idempotencyField = "idempotency: \"IDEMPOTENT\", "
				}
			}

			// Build streaming flags
			streamingFlags := ""
			if method.GetServerStreaming() {
				streamingFlags += "serverStreaming: true, "
			}
			if method.GetClientStreaming() {
				streamingFlags += "clientStreaming: true, "
			}

			// Extract custom method options
			customOpts := g.getCustomMethodOptions(method.Options)
			optsStr := formatCustomOptions(customOpts)

			g.p("{ name: \"%s\", %s%s%soptions: %s, I: %s, O: %s }%s",
				method.GetName(), localNameField, idempotencyField, streamingFlags, optsStr, inputType, outputType, comma)
		}
		g.indent = ""
		customSvcOpts := g.getCustomServiceOptions(svc.Options)
		if len(customSvcOpts) > 0 {
			g.pNoIndent("], %s);", formatCustomOptions(customSvcOpts))
		} else {
			g.pNoIndent("]);")
		}
	}
}
method.GetClientStreaming() { + streamingFlags += "clientStreaming: true, " + } + + // Extract custom method options + customOpts := g.getCustomMethodOptions(method.Options) + optsStr := formatCustomOptions(customOpts) + + g.p("{ name: \"%s\", %s%s%soptions: %s, I: %s, O: %s }%s", + method.GetName(), localNameField, idempotencyField, streamingFlags, optsStr, inputType, outputType, comma) +} +g.indent = "" +customSvcOpts := g.getCustomServiceOptions(svc.Options) +if len(customSvcOpts) > 0 { + g.pNoIndent("], %s);", formatCustomOptions(customSvcOpts)) +} else { + g.pNoIndent("]);") +} +} +} + +func (g *generator) generateTimestampMethods() { +g.indent = " " + +// now() method +g.p("/**") +g.p(" * Creates a new `Timestamp` for the current time.") +g.p(" */") +g.p("now(): Timestamp {") +g.indent = " " +g.p("const msg = this.create();") +g.p("const ms = Date.now();") +g.p("msg.seconds = %s.from(Math.floor(ms / 1000)).toString();", g.pbLongRef) +g.p("msg.nanos = (ms %% 1000) * 1000000;") +g.p("return msg;") +g.indent = " " +g.p("}") + +// toDate() method +g.p("/**") +g.p(" * Converts a `Timestamp` to a JavaScript Date.") +g.p(" */") +g.p("toDate(message: Timestamp): Date {") +g.indent = " " +g.p("return new Date(%s.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));", g.pbLongRef) +g.indent = " " +g.p("}") + +// fromDate() method +g.p("/**") +g.p(" * Converts a JavaScript Date to a `Timestamp`.") +g.p(" */") +g.p("fromDate(date: Date): Timestamp {") +g.indent = " " +g.p("const msg = this.create();") +g.p("const ms = date.getTime();") +g.p("msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();") +g.p("msg.nanos = ((ms %% 1000) + (ms < 0 && ms %% 1000 !== 0 ? 
1000 : 0)) * 1000000;") +g.p("return msg;") +g.indent = " " +g.p("}") + +// internalJsonWrite() method +g.p("/**") +g.p(" * In JSON format, the `Timestamp` type is encoded as a string") +g.p(" * in the RFC 3339 format.") +g.p(" */") +g.p("internalJsonWrite(message: Timestamp, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {") +g.indent = " " +g.p("let ms = PbLong.from(message.seconds).toNumber() * 1000;") +g.p("if (ms < Date.parse(\"0001-01-01T00:00:00Z\") || ms > Date.parse(\"9999-12-31T23:59:59Z\"))") +g.indent = " " +g.p("throw new Error(\"Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.\");") +g.indent = " " +g.p("if (message.nanos < 0)") +g.indent = " " +g.p("throw new Error(\"Unable to encode invalid Timestamp to JSON. Nanos must not be negative.\");") +g.indent = " " +g.p("let z = \"Z\";") +g.p("if (message.nanos > 0) {") +g.indent = " " +g.p("let nanosStr = (message.nanos + 1000000000).toString().substring(1);") +g.p("if (nanosStr.substring(3) === \"000000\")") +g.indent = " " +g.p("z = \".\" + nanosStr.substring(0, 3) + \"Z\";") +g.indent = " " +g.p("else if (nanosStr.substring(6) === \"000\")") +g.indent = " " +g.p("z = \".\" + nanosStr.substring(0, 6) + \"Z\";") +g.indent = " " +g.p("else") +g.indent = " " +g.p("z = \".\" + nanosStr + \"Z\";") +g.indent = " " +g.p("}") +g.p("return new Date(ms).toISOString().replace(\".000Z\", z);") +g.indent = " " +g.p("}") + +// internalJsonRead() method +g.p("/**") +g.p(" * In JSON format, the `Timestamp` type is encoded as a string") +g.p(" * in the RFC 3339 format.") +g.p(" */") +g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: Timestamp): Timestamp {") +g.indent = " " +g.p("if (typeof json !== \"string\")") +g.indent = " " +g.p("throw new Error(\"Unable to parse Timestamp from JSON \" + %s(json) + \".\");", g.typeofJsonValueRef) +g.indent = " " +g.p("let matches = 
json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);") +g.p("if (!matches)") +g.indent = " " +g.p("throw new Error(\"Unable to parse Timestamp from JSON. Invalid format.\");") +g.indent = " " +g.p("let ms = Date.parse(matches[1] + \"-\" + matches[2] + \"-\" + matches[3] + \"T\" + matches[4] + \":\" + matches[5] + \":\" + matches[6] + (matches[8] ? matches[8] : \"Z\"));") +g.p("if (Number.isNaN(ms))") +g.indent = " " +g.p("throw new Error(\"Unable to parse Timestamp from JSON. Invalid value.\");") +g.indent = " " +g.p("if (ms < Date.parse(\"0001-01-01T00:00:00Z\") || ms > Date.parse(\"9999-12-31T23:59:59Z\"))") +g.indent = " " +g.p("throw new globalThis.Error(\"Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.\");") +g.indent = " " +g.p("if (!target)") +g.indent = " " +g.p("target = this.create();") +g.indent = " " +g.p("target.seconds = PbLong.from(ms / 1000).toString();") +g.p("target.nanos = 0;") +g.p("if (matches[7])") +g.indent = " " +g.p("target.nanos = (parseInt(\"1\" + matches[7] + \"0\".repeat(9 - matches[7].length)) - 1000000000);") +g.indent = " " +g.p("return target;") +g.indent = " " +g.p("}") +} + +func (g *generator) generateDurationMethods() { +g.indent = " " + +// internalJsonWrite() method +g.p("/**") +g.p(" * Encode `Duration` to JSON string like \"3.000001s\".") +g.p(" */") +g.p("internalJsonWrite(message: Duration, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {") +g.indent = " " +g.p("let s = PbLong.from(message.seconds).toNumber();") +g.p("if (s > 315576000000 || s < -315576000000)") +g.indent = " " +g.p("throw new Error(\"Duration value out of range.\");") +g.indent = " " +g.p("let text = message.seconds.toString();") +g.p("if (s === 0 && message.nanos < 0)") +g.indent = " " +g.p("text = \"-\" + text;") +g.indent = " " +g.p("if (message.nanos !== 0) {") +g.indent = " " +g.p("let nanosStr 
= Math.abs(message.nanos).toString();") +g.p("nanosStr = \"0\".repeat(9 - nanosStr.length) + nanosStr;") +g.p("if (nanosStr.substring(3) === \"000000\")") +g.indent = " " +g.p("nanosStr = nanosStr.substring(0, 3);") +g.indent = " " +g.p("else if (nanosStr.substring(6) === \"000\")") +g.indent = " " +g.p("nanosStr = nanosStr.substring(0, 6);") +g.indent = " " +g.p("text += \".\" + nanosStr;") +g.indent = " " +g.p("}") +g.p("return text + \"s\";") +g.indent = " " +g.p("}") + +// internalJsonRead() method +g.p("/**") +g.p(" * Decode `Duration` from JSON string like \"3.000001s\"") +g.p(" */") +g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: Duration): Duration {") +g.indent = " " +g.p("if (typeof json !== \"string\")") +g.indent = " " +g.p("throw new Error(\"Unable to parse Duration from JSON \" + %s(json) + \". Expected string.\");", g.typeofJsonValueRef) +g.indent = " " +g.p("let match = json.match(/^(-?)([0-9]+)(?:\\.([0-9]+))?s/);") +g.p("if (match === null)") +g.indent = " " +g.p("throw new Error(\"Unable to parse Duration from JSON string. Invalid format.\");") +g.indent = " " +g.p("if (!target)") +g.indent = " " +g.p("target = this.create();") +g.indent = " " +g.p("let [, sign, secs, nanos] = match;") +g.p("let longSeconds = PbLong.from(sign + secs);") +g.p("if (longSeconds.toNumber() > 315576000000 || longSeconds.toNumber() < -315576000000)") +g.indent = " " +g.p("throw new Error(\"Unable to parse Duration from JSON string. 
Value out of range.\");") +g.indent = " " +g.p("target.seconds = longSeconds.toString();") +g.p("if (typeof nanos == \"string\") {") +g.indent = " " +g.p("let nanosStr = sign + nanos + \"0\".repeat(9 - nanos.length);") +g.p("target.nanos = parseInt(nanosStr);") +g.indent = " " +g.p("}") +g.p("return target;") +g.indent = " " +g.p("}") +} + +func (g *generator) generateFieldMaskMethods() { + g.indent = " " + + // internalJsonWrite() method + g.p("/**") + g.p(" * Encode `FieldMask` to JSON object.") + g.p(" */") + g.p("internalJsonWrite(message: FieldMask, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {") + g.indent = " " + g.p("const invalidFieldMaskJsonRegex = /[A-Z]|(_([.0-9_]|$))/g;") + g.p("return message.paths.map(p => {") + g.indent = " " + g.p("if (invalidFieldMaskJsonRegex.test(p))") + g.indent = " " + g.p("%s", "throw new Error(\"Unable to encode FieldMask to JSON. lowerCamelCase of path name \\\"\" + p + \"\\\" is irreversible.\");") + g.indent = " " + g.p("return %s(p);", g.lowerCamelCaseRef) + g.indent = " " + g.p("}).join(\",\");") + g.indent = " " + g.p("}") + + // internalJsonRead() method + g.p("/**") + g.p(" * Decode `FieldMask` from JSON object.") + g.p(" */") + g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: FieldMask): FieldMask {") + g.indent = " " + g.p("if (typeof json !== \"string\")") + g.indent = " " + g.p("throw new Error(\"Unable to parse FieldMask from JSON \" + %s(json) + \". Expected string.\");", g.typeofJsonValueRef) + g.indent = " " + g.p("if (!target)") + g.indent = " " + g.p("target = this.create();") + g.indent = " " + g.p("if (json === \"\")") + g.indent = " " + g.p("return target;") + g.indent = " " + g.p("let camelToSnake = (str: string) => {") + g.indent = " " + g.p("if (str.includes(\"_\"))") + g.indent = " " + g.p("throw new Error(\"Unable to parse FieldMask from JSON. 
// generateStructMethods emits the custom JSON methods for the well-known
// google.protobuf Struct family. typeName selects which of the three types
// ("Struct", "Value", "ListValue") is being generated; any other value emits
// nothing.
func (g *generator) generateStructMethods(typeName string) {
	g.indent = " "

	if typeName == "Struct" {
		// internalJsonWrite for Struct
		g.p("/**")
		g.p(" * Encode `Struct` to JSON object.")
		g.p(" */")
		g.p("internalJsonWrite(message: Struct, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {")
		g.indent = " "
		g.p("let json: %s = {};", g.jsonObjectRef)
		g.p("for (let [k, v] of Object.entries(message.fields)) {")
		g.indent = " "
		g.p("json[k] = Value.toJson(v);")
		g.indent = " "
		g.p("}")
		g.p("return json;")
		g.indent = " "
		g.p("}")

		// internalJsonRead for Struct
		g.p("/**")
		g.p(" * Decode `Struct` from JSON object.")
		g.p(" */")
		g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: Struct): Struct {")
		g.indent = " "
		g.p("if (!%s(json))", g.isJsonObjectRef)
		g.indent = " "
		g.p("throw new globalThis.Error(\"Unable to parse message \" + this.typeName + \" from JSON \" + %s(json) + \".\");", g.typeofJsonValueRef)
		g.indent = " "
		g.p("if (!target)")
		g.indent = " "
		g.p("target = this.create();")
		g.indent = " "
		g.p("for (let [k, v] of globalThis.Object.entries(json)) {")
		g.indent = " "
		g.p("target.fields[k] = Value.fromJson(v);")
		g.indent = " "
		g.p("}")
		g.p("return target;")
		g.indent = " "
		g.p("}")
	} else if typeName == "Value" {
		// internalJsonWrite for Value
		g.p("/**")
		g.p(" * Encode `Value` to JSON value.")
		g.p(" */")
		g.p("internalJsonWrite(message: Value, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {")
		g.indent = " "
		g.p("if (message.kind.oneofKind === undefined)")
		g.indent = " "
		g.p("%s", "throw new globalThis.Error();")
		g.indent = " "
		g.p("switch (message.kind.oneofKind) {")
		g.indent = " "
		// NOTE(review): the "case undefined" below is unreachable after the
		// guard above; it appears to exist for TypeScript exhaustiveness —
		// kept as-is to preserve the emitted output.
		g.p("case undefined: throw new globalThis.Error();")
		g.p("case \"boolValue\": return message.kind.boolValue;")
		g.p("case \"nullValue\": return null;")
		g.p("case \"numberValue\":")
		g.indent = " "
		g.p("let numberValue = message.kind.numberValue;")
		g.p("if (typeof numberValue == \"number\" && !Number.isFinite(numberValue))")
		g.indent = " "
		g.p("%s", "throw new globalThis.Error();")
		g.indent = " "
		g.p("return numberValue;")
		g.indent = " "
		g.p("case \"stringValue\": return message.kind.stringValue;")
		g.p("case \"listValue\":")
		g.indent = " "
		// Field no. 6 is list_value in google.protobuf.Value.
		g.p("let listValueField = this.fields.find(f => f.no === 6);")
		g.p("if (listValueField?.kind !== \"message\")")
		g.indent = " "
		g.p("%s", "throw new globalThis.Error();")
		g.indent = " "
		g.p("return listValueField.T().toJson(message.kind.listValue);")
		g.indent = " "
		g.p("case \"structValue\":")
		g.indent = " "
		// Field no. 5 is struct_value in google.protobuf.Value.
		g.p("let structValueField = this.fields.find(f => f.no === 5);")
		g.p("if (structValueField?.kind !== \"message\")")
		g.indent = " "
		g.p("%s", "throw new globalThis.Error();")
		g.indent = " "
		g.p("return structValueField.T().toJson(message.kind.structValue);")
		g.indent = " "
		g.p("}")
		g.indent = " "
		g.p("}")

		// internalJsonRead for Value
		g.p("/**")
		g.p(" * Decode `Value` from JSON value.")
		g.p(" */")
		g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: Value): Value {")
		g.indent = " "
		g.p("if (!target)")
		g.indent = " "
		g.p("target = this.create();")
		g.indent = " "
		g.p("switch (typeof json) {")
		g.indent = " "
		g.p("case \"number\":")
		g.indent = " "
		g.p("target.kind = { oneofKind: \"numberValue\", numberValue: json };")
		g.p("break;")
		g.indent = " "
		g.p("case \"string\":")
		g.indent = " "
		g.p("target.kind = { oneofKind: \"stringValue\", stringValue: json };")
		g.p("break;")
		g.indent = " "
		g.p("case \"boolean\":")
		g.indent = " "
		g.p("target.kind = { oneofKind: \"boolValue\", boolValue: json };")
		g.p("break;")
		g.indent = " "
		g.p("case \"object\":")
		g.indent = " "
		g.p("if (json === null) {")
		g.indent = " "
		g.p("target.kind = { oneofKind: \"nullValue\", nullValue: NullValue.NULL_VALUE };")
		g.indent = " "
		g.p("}")
		g.p("else if (globalThis.Array.isArray(json)) {")
		g.indent = " "
		g.p("target.kind = { oneofKind: \"listValue\", listValue: ListValue.fromJson(json) };")
		g.indent = " "
		g.p("}")
		g.p("else {")
		g.indent = " "
		g.p("target.kind = { oneofKind: \"structValue\", structValue: Struct.fromJson(json) };")
		g.indent = " "
		g.p("}")
		g.p("break;")
		g.indent = " "
		g.p("default: throw new globalThis.Error(\"Unable to parse \" + this.typeName + \" from JSON \" + %s(json));", g.typeofJsonValueRef)
		g.indent = " "
		g.p("}")
		g.p("return target;")
		g.indent = " "
		g.p("}")
	} else if typeName == "ListValue" {
		// internalJsonWrite for ListValue
		g.p("/**")
		g.p(" * Encode `ListValue` to JSON array.")
		g.p(" */")
		g.p("internalJsonWrite(message: ListValue, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {")
		g.indent = " "
		g.p("return message.values.map(v => Value.toJson(v));")
		g.indent = " "
		g.p("}")

		// internalJsonRead for ListValue
		g.p("/**")
		g.p(" * Decode `ListValue` from JSON array.")
		g.p(" */")
		g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: ListValue): ListValue {")
		g.indent = " "
		g.p("if (!globalThis.Array.isArray(json))")
		g.indent = " "
		g.p("throw new globalThis.Error(\"Unable to parse \" + this.typeName + \" from JSON \" + %s(json));", g.typeofJsonValueRef)
		g.indent = " "
		g.p("if (!target)")
		g.indent = " "
		g.p("target = this.create();")
		g.indent = " "
		g.p("let values = json.map(v => Value.fromJson(v));")
		g.p("target.values.push(...values);")
		g.p("return target;")
		g.indent = " "
		g.p("}")
	}
}
// generateWrapperMethods emits internalJsonWrite/internalJsonRead for the
// google.protobuf wrapper types (DoubleValue, FloatValue, Int64Value, ...).
// typeName selects the wrapper being generated.
//
// NOTE(review): the numeric scalar-type codes below look inconsistent — the
// write side uses 2 for DoubleValue and 1 for FloatValue, while the read side
// uses 1 for both. If these are ScalarType codes (DOUBLE=1, FLOAT=2), the
// double/float pair may be swapped. Confirm against the reference generator's
// output (this repo diffs generated results against a reference) before
// changing, since byte parity may be intentional.
func (g *generator) generateWrapperMethods(typeName string) {
	g.indent = " "

	// internalJsonWrite() method
	g.p("/**")
	switch typeName {
	case "DoubleValue":
		g.p(" * Encode `%s` to JSON number.", typeName)
	case "FloatValue":
		g.p(" * Encode `%s` to JSON number.", typeName)
	case "Int64Value":
		g.p(" * Encode `%s` to JSON string.", typeName)
	case "UInt64Value":
		g.p(" * Encode `%s` to JSON string.", typeName)
	case "Int32Value":
		g.p(" * Encode `%s` to JSON string.", typeName)
	case "UInt32Value":
		g.p(" * Encode `%s` to JSON string.", typeName)
	case "BoolValue":
		g.p(" * Encode `%s` to JSON bool.", typeName)
	case "StringValue":
		g.p(" * Encode `%s` to JSON string.", typeName)
	case "BytesValue":
		g.p(" * Encode `%s` to JSON string.", typeName)
	}
	g.p(" */")
	g.p("internalJsonWrite(message: %s, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {", typeName)
	g.indent = " "

	// Handle write based on type
	switch typeName {
	case "DoubleValue":
		g.p("return this.refJsonWriter.scalar(2, message.value, \"value\", false, true);")
	case "FloatValue":
		g.p("return this.refJsonWriter.scalar(1, message.value, \"value\", false, true);")
	case "Int64Value":
		g.p("return this.refJsonWriter.scalar(%s.INT64, message.value, \"value\", false, true);", g.scalarTypeRef)
	case "UInt64Value":
		g.p("return this.refJsonWriter.scalar(%s.UINT64, message.value, \"value\", false, true);", g.scalarTypeRef)
	case "Int32Value":
		g.p("return this.refJsonWriter.scalar(5, message.value, \"value\", false, true);")
	case "UInt32Value":
		g.p("return this.refJsonWriter.scalar(13, message.value, \"value\", false, true);")
	case "BoolValue":
		g.p("return message.value;")
	case "StringValue":
		g.p("return message.value;")
	case "BytesValue":
		g.p("return this.refJsonWriter.scalar(12, message.value, \"value\", false, true);")
	}

	g.indent = " "
	g.p("}")

	// internalJsonRead() method
	g.p("/**")
	switch typeName {
	case "DoubleValue":
		g.p(" * Decode `%s` from JSON number.", typeName)
	case "FloatValue":
		g.p(" * Decode `%s` from JSON number.", typeName)
	case "Int64Value":
		g.p(" * Decode `%s` from JSON string.", typeName)
	case "UInt64Value":
		g.p(" * Decode `%s` from JSON string.", typeName)
	case "Int32Value":
		g.p(" * Decode `%s` from JSON string.", typeName)
	case "UInt32Value":
		g.p(" * Decode `%s` from JSON string.", typeName)
	case "BoolValue":
		g.p(" * Decode `%s` from JSON bool.", typeName)
	case "StringValue":
		g.p(" * Decode `%s` from JSON string.", typeName)
	case "BytesValue":
		g.p(" * Decode `%s` from JSON string.", typeName)
	}
	g.p(" */")
	g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: %s): %s {", typeName, typeName)
	g.indent = " "
	g.p("if (!target)")
	g.indent = " "
	g.p("target = this.create();")
	g.indent = " "

	// Handle read based on type
	switch typeName {
	case "DoubleValue":
		g.p("target.value = this.refJsonReader.scalar(json, 1, undefined, \"value\") as number;")
	case "FloatValue":
		g.p("target.value = this.refJsonReader.scalar(json, 1, undefined, \"value\") as number;")
	case "Int64Value":
		g.p("target.value = this.refJsonReader.scalar(json, %s.INT64, %s.STRING, \"value\") as any;", g.scalarTypeRef, g.longTypeRef)
	case "UInt64Value":
		g.p("target.value = this.refJsonReader.scalar(json, %s.UINT64, %s.STRING, \"value\") as any;", g.scalarTypeRef, g.longTypeRef)
	case "Int32Value":
		g.p("target.value = this.refJsonReader.scalar(json, 5, undefined, \"value\") as number;")
	case "UInt32Value":
		g.p("target.value = this.refJsonReader.scalar(json, 13, undefined, \"value\") as number;")
	case "BoolValue":
		g.p("target.value = this.refJsonReader.scalar(json, 8, undefined, \"value\") as boolean;")
	case "StringValue":
		g.p("target.value = this.refJsonReader.scalar(json, 9, undefined, \"value\") as string;")
	case "BytesValue":
		g.p("target.value = this.refJsonReader.scalar(json, 12, undefined, \"value\") as Uint8Array;")
	}

	g.p("return target;")
	g.indent = " "
	g.p("}")
}
// generateAnyMethods emits the helper and JSON methods for the well-known
// google.protobuf.Any type: pack(), unpack(), contains(),
// internalJsonWrite(), internalJsonRead(), typeNameToUrl(), typeUrlToName().
func (g *generator) generateAnyMethods() {
	g.indent = " "

	// pack() method
	g.p("/**")
	g.p(" * Pack the message into a new `Any`.")
	g.p(" *")
	g.p(" * Uses 'type.googleapis.com/full.type.name' as the type URL.")
	g.p(" */")
	// NOTE(review): the emitted signature references a generic T without a
	// visible `<T extends ...>` declaration here — presumably the generic is
	// part of iMessageTypeRef or declared elsewhere; confirm against the
	// generated output.
	g.p("pack(message: T, type: %s): Any {", g.iMessageTypeRef)
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("typeUrl: this.typeNameToUrl(type.typeName), value: type.toBinary(message),")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")

	// unpack() method
	g.p("/**")
	g.p(" * Unpack the message from the `Any`.")
	g.p(" */")
	g.p("unpack(any: Any, type: %s, options?: Partial<%s>): T {", g.iMessageTypeRef, g.binaryReadOptionsRef)
	g.indent = " "
	g.p("if (!this.contains(any, type))")
	g.indent = " "
	g.p("throw new Error(\"Cannot unpack google.protobuf.Any with typeUrl '\" + any.typeUrl + \"' as \" + type.typeName + \".\");")
	g.indent = " "
	g.p("return type.fromBinary(any.value, options);")
	g.indent = " "
	g.p("}")

	// contains() method
	g.p("/**")
	g.p(" * Does the given `Any` contain a packed message of the given type?")
	g.p(" */")
	g.p("contains(any: Any, type: %s | string): boolean {", g.iMessageTypeRef)
	g.indent = " "
	g.p("if (!any.typeUrl.length)")
	g.indent = " "
	g.p("return false;")
	g.indent = " "
	g.p("let wants = typeof type == \"string\" ? type : type.typeName;")
	g.p("let has = this.typeUrlToName(any.typeUrl);")
	g.p("return wants === has;")
	g.indent = " "
	g.p("}")

	// internalJsonWrite() method
	g.p("/**")
	g.p(" * Convert the message to canonical JSON value.")
	g.p(" *")
	g.p(" * You have to provide the `typeRegistry` option so that the")
	g.p(" * packed message can be converted to JSON.")
	g.p(" *")
	g.p(" * The `typeRegistry` option is also required to read")
	g.p(" * `google.protobuf.Any` from JSON format.")
	g.p(" */")
	g.p("internalJsonWrite(any: Any, options: " + g.jsonWriteOptionsTypeRef + "): " + g.jsonValueRef + " {")
	g.indent = " "
	g.p("if (any.typeUrl === \"\")")
	g.indent = " "
	g.p("return {};")
	g.indent = " "
	g.p("let typeName = this.typeUrlToName(any.typeUrl);")
	g.p("let opt = %s(options);", g.jsonWriteOptionsRef)
	g.p("let type = opt.typeRegistry?.find(t => t.typeName === typeName);")
	g.p("if (!type)")
	g.indent = " "
	g.p("throw new globalThis.Error(\"Unable to convert google.protobuf.Any with typeUrl '\" + any.typeUrl + \"' to JSON. The specified type \" + typeName + \" is not available in the type registry.\");")
	g.indent = " "
	g.p("let value = type.fromBinary(any.value, { readUnknownField: false });")
	g.p("let json = type.internalJsonWrite(value, opt);")
	// Well-known types and non-object JSON need the {"value": ...} wrapping.
	g.p("if (typeName.startsWith(\"google.protobuf.\") || !%s(json))", g.isJsonObjectRef)
	g.indent = " "
	g.p("json = { value: json };")
	g.indent = " "
	g.p("json[\"@type\"] = any.typeUrl;")
	g.p("return json;")
	g.indent = " "
	g.p("}")

	// internalJsonRead() method
	g.p("internalJsonRead(json: " + g.jsonValueRef + ", options: " + g.jsonReadOptionsRef + ", target?: Any): Any {")
	g.indent = " "
	g.p("if (!%s(json))", g.isJsonObjectRef)
	g.indent = " "
	g.p("throw new globalThis.Error(\"Unable to parse google.protobuf.Any from JSON \" + %s(json) + \".\");", g.typeofJsonValueRef)
	g.indent = " "
	g.p("if (typeof json[\"@type\"] != \"string\" || json[\"@type\"] == \"\")")
	g.indent = " "
	g.p("return this.create();")
	g.indent = " "
	g.p("let typeName = this.typeUrlToName(json[\"@type\"]);")
	g.p("let type = options?.typeRegistry?.find(t => t.typeName == typeName);")
	g.p("if (!type)")
	g.indent = " "
	g.p("throw new globalThis.Error(\"Unable to parse google.protobuf.Any from JSON. The specified type \" + typeName + \" is not available in the type registry.\");")
	g.indent = " "
	g.p("let value;")
	g.p("if (typeName.startsWith(\"google.protobuf.\") && json.hasOwnProperty(\"value\"))")
	g.indent = " "
	g.p("value = type.fromJson(json[\"value\"], options);")
	g.indent = " "
	g.p("else {")
	g.indent = " "
	g.p("let copy = Object.assign({}, json);")
	g.p("delete copy[\"@type\"];")
	g.p("value = type.fromJson(copy, options);")
	g.indent = " "
	g.p("}")
	g.p("if (target === undefined)")
	g.indent = " "
	g.p("target = this.create();")
	g.indent = " "
	g.p("target.typeUrl = json[\"@type\"];")
	g.p("target.value = type.toBinary(value);")
	g.p("return target;")
	g.indent = " "
	g.p("}")

	// typeNameToUrl() method
	g.p("typeNameToUrl(name: string): string {")
	g.indent = " "
	g.p("if (!name.length)")
	g.indent = " "
	g.p("throw new Error(\"invalid type name: \" + name);")
	g.indent = " "
	g.p("return \"type.googleapis.com/\" + name;")
	g.indent = " "
	g.p("}")

	// typeUrlToName() method
	g.p("typeUrlToName(url: string): string {")
	g.indent = " "
	g.p("if (!url.length)")
	g.indent = " "
	g.p("throw new Error(\"invalid type url: \" + url);")
	g.indent = " "
	g.p("let slash = url.lastIndexOf(\"/\");")
	g.p("let name = slash > 0 ? url.substring(slash + 1) : url;")
	g.p("if (!name.length)")
	g.indent = " "
	g.p("throw new Error(\"invalid type url: \" + url);")
	g.indent = " "
	g.p("return name;")
	g.indent = " "
	g.p("}")
}
// generateGoogleTypeDateMethods emits the convenience methods for the
// google.type.Date message: toJsDate() and fromJsDate().
func (g *generator) generateGoogleTypeDateMethods() {
	g.indent = " "

	// toJsDate() method
	g.p("/**")
	g.p(" * Creates a javascript Date object from the message.")
	g.p(" *")
	g.p(" * If you do not provide the optional parameters for time,")
	g.p(" * the current time is used.")
	g.p(" */")
	g.p("toJsDate(message: Date, hours?: number, minutes?: number, seconds?: number, ms?: number): globalThis.Date {")
	g.indent = " "
	g.p("let now = new globalThis.Date();")
	g.p("return new globalThis.Date(message.year, message.month - 1, message.day, hours ?? now.getHours(), minutes ?? now.getMinutes(), seconds ?? now.getSeconds(), ms ?? now.getMilliseconds());")
	g.indent = " "
	g.p("}")

	// fromJsDate() method
	g.p("/**")
	g.p(" * Creates a Date message from a javascript Date object.")
	g.p(" */")
	g.p("fromJsDate(date: globalThis.Date): Date {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("year: date.getFullYear(), month: date.getMonth() + 1, day: date.getDate(),")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
}

// generateGoogleTypeColorMethods emits the convenience methods for the
// google.type.Color message: toHex() and fromHex() supporting #RGB, #RRGGBB,
// #RGBA and #RRGGBBAA notations.
func (g *generator) generateGoogleTypeColorMethods() {
	g.indent = " "

	// toHex() method
	g.p("/**")
	g.p(" * Returns hexadecimal notation of the color: #RRGGBB[AA]")
	g.p(" *")
	g.p(" * R (red), G (green), B (blue), and A (alpha) are hexadecimal characters")
	g.p(" * (0–9, A–F). A is optional. For example, #ff0000 is equivalent to")
	g.p(" * #ff0000ff.")
	g.p(" *")
	g.p(" * See https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#RGB_colors")
	g.p(" */")
	g.p("toHex(message: Color): string {")
	g.indent = " "
	g.p("let hex = [")
	g.indent = " "
	g.p("message.red.toString(16), message.green.toString(16), message.blue.toString(16),")
	g.indent = " "
	g.p("];")
	g.p("if (message.alpha) {")
	g.indent = " "
	// Alpha is clamped to [0, 1] before scaling to a byte.
	g.p("let alpha = Math.max(Math.min(message.alpha.value, 1), 0);")
	g.p("hex.push(Math.round(alpha * 255).toString(16));")
	g.indent = " "
	g.p("}")
	g.p(`return "#" + hex.map(i => i.length < 2 ? "0" + i : i).join("");`)
	g.indent = " "
	g.p("}")

	// fromHex() method
	g.p("/**")
	g.p(" * Parses a hexadecimal color notation.")
	g.p(" *")
	g.p(" * Recognizes the following forms:")
	g.p(" * - three-digit (#RGB)")
	g.p(" * - six-digit (#RRGGBB)")
	g.p(" * - four-digit (#RGBA)")
	g.p(" * - eight-digit (#RRGGBBAA)")
	g.p(" */")
	g.p("fromHex(hex: string): Color {")
	g.indent = " "
	g.p("if (/^#(?:[0-9a-fA-F]{3}){1}$/.test(hex)) {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	// Short forms double each digit (e.g. #abc -> #aabbcc).
	g.p("red: parseInt(hex.substring(1, 2) + hex.substring(1, 2), 16), green: parseInt(hex.substring(2, 3) + hex.substring(2, 3), 16), blue: parseInt(hex.substring(3, 4) + hex.substring(3, 4), 16),")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
	g.p("else if (/^#(?:[0-9a-fA-F]{3}){2}$/.test(hex)) {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("red: parseInt(hex.substring(1, 3), 16), green: parseInt(hex.substring(3, 5), 16), blue: parseInt(hex.substring(5, 7), 16),")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
	g.p("else if (/^#(?:[0-9a-fA-F]{4}){1}$/.test(hex)) {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("red: parseInt(hex.substring(1, 2) + hex.substring(1, 2), 16), green: parseInt(hex.substring(2, 3) + hex.substring(2, 3), 16), blue: parseInt(hex.substring(3, 4) + hex.substring(3, 4), 16), alpha: {")
	g.indent = " "
	g.p("value: parseInt(hex.substring(4, 5) + hex.substring(4, 5), 16) / 255,")
	g.indent = " "
	g.p("}")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
	g.p("else if (/^#(?:[0-9a-fA-F]{4}){2}$/.test(hex)) {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("red: parseInt(hex.substring(1, 3), 16), green: parseInt(hex.substring(3, 5), 16), blue: parseInt(hex.substring(5, 7), 16), alpha: {")
	g.indent = " "
	g.p("value: parseInt(hex.substring(7, 9), 16) / 255,")
	g.indent = " "
	g.p("}")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
	g.p(`throw new Error("invalid hex color");`)
	g.indent = " "
	g.p("}")
}
// generateGoogleTypeDateTimeMethods emits the convenience methods for the
// google.type.DateTime message: now(), toJsDate() and fromJsDate().
func (g *generator) generateGoogleTypeDateTimeMethods() {
	g.indent = " "

	// Field names of the DateTime message used in the emitted TypeScript.
	utcOffsetField := "utcOffset"
	timeOffsetField := "timeOffset"
	timeZoneField := "timeZone"

	// now() method
	g.p("/**")
	g.p(" * Creates `DateTime` for the current time.")
	g.p(" */")
	g.p("now(): DateTime {")
	g.indent = " "
	g.p("return this.fromJsDate(new globalThis.Date());")
	g.indent = " "
	g.p("}")

	// toJsDate() method
	g.p("/**")
	g.p(" * Creates a javascript Date object from the message.")
	g.p(" *")
	g.p(" * If a the message has a UTC offset, the javascript Date is converted")
	g.p(" * into your local time zone, because javascript Dates are always in the")
	g.p(" * local time zone.")
	g.p(" *")
	g.p(" * If the message has an offset given as an IANA timezone id, an error is")
	g.p(" * thrown, because javascript has no on-board support for IANA time zone")
	g.p(" * ids.")
	g.p(" */")
	g.p("toJsDate(message: DateTime): globalThis.Date {")
	g.indent = " "
	// NOTE(review): the emitted code divides nanos by 1000 for the Date
	// milliseconds argument and fromJsDate stores ms * 1000 — i.e. the nanos
	// field effectively carries microseconds. The pair round-trips
	// consistently, but confirm against the reference generator before
	// changing either side.
	g.p("let dt = new globalThis.Date(message.year, message.month - 1, message.day, message.hours, message.minutes, message.seconds, message.nanos / 1000), to = message.%s;", timeOffsetField)
	g.p("if (to) {")
	g.indent = " "
	g.p("if (to.oneofKind === \"%s\")", timeZoneField)
	g.indent = " "
	g.p("throw new globalThis.Error(\"IANA time zone not supported\");")
	g.indent = " "
	g.p("if (to.oneofKind === \"%s\") {", utcOffsetField)
	g.indent = " "
	g.p("let s = PbLong.from(to.%s.seconds).toNumber();", utcOffsetField)
	g.p("dt = new globalThis.Date(dt.getTime() - (s * 1000));")
	g.indent = " "
	g.p("}")
	g.indent = " "
	g.p("}")
	g.p("return dt;")
	g.indent = " "
	g.p("}")

	// fromJsDate() method
	g.p("/**")
	g.p(" * Creates a Date message from a javascript Date object.")
	g.p(" *")
	// NOTE(review): "UTF offset" in the emitted comment below looks like a
	// typo for "UTC offset", but it is part of the emitted output (possibly
	// mirroring a reference generator) and is left untouched here.
	g.p(" * Values are in local time and a proper UTF offset is provided.")
	g.p(" */")
	g.p("fromJsDate(date: globalThis.Date): DateTime {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("year: date.getFullYear(), month: date.getMonth() + 1, day: date.getDate(), hours: date.getHours(), minutes: date.getMinutes(), seconds: date.getSeconds(), nanos: date.getMilliseconds() * 1000, %s: {", timeOffsetField)
	g.indent = " "
	g.p("oneofKind: \"%s\", %s: {", utcOffsetField, utcOffsetField)
	g.indent = " "
	g.p("seconds: PbLong.from(date.getTimezoneOffset() * 60).toString(), nanos: 0,")
	g.indent = " "
	g.p("}")
	g.indent = " "
	g.p("}")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
}

// generateGoogleTypeTimeOfDayMethods emits the convenience method for the
// google.type.TimeOfDay message: fromJsDate().
func (g *generator) generateGoogleTypeTimeOfDayMethods() {
	g.indent = " "

	// fromJsDate() method
	g.p("/**")
	g.p(" * Creates a TimeOfDay message from a javascript Date object.")
	g.p(" */")
	g.p("fromJsDate(date: globalThis.Date): TimeOfDay {")
	g.indent = " "
	g.p("return {")
	g.indent = " "
	g.p("hours: date.getHours(), minutes: date.getMinutes(), seconds: date.getSeconds(), nanos: date.getMilliseconds() * 1000,")
	g.indent = " "
	g.p("};")
	g.indent = " "
	g.p("}")
}
diff --git a/protoc-gen-kaja/package-lock.json b/protoc-gen-kaja/package-lock.json
new file mode 100644
index 00000000..b7f88590
--- /dev/null
+++ b/protoc-gen-kaja/package-lock.json
@@ -0,0 +1,143 @@
+{
+  "name": "protoc-gen-kaja",
+  "lockfileVersion": 3,
+  "requires": true,
"packages": { + "": { + "devDependencies": { + "@protobuf-ts/plugin": "^2.11.1" + } + }, + "node_modules/@bufbuild/protobuf": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@bufbuild/protobuf/-/protobuf-2.11.0.tgz", + "integrity": "sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ==", + "dev": true, + "license": "(Apache-2.0 AND BSD-3-Clause)" + }, + "node_modules/@bufbuild/protoplugin": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@bufbuild/protoplugin/-/protoplugin-2.11.0.tgz", + "integrity": "sha512-lyZVNFUHArIOt4W0+dwYBe5GBwbKzbOy8ObaloEqsw9Mmiwv2O48TwddDoHN4itylC+BaEGqFdI1W8WQt2vWJQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@bufbuild/protobuf": "2.11.0", + "@typescript/vfs": "^1.6.2", + "typescript": "5.4.5" + } + }, + "node_modules/@bufbuild/protoplugin/node_modules/typescript": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/@protobuf-ts/plugin": { + "version": "2.11.1", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.11.1.tgz", + "integrity": "sha512-HyuprDcw0bEEJqkOWe1rnXUP0gwYLij8YhPuZyZk6cJbIgc/Q0IFgoHQxOXNIXAcXM4Sbehh6kjVnCzasElw1A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@bufbuild/protobuf": "^2.4.0", + "@bufbuild/protoplugin": "^2.4.0", + "@protobuf-ts/protoc": "^2.11.1", + "@protobuf-ts/runtime": "^2.11.1", + "@protobuf-ts/runtime-rpc": "^2.11.1", + "typescript": "^3.9" + }, + "bin": { + "protoc-gen-dump": "bin/protoc-gen-dump", + "protoc-gen-ts": "bin/protoc-gen-ts" + } + }, + "node_modules/@protobuf-ts/protoc": { + "version": "2.11.1", + "resolved": 
"https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.11.1.tgz", + "integrity": "sha512-mUZJaV0daGO6HUX90o/atzQ6A7bbN2RSuHtdwo8SSF2Qoe3zHwa4IHyCN1evftTeHfLmdz+45qo47sL+5P8nyg==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "protoc": "protoc.js" + } + }, + "node_modules/@protobuf-ts/runtime": { + "version": "2.11.1", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.11.1.tgz", + "integrity": "sha512-KuDaT1IfHkugM2pyz+FwiY80ejWrkH1pAtOBOZFuR6SXEFTsnb/jiQWQ1rCIrcKx2BtyxnxW6BWwsVSA/Ie+WQ==", + "dev": true, + "license": "(Apache-2.0 AND BSD-3-Clause)" + }, + "node_modules/@protobuf-ts/runtime-rpc": { + "version": "2.11.1", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.11.1.tgz", + "integrity": "sha512-4CqqUmNA+/uMz00+d3CYKgElXO9VrEbucjnBFEjqI4GuDrEQ32MaI3q+9qPBvIGOlL4PmHXrzM32vBPWRhQKWQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@protobuf-ts/runtime": "^2.11.1" + } + }, + "node_modules/@typescript/vfs": { + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/@typescript/vfs/-/vfs-1.6.4.tgz", + "integrity": "sha512-PJFXFS4ZJKiJ9Qiuix6Dz/OwEIqHD7Dme1UwZhTK11vR+5dqW2ACbdndWQexBzCx+CPuMe5WBYQWCsFyGlQLlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.4.3" + }, + "peerDependencies": { + "typescript": "*" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": 
"sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + } + } +} diff --git a/protoc-gen-kaja/package.json b/protoc-gen-kaja/package.json new file mode 100644 index 00000000..e3c44c79 --- /dev/null +++ b/protoc-gen-kaja/package.json @@ -0,0 +1,6 @@ +{ + "private": true, + "devDependencies": { + "@protobuf-ts/plugin": "^2.11.1" + } +} diff --git a/protoc-gen-kaja/protoc-gen-kaja b/protoc-gen-kaja/protoc-gen-kaja new file mode 100755 index 00000000..757b4898 Binary files /dev/null and b/protoc-gen-kaja/protoc-gen-kaja differ diff --git a/protoc-gen-kaja/scripts/diff b/protoc-gen-kaja/scripts/diff new file mode 100755 index 00000000..c68dfb90 --- /dev/null +++ b/protoc-gen-kaja/scripts/diff @@ -0,0 +1,155 @@ +#!/bin/bash +set -e + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)" +TESTS_DIR="$PROJECT_DIR/tests" +RESULTS_DIR="$PROJECT_DIR/results" + +# Usage: scripts/diff [test_name] +# Without arguments: shows diff for all failing tests +# With argument: shows detailed diff for a specific test + +TARGET="$1" + +show_diff() { + local test_dir="$1" + local test_name="$(basename "$test_dir")" + local expected_dir="$RESULTS_DIR/$test_name/expected" + local actual_dir="$RESULTS_DIR/$test_name/actual" + local ts_error_file="$RESULTS_DIR/$test_name/ts_error.txt" + local kaja_error_file="$RESULTS_DIR/$test_name/kaja_error.txt" + + if [ ! -d "$expected_dir" ] && [ ! -f "$ts_error_file" ]; then + echo "No results for $test_name. Run scripts/test first." 
+ return + fi + + echo "=== $test_name ===" + echo "" + + # Check if both generators failed + if [ -f "$ts_error_file" ] && [ -f "$kaja_error_file" ]; then + ts_err=$(cat "$ts_error_file" 2>/dev/null) + kaja_err=$(cat "$kaja_error_file" 2>/dev/null) + + if [ -n "$ts_err" ] || [ -n "$kaja_err" ]; then + echo "Both generators failed:" + echo "" + + # Normalize and compare + ts_err_normalized=$(echo "$ts_err" | sed 's/[[:space:]]*$//' | sed '/^$/d') + kaja_err_normalized=$(echo "$kaja_err" | sed 's/[[:space:]]*$//' | sed '/^$/d') + + if [ "$ts_err_normalized" = "$kaja_err_normalized" ]; then + echo "STATUS: PASS (both failed with identical error)" + echo "" + echo "Error message:" + echo "$ts_err" + else + echo "STATUS: FAIL (error messages differ)" + echo "" + diff_output=$(diff -u \ + <(echo "$ts_err" | sed 's/[[:space:]]*$//') \ + <(echo "$kaja_err" | sed 's/[[:space:]]*$//') \ + 2>&1) || true + echo "--- protoc-gen-ts error" + echo "+++ protoc-gen-kaja error" + echo "$diff_output" + fi + echo "" + return + fi + fi + + # Check if only one failed + if [ -f "$ts_error_file" ] && [ -s "$ts_error_file" ]; then + echo "STATUS: FAIL (protoc-gen-ts failed but protoc-gen-kaja succeeded)" + echo "" + echo "TS error:" + cat "$ts_error_file" + echo "" + return + fi + + if [ -f "$kaja_error_file" ] && [ -s "$kaja_error_file" ]; then + echo "STATUS: FAIL (protoc-gen-kaja failed but protoc-gen-ts succeeded)" + echo "" + echo "Kaja error:" + cat "$kaja_error_file" + echo "" + return + fi + + # Both succeeded - compare generated files + if [ ! -d "$expected_dir" ]; then + echo "No generated files to compare." + echo "" + return + fi + + # List expected files + expected_files=$(cd "$expected_dir" && find . -type f -name "*.ts" | sort) + actual_files="" + if [ -d "$actual_dir" ]; then + actual_files=$(cd "$actual_dir" && find . -type f -name "*.ts" | sort) + fi + + for f in $expected_files; do + if [ ! 
-f "$actual_dir/$f" ]; then + lines=$(wc -l < "$expected_dir/$f") + echo "MISSING $f ($lines lines)" + fi + done + + # Files only in actual (extra) + for f in $actual_files; do + if [ ! -f "$expected_dir/$f" ]; then + echo "EXTRA $f" + fi + done + + # Files in both - show diff + for f in $expected_files; do + if [ -f "$actual_dir/$f" ]; then + file_diff=$(diff -u "$expected_dir/$f" "$actual_dir/$f" 2>&1) || true + if [ -z "$file_diff" ]; then + echo "MATCH $f" + else + total_expected=$(wc -l < "$expected_dir/$f") + total_actual=$(wc -l < "$actual_dir/$f") + diff_lines=$(echo "$file_diff" | grep -c "^[-+]" || true) + echo "DIFF $f (expected: $total_expected lines, actual: $total_actual lines, $diff_lines changed lines)" + echo "$file_diff" + fi + fi + done + echo "" +} + +if [ -n "$TARGET" ]; then + test_dir="$TESTS_DIR/$TARGET" + if [ ! -d "$test_dir" ]; then + # Try matching by prefix + test_dir=$(find "$TESTS_DIR" -maxdepth 1 -type d -name "*$TARGET*" | head -1) + fi + if [ -z "$test_dir" ] || [ ! -d "$test_dir" ]; then + echo "Test not found: $TARGET" + echo "Available tests:" + ls -1 "$TESTS_DIR" + exit 1 + fi + show_diff "$test_dir" +else + for test_dir in "$TESTS_DIR"/*/; do + test_name="$(basename "$test_dir")" + expected_dir="$RESULTS_DIR/$test_name/expected" + actual_dir="$RESULTS_DIR/$test_name/actual" + if [ -d "$expected_dir" ]; then + diff_output=$(diff -ruN "$expected_dir" "$actual_dir" 2>&1) || true + if [ -n "$diff_output" ]; then + show_diff "$test_dir" + fi + fi + done +fi diff --git a/protoc-gen-kaja/scripts/loop b/protoc-gen-kaja/scripts/loop new file mode 100755 index 00000000..659eabe3 --- /dev/null +++ b/protoc-gen-kaja/scripts/loop @@ -0,0 +1,66 @@ +#!/usr/bin/env bash +set -euo pipefail + +MODEL="claude-opus-4.6" +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)" +PROMPT_FILE="$PROJECT_DIR/RALPH.md" +NELSON_FILE="$PROJECT_DIR/NELSON.md" +STATUS_FILE="$PROJECT_DIR/status.txt" +MAX_LOOPS=1000 + +if [[ ! 
-f "$PROMPT_FILE" ]]; then + echo "Error: $PROMPT_FILE not found" + exit 1 +fi + +if [[ ! -f "$NELSON_FILE" ]]; then + echo "Error: $NELSON_FILE not found" + exit 1 +fi + +for ((i=1; i<=MAX_LOOPS; i++)); do + echo "=== Loop $i/$MAX_LOOPS ===" + + # Check if status.txt contains "DONE" + if [[ -f "$STATUS_FILE" ]]; then + status=$(cat "$STATUS_FILE" | tr -d '\r\n' | tr -d '[:space:]') + if [[ "$status" == "DONE" ]]; then + echo "Status is 'DONE'. Running NELSON.md task..." + + # Run NELSON.md task + nelson_prompt=$(cat "$NELSON_FILE") + copilot --model "$MODEL" --yolo -p "$nelson_prompt" || { + echo "Error: GitHub Copilot CLI command failed for NELSON.md" + exit 1 + } + + # Check status again after NELSON task + if [[ -f "$STATUS_FILE" ]]; then + status=$(cat "$STATUS_FILE" | tr -d '\r\n' | tr -d '[:space:]') + if [[ "$status" == "DONE" ]]; then + echo "Status is still 'DONE' after NELSON task. Exiting." + exit 0 + else + echo "Status changed after NELSON task. Continuing with RALPH.md..." + fi + else + echo "Status file removed after NELSON task. Continuing with RALPH.md..." + fi + fi + fi + + # Read prompt from RALPH.md + prompt=$(cat "$PROMPT_FILE") + + # Run GitHub Copilot CLI in non-interactive mode with all permissions enabled + echo "Running GitHub Copilot CLI with prompt from $PROMPT_FILE" + copilot --model "$MODEL" --yolo -p "$prompt" || { + echo "Error: GitHub Copilot CLI command failed" + exit 1 + } + + echo "" +done + +echo "Reached maximum loops ($MAX_LOOPS). Exiting." diff --git a/protoc-gen-kaja/scripts/test b/protoc-gen-kaja/scripts/test new file mode 100755 index 00000000..d64cc1c3 --- /dev/null +++ b/protoc-gen-kaja/scripts/test @@ -0,0 +1,265 @@ +#!/bin/bash +set -e + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)" +ROOT_DIR="$(cd "$PROJECT_DIR/.." 
&& pwd)" +TESTS_DIR="$PROJECT_DIR/tests" +RESULTS_DIR="$PROJECT_DIR/results" + +SERVER_BUILD_DIR="$ROOT_DIR/server/build" +PROTOC_GEN_TS="$PROJECT_DIR/node_modules/.bin/protoc-gen-ts" +PROTOC_GEN_KAJA="$SERVER_BUILD_DIR/protoc-gen-kaja" + +# Parse flags +SUMMARY_ONLY=false +for arg in "$@"; do + case "$arg" in + --summary) SUMMARY_ONLY=true ;; + esac +done + +# -- setup --------------------------------------------------------------- + +PROTOC="$SERVER_BUILD_DIR/protoc" +INCLUDE_DIR="$SERVER_BUILD_DIR/include" + +# Install protoc if needed +source "$ROOT_DIR/scripts/common" +install_protoc "$SERVER_BUILD_DIR" + +# Install protoc-gen-ts if needed +if [ ! -x "$PROTOC_GEN_TS" ]; then + echo "Installing protoc-gen-ts..." + (cd "$PROJECT_DIR" && npm i) +fi + +# Build protoc-gen-kaja into server/build +echo "Building protoc-gen-kaja..." +mkdir -p "$SERVER_BUILD_DIR" +(cd "$PROJECT_DIR" && go build -o "$PROTOC_GEN_KAJA" .) + +# -- test loop ------------------------------------------------------------ + +# Clean and create results directory +rm -rf "$RESULTS_DIR" +mkdir -p "$RESULTS_DIR" + +WORKSPACE_DIR="$ROOT_DIR/workspace" + +# Millisecond timer helper (uses perl for portability) +now_ms() { + perl -MTime::HiRes=time -e 'printf "%d\n", time()*1000' +} + +# Each test writes a result file: $RESULTS_DIR/$test_name/result.txt +# Format: STATUS ts_ms kaja_ms +# STATUS is PASS or FAIL +# On failure, details go to $RESULTS_DIR/$test_name/failure.txt + +run_test() { + local test_name="$1" + local test_dir="${2%/}" # Remove trailing slash if present + local recursive="$3" + local expected_dir="$RESULTS_DIR/$test_name/expected" + local actual_dir="$RESULTS_DIR/$test_name/actual" + local result_file="$RESULTS_DIR/$test_name/result.txt" + local failure_file="$RESULTS_DIR/$test_name/failure.txt" + + mkdir -p "$expected_dir" "$actual_dir" + + # Collect proto files + if [ "$recursive" = "true" ]; then + proto_files=$(find "$test_dir" -name "*.proto" | sort) + else + proto_files=$(find 
"$test_dir" -maxdepth 1 -name "*.proto" | sort) + fi + + if [ -z "$proto_files" ]; then + echo "PASS 0 0" > "$result_file" + return + fi + + # Convert to relative paths from test_dir + proto_basenames="" + for f in $proto_files; do + rel_path="${f#$test_dir/}" + proto_basenames="$proto_basenames $rel_path" + done + + # Run protoc-gen-ts (expected output) + ts_error_file="$RESULTS_DIR/$test_name/ts_error.txt" + ts_ok=true + local ts_start=$(now_ms) + (cd "$test_dir" && "$PROTOC" \ + --plugin="protoc-gen-ts=$PROTOC_GEN_TS" \ + --ts_out="$expected_dir" \ + --ts_opt=long_type_string \ + -I"$INCLUDE_DIR" \ + -I. \ + $proto_basenames) 2>"$ts_error_file" || ts_ok=false + local ts_elapsed=$(( $(now_ms) - ts_start )) + + # Run protoc-gen-kaja (actual output) + kaja_error_file="$RESULTS_DIR/$test_name/kaja_error.txt" + kaja_ok=true + local kaja_start=$(now_ms) + (cd "$test_dir" && "$PROTOC" \ + --plugin="protoc-gen-kaja=$PROTOC_GEN_KAJA" \ + --kaja_out="$actual_dir" \ + -I"$INCLUDE_DIR" \ + -I. \ + $proto_basenames) 2>"$kaja_error_file" || kaja_ok=false + local kaja_elapsed=$(( $(now_ms) - kaja_start )) + + # If both failed, compare error messages + if [ "$ts_ok" = false ] && [ "$kaja_ok" = false ]; then + ts_err=$(cat "$ts_error_file" 2>/dev/null || echo "") + kaja_err=$(cat "$kaja_error_file" 2>/dev/null || echo "") + + # Normalize whitespace and compare + ts_err_normalized=$(echo "$ts_err" | sed 's/[[:space:]]*$//' | sed '/^$/d') + kaja_err_normalized=$(echo "$kaja_err" | sed 's/[[:space:]]*$//' | sed '/^$/d') + + if [ "$ts_err_normalized" = "$kaja_err_normalized" ]; then + echo "PASS $ts_elapsed $kaja_elapsed" > "$result_file" + else + echo "FAIL $ts_elapsed $kaja_elapsed" > "$result_file" + { + echo "error mismatch" + echo "--- protoc-gen-ts error" + echo "+++ protoc-gen-kaja error" + diff -u \ + <(echo "$ts_err" | sed 's/[[:space:]]*$//') \ + <(echo "$kaja_err" | sed 's/[[:space:]]*$//') \ + 2>&1 || true + } > "$failure_file" + fi + return + fi + + # If one succeeded 
and the other failed, it's a failure + if [ "$ts_ok" != "$kaja_ok" ]; then + echo "FAIL $ts_elapsed $kaja_elapsed" > "$result_file" + if [ "$ts_ok" = true ]; then + { + echo "protoc-gen-kaja failed but protoc-gen-ts succeeded" + echo "" + echo "Kaja error:" + cat "$kaja_error_file" 2>/dev/null || echo "(no error output)" + } > "$failure_file" + else + { + echo "protoc-gen-ts failed but protoc-gen-kaja succeeded" + echo "" + echo "TS error:" + cat "$ts_error_file" 2>/dev/null || echo "(no error output)" + } > "$failure_file" + fi + return + fi + + # Compare outputs + diff_output=$(diff -ruN "$expected_dir" "$actual_dir" 2>&1) || true + + if [ -z "$diff_output" ]; then + echo "PASS $ts_elapsed $kaja_elapsed" > "$result_file" + else + echo "FAIL $ts_elapsed $kaja_elapsed" > "$result_file" + echo "$diff_output" > "$failure_file" + fi +} + +# Launch all tests in parallel +PIDS=() +TEST_NAMES=() + +for test_dir in "$TESTS_DIR"/*/; do + if [ -d "$test_dir" ]; then + test_name="$(basename "$test_dir")" + # Tests that need recursive scanning due to subdirectories + if [ "$test_name" = "000_big" ] || [ "$test_name" = "205_cross_dir_wiretype_import" ]; then + run_test "$test_name" "$test_dir" "true" & + else + run_test "$test_name" "$test_dir" "false" & + fi + PIDS+=($!) + TEST_NAMES+=("$test_name") + fi +done + +for project in grpcbin quirks teams users; do + proto_dir="$WORKSPACE_DIR/$project/proto" + if [ -d "$proto_dir" ]; then + run_test "$project" "$proto_dir" "true" & + PIDS+=($!) + TEST_NAMES+=("$project") + fi +done + +# Wait for all tests to finish +wait "${PIDS[@]}" + +# -- collect results ------------------------------------------------------ + +PASS=0 +FAIL=0 +ERRORS="" +TS_TOTAL_MS=0 +KAJA_TOTAL_MS=0 + +for test_name in "${TEST_NAMES[@]}"; do + result_file="$RESULTS_DIR/$test_name/result.txt" + failure_file="$RESULTS_DIR/$test_name/failure.txt" + + if [ ! 
-f "$result_file" ]; then + continue + fi + + read -r status ts_ms kaja_ms < "$result_file" + TS_TOTAL_MS=$(( TS_TOTAL_MS + ts_ms )) + KAJA_TOTAL_MS=$(( KAJA_TOTAL_MS + kaja_ms )) + + if [ "$status" = "PASS" ]; then + PASS=$((PASS + 1)) + if [ "$SUMMARY_ONLY" = false ]; then + echo "PASS $test_name" + fi + else + FAIL=$((FAIL + 1)) + if [ "$SUMMARY_ONLY" = false ]; then + echo "FAIL $test_name" + if [ -f "$failure_file" ]; then + head -40 "$failure_file" + echo "" + fi + fi + if [ -f "$failure_file" ]; then + ERRORS="$ERRORS\n--- $test_name ---\n$(cat "$failure_file")" + fi + fi +done + +# -- summary -------------------------------------------------------------- + +TOTAL=$((PASS + FAIL)) +echo "" +echo "Results: $PASS/$TOTAL passed, $FAIL/$TOTAL failed" + +TS_SEC=$(echo "scale=1; $TS_TOTAL_MS / 1000" | bc) +KAJA_SEC=$(echo "scale=1; $KAJA_TOTAL_MS / 1000" | bc) +if [ "$KAJA_TOTAL_MS" -gt 0 ]; then + SPEEDUP=$(echo "scale=1; $TS_TOTAL_MS / $KAJA_TOTAL_MS" | bc) +else + SPEEDUP="N/A" +fi +echo "Performance: protoc-gen-ts ${TS_SEC}s, protoc-gen-kaja ${KAJA_SEC}s (${SPEEDUP}x faster)" + +if [ "$FAIL" -gt 0 ]; then + if [ "$SUMMARY_ONLY" = true ]; then + echo "" + echo "Failing tests:" + echo -e "$ERRORS" | grep "^--- " | sed 's/^--- / /' | sed 's/ ---$//' + fi + exit 1 +fi diff --git a/protoc-gen-kaja/tests/000_big/analytics/events.proto b/protoc-gen-kaja/tests/000_big/analytics/events.proto new file mode 100644 index 00000000..c9a49e18 --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/analytics/events.proto @@ -0,0 +1,129 @@ +syntax = "proto3"; + +package ecommerce.analytics; + +import "google/protobuf/timestamp.proto"; +import "common/types.proto"; + +// Event type enum with reserved values +enum EventType { + EVENT_TYPE_UNSPECIFIED = 0; + EVENT_TYPE_PAGE_VIEW = 1; + EVENT_TYPE_CLICK = 2; + EVENT_TYPE_SEARCH = 3; + EVENT_TYPE_PURCHASE = 4; + EVENT_TYPE_ADD_TO_CART = 5; + reserved 6, 7; + reserved "EVENT_TYPE_REMOVED", "EVENT_TYPE_LEGACY"; +} + +// Same-name type as 
common.Status to test cross-package name collision +enum Status { + ANALYTICS_STATUS_UNSPECIFIED = 0; + ANALYTICS_STATUS_PROCESSED = 1; + ANALYTICS_STATUS_FAILED = 2; +} + +// Event with oneof payload including reserved name member +message Event { + string id = 1; + string user_id = 2; + string session_id = 3; + EventType type = 4; + google.protobuf.Timestamp timestamp = 5; + + oneof payload { + PageViewEvent page_view = 6; + ClickEvent click = 7; + SearchEvent search = 8; + PurchaseEvent purchase = 9; + // Oneof member with JS reserved name + int32 constructor = 12; + } + + map properties = 10; + common.Metadata metadata = 11; + // Local status vs common.Status + Status analytics_status = 13; +} + +message PageViewEvent { + string url = 1; + string referrer = 2; + int32 duration_ms = 3; +} + +message ClickEvent { + string element_id = 1; + string element_type = 2; + string page_url = 3; +} + +message SearchEvent { + string query = 1; + int32 results_count = 2; + repeated string filters = 3; +} + +message PurchaseEvent { + string order_id = 1; + common.Money total = 2; + repeated string product_ids = 3; +} + +message TrackEventRequest { + Event event = 1; +} + +message TrackEventResponse { + bool success = 1; +} + +// Streaming request for real-time event feed +message StreamEventsRequest { + string user_id = 1; + repeated EventType types = 2; +} + +// Request for batch event ingestion +message BatchEvent { + Event event = 1; + string source = 2; +} + +// Response for batch event ingestion +message BatchEventResponse { + int32 processed_count = 1; + int32 failed_count = 2; +} + +message GetEventsRequest { + string user_id = 1; + EventType type = 2; + google.protobuf.Timestamp start_time = 3; + google.protobuf.Timestamp end_time = 4; + common.PageInfo page = 5; +} + +message GetEventsResponse { + repeated Event events = 1; + common.PageInfo page = 2; +} + +// Analytics service with all 4 streaming types +service AnalyticsService { + // Unary + rpc 
TrackEvent(TrackEventRequest) returns (TrackEventResponse); + // Unary + rpc GetEvents(GetEventsRequest) returns (GetEventsResponse); + // Server-streaming + rpc StreamEvents(StreamEventsRequest) returns (stream Event); + // Client-streaming + rpc BatchIngest(stream BatchEvent) returns (BatchEventResponse); + // Bidirectional streaming + rpc LiveAnalytics(stream TrackEventRequest) returns (stream GetEventsResponse); +} + +// Empty service with no methods +service DebugService { +} diff --git a/protoc-gen-kaja/tests/000_big/auth/user.proto b/protoc-gen-kaja/tests/000_big/auth/user.proto new file mode 100644 index 00000000..ec065295 --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/auth/user.proto @@ -0,0 +1,170 @@ +syntax = "proto3"; + +package ecommerce.auth; + +import "common/options.proto"; +import "common/types.proto"; + +// User role enum +enum Role { + ROLE_UNSPECIFIED = 0; + ROLE_CUSTOMER = 1; + ROLE_ADMIN = 2; + ROLE_SUPPORT = 3; +} + +// Nested permission message +message Permission { + string resource = 1; + repeated string actions = 2; + + message Scope { + string type = 1; + repeated string ids = 2; + } + + Scope scope = 3; +} + +// User profile with nested messages +message UserProfile { + option (common.resource) = { + type: "auth.googleapis.com/User" + pattern: "users/{user_id}" + }; + + string id = 1; + string username = 2; + string email = 3 [(common.sensitive) = true]; + Role role = 4; + common.Address address = 5; + repeated Permission permissions = 6; + common.Metadata metadata = 7; + common.AuditInfo audit = 8; + + // Nested preferences + message Preferences { + string language = 1; + string timezone = 2; + bool email_notifications = 3; + bool sms_notifications = 4; + } + + Preferences preferences = 9; + // Field with json_name + string display_name = 10 [json_name = "displayName"]; + // Reserved property name + bool __proto__ = 11; +} + +// Authentication request with oneof +message AuthRequest { + oneof credentials { + string password = 1; + 
string oauth_token = 2; + string api_key = 3; + } + string username = 4; + string device_id = 5; +} + +message AuthResponse { + string access_token = 1; + string refresh_token = 2; + UserProfile profile = 3; +} + +message CreateUserRequest { + string username = 1; + string email = 2; + string password = 3 [(common.sensitive) = true]; + Role role = 4; + common.Address address = 5; +} + +message CreateUserResponse { + UserProfile profile = 1; +} + +message GetUserRequest { + string id = 1; +} + +message GetUserResponse { + UserProfile profile = 1; +} + +// Request for listing users with server streaming +message ListUsersRequest { + Role role = 1; + common.PageInfo page = 2; +} + +// Response for listing users +message ListUsersResponse { + UserProfile profile = 1; +} + +// Request for bulk creating users with client streaming +message BulkCreateUsersRequest { + string username = 1; + string email = 2; + string password = 3; + Role role = 4; +} + +// Response for bulk user creation +message BulkCreateUsersResponse { + int32 created_count = 1; + repeated string user_ids = 2; +} + +// Chat message for bidirectional streaming +message UserChatMessage { + string user_id = 1; + string content = 2; +} + +message UpdateUserRequest { + string id = 1; + UserProfile profile = 2; +} + +message UpdateUserResponse { + UserProfile profile = 1; +} + +// This is a detached comment about the AuthService. + +// It provides authentication and user management. 
+ +// Auth service with all RPC types +service AuthService { + // Unary RPC + rpc Authenticate(AuthRequest) returns (AuthResponse); + + // Unary with custom method options + rpc CreateUser(CreateUserRequest) returns (CreateUserResponse) { + option (common.api_version) = "v2"; + option (common.rate_limit) = 100; + } + + // Unary with NO_SIDE_EFFECTS idempotency + rpc GetUser(GetUserRequest) returns (GetUserResponse) { + option idempotency_level = NO_SIDE_EFFECTS; + } + + // Unary with IDEMPOTENT idempotency + rpc UpdateUser(UpdateUserRequest) returns (UpdateUserResponse) { + option idempotency_level = IDEMPOTENT; + } + + // Server-streaming RPC + rpc ListUsers(ListUsersRequest) returns (stream ListUsersResponse); + + // Client-streaming RPC + rpc BulkCreateUsers(stream BulkCreateUsersRequest) returns (BulkCreateUsersResponse); + + // Bidirectional streaming RPC + rpc UserChat(stream UserChatMessage) returns (stream UserChatMessage); +} diff --git a/protoc-gen-kaja/tests/000_big/common/options.proto b/protoc-gen-kaja/tests/000_big/common/options.proto new file mode 100644 index 00000000..6fb37d9b --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/common/options.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package ecommerce.common; + +import "google/protobuf/descriptor.proto"; + +// Resource information for message-level option +message ResourceInfo { + string type = 1; + string pattern = 2; +} + +// Custom method options +extend google.protobuf.MethodOptions { + string api_version = 51001; + int32 rate_limit = 51002; + bool deprecated_api = 51003; +} + +// Custom message option with message-typed value +extend google.protobuf.MessageOptions { + ResourceInfo resource = 51004; +} + +// Custom field option +extend google.protobuf.FieldOptions { + bool sensitive = 51005; +} diff --git a/protoc-gen-kaja/tests/000_big/common/types.proto b/protoc-gen-kaja/tests/000_big/common/types.proto new file mode 100644 index 00000000..58d013ea --- /dev/null +++ 
b/protoc-gen-kaja/tests/000_big/common/types.proto @@ -0,0 +1,165 @@ +syntax = "proto3"; + +package ecommerce.common; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +// Common status enum used across services +enum Status { + STATUS_UNSPECIFIED = 0; + STATUS_ACTIVE = 1; + STATUS_INACTIVE = 2; + STATUS_PENDING = 3; + STATUS_DELETED = 4; +} + +// Enum with deprecated values +enum Priority { + PRIORITY_UNSPECIFIED = 0; + PRIORITY_LOW = 1; + PRIORITY_MEDIUM = 2; + PRIORITY_HIGH = 3; + // Deprecated: use PRIORITY_HIGH instead + PRIORITY_CRITICAL = 4 [deprecated = true]; +} + +// Enum with allow_alias - multiple names for the same value +enum Region { + option allow_alias = true; + + REGION_UNSPECIFIED = 0; + REGION_US_EAST = 1; + REGION_US_WEST = 2; + REGION_EU = 3; + REGION_EUROPE = 3; + REGION_ASIA = 4; + REGION_AP = 4; +} + +// Message containing all 15 proto3 scalar types +message AllScalars { + double double_field = 1; + float float_field = 2; + int32 int32_field = 3; + int64 int64_field = 4; + uint32 uint32_field = 5; + uint64 uint64_field = 6; + sint32 sint32_field = 7; + sint64 sint64_field = 8; + fixed32 fixed32_field = 9; + fixed64 fixed64_field = 10; + sfixed32 sfixed32_field = 11; + sfixed64 sfixed64_field = 12; + bool bool_field = 13; + string string_field = 14; + bytes bytes_field = 15; + // Repeated bytes field + repeated bytes repeated_bytes = 16; +} + +// Message containing all well-known types +message AllWellKnownTypes { + google.protobuf.Any any_field = 1; + google.protobuf.Duration duration_field = 2; + google.protobuf.Empty empty_field = 3; + google.protobuf.FieldMask mask_field = 4; + google.protobuf.Struct struct_field = 5; + google.protobuf.Timestamp timestamp_field = 6; + google.protobuf.DoubleValue 
double_value = 7; + google.protobuf.FloatValue float_value = 8; + google.protobuf.Int64Value int64_value = 9; + google.protobuf.UInt64Value uint64_value = 10; + google.protobuf.Int32Value int32_value = 11; + google.protobuf.UInt32Value uint32_value = 12; + google.protobuf.BoolValue bool_value = 13; + google.protobuf.StringValue string_value = 14; + google.protobuf.BytesValue bytes_value = 15; +} + +// Address with scalar types. +// Contains location data with latitude/longitude. +// +// Special chars in comment: & "quotes" and backslash-n \n +message Address { + string street = 1; + string city = 2; + string state = 3; + string zip_code = 4; + string country = 5; + double latitude = 6; // trailing: latitude in degrees + double longitude = 7; // trailing: longitude in degrees + bool is_primary = 8; +} + +// Metadata with various map key types +message Metadata { + map labels = 1; + map counters = 2; + map flags = 3; + map indexed_data = 4; + // Bool map key + map bool_map = 5; + // Int64 map key + map long_map = 6; + // Map with message values + map price_map = 7; +} + +// Audit info with timestamps +message AuditInfo { + google.protobuf.Timestamp created_at = 1; + google.protobuf.Timestamp updated_at = 2; + string created_by = 3; + string updated_by = 4; + int32 version = 5; +} + +// Money representation +message Money { + string currency = 1; + int64 amount_cents = 2; +} + +// Pagination +message PageInfo { + int32 page = 1; + int32 page_size = 2; + int32 total_count = 3; + bool has_next = 4; + bool has_prev = 5; +} + +// This is a detached comment about reserved fields + +// Message with reserved field numbers and names +message ReservedExample { + string active_field = 1; + reserved 2, 3, 4; + reserved 15 to 20; + reserved "old_field", "deprecated_field", "removed_field"; + string current_field = 5; +} + +// Deprecated: use FeatureFlags instead +message LegacyConfig { + option deprecated = true; + string key = 1; + string value = 2; +} + +// Message with 
deprecated fields +message FeatureFlags { + string name = 1; + bool enabled = 2; + // Deprecated: use enabled instead + bool active = 3 [deprecated = true]; + string description = 4; + // Deprecated: use description instead + string info = 5 [deprecated = true]; +} diff --git a/protoc-gen-kaja/tests/000_big/internal/collision.proto b/protoc-gen-kaja/tests/000_big/internal/collision.proto new file mode 100644 index 00000000..852fbf67 --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/internal/collision.proto @@ -0,0 +1,38 @@ +syntax = "proto3"; + +package ecommerce.internal; + +import "common/types.proto"; + +// Message named WireType - collides with protobuf-ts runtime import +message WireType { + string name = 1; + int32 id = 2; +} + +// Message named MessageType - collides with protobuf-ts runtime import +message MessageType { + string name = 1; + string description = 2; +} + +// Same-name type as common.Metadata to test cross-package collision +message Metadata { + string internal_id = 1; + string source = 2; +} + +// Container with nested messages referencing collision types +message Container { + WireType wire = 1; + MessageType msg = 2; + Metadata meta = 3; + common.Metadata common_meta = 4; + + message Inner { + string value = 1; + WireType wire_ref = 2; + } + + Inner inner = 5; +} diff --git a/protoc-gen-kaja/tests/000_big/root.proto b/protoc-gen-kaja/tests/000_big/root.proto new file mode 100644 index 00000000..bf86b54e --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/root.proto @@ -0,0 +1,41 @@ +syntax = "proto3"; + +package ecommerce; + +// Re-export common types via import public +import public "common/types.proto"; +import "common/options.proto"; +import "auth/user.proto"; +import "store/product.proto"; +import "store/order.proto"; +import "analytics/events.proto"; +import "internal/collision.proto"; + +// Aggregated response combining data from multiple services +message Dashboard { + auth.UserProfile user = 1; + repeated store.Product 
featured_products = 2; + repeated store.Order recent_orders = 3; + repeated analytics.Event recent_events = 4; + common.Metadata metadata = 5; + // Circular self-reference via optional + optional Dashboard parent = 6; + // Proto3 optional scalar fields + optional string title = 7; + optional int32 refresh_interval_seconds = 8; + optional bool is_public = 9; + // Reference to internal collision types + internal.Container internal_data = 10; +} + +// Top-level message named Entry to coexist with map entry types +message Entry { + string key = 1; + string value = 2; +} + +// Settings with a map field (generates SettingsEntry internally) +message Settings { + map<string, string> properties = 1; + map<string, Entry> entries = 2; +} diff --git a/protoc-gen-kaja/tests/000_big/store/order.proto b/protoc-gen-kaja/tests/000_big/store/order.proto new file mode 100644 index 00000000..c9b55363 --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/store/order.proto @@ -0,0 +1,172 @@ +syntax = "proto3"; + +package ecommerce.store; + +import "google/protobuf/timestamp.proto"; +import "common/types.proto"; +import "store/product.proto"; + +// Order status enum +enum OrderStatus { + ORDER_STATUS_UNSPECIFIED = 0; + ORDER_STATUS_PENDING = 1; + ORDER_STATUS_CONFIRMED = 2; + ORDER_STATUS_SHIPPED = 3; + ORDER_STATUS_DELIVERED = 4; + ORDER_STATUS_CANCELLED = 5; + ORDER_STATUS_REFUNDED = 6; +} + +// Payment method with allow_alias +enum PaymentMethod { + option allow_alias = true; + + PAYMENT_METHOD_UNSPECIFIED = 0; + PAYMENT_METHOD_CREDIT_CARD = 1; + PAYMENT_METHOD_VISA = 1; + PAYMENT_METHOD_DEBIT_CARD = 2; + PAYMENT_METHOD_PAYPAL = 3; + PAYMENT_METHOD_CRYPTO = 4; + PAYMENT_METHOD_BITCOIN = 4; +} + +// Order with 4-level deep nesting +message Order { + string id = 1; + string user_id = 2; + OrderStatus status = 3; + + message LineItem { + string product_id = 1; + string variant_id = 2; + int32 quantity = 3; + common.Money unit_price = 4; + common.Money total_price = 5; + Product product = 6; + } + + repeated LineItem items 
= 4; + common.Money subtotal = 5; + common.Money tax = 6; + common.Money shipping = 7; + common.Money total = 8; + + message ShippingInfo { + common.Address address = 1; + string carrier = 2; + string tracking_number = 3; + google.protobuf.Timestamp estimated_delivery = 4; + } + + ShippingInfo shipping_info = 9; + + message PaymentInfo { + PaymentMethod method = 1; + string transaction_id = 2; + common.Money amount = 3; + google.protobuf.Timestamp processed_at = 4; + } + + PaymentInfo payment = 10; + common.AuditInfo audit = 11; + + // 4-level deep nesting: Order > Fulfillment > Tracking > Checkpoint + message Fulfillment { + string fulfillment_id = 1; + repeated LineItem items = 2; + + message Tracking { + string carrier = 1; + string tracking_number = 2; + + message Checkpoint { + string location = 1; + string status = 2; + google.protobuf.Timestamp timestamp = 3; + string description = 4; + } + + repeated Checkpoint checkpoints = 3; + Checkpoint latest = 4; + } + + Tracking tracking = 3; + google.protobuf.Timestamp shipped_at = 4; + } + + repeated Fulfillment fulfillments = 12; + + // Multiple oneofs in one message + oneof payment_source { + string credit_card_id = 13; + string bank_account_id = 14; + string wallet_id = 15; + } + + oneof discount { + string coupon_code = 16; + int32 loyalty_points = 17; + double percentage_off = 18; + } + + // Proto3 optional fields + optional string notes = 19; + optional bool gift_wrap = 20; + optional int32 priority_level = 21; + + // Bool map key + map flags = 22; + + // Field named oneof_kind (discriminator collision) + string oneof_kind = 23; +} + +// Self-referential message +message OrderGroup { + string id = 1; + string name = 2; + repeated OrderGroup children = 3; + repeated Order orders = 4; +} + +message CreateOrderRequest { + string user_id = 1; + + message Item { + string product_id = 1; + string variant_id = 2; + int32 quantity = 3; + } + + repeated Item items = 2; + common.Address shipping_address = 3; + 
PaymentMethod payment_method = 4; +} + +message CreateOrderResponse { + Order order = 1; +} + +message GetOrderRequest { + string id = 1; +} + +message GetOrderResponse { + Order order = 1; +} + +message UpdateOrderStatusRequest { + string id = 1; + OrderStatus status = 2; +} + +message UpdateOrderStatusResponse { + Order order = 1; +} + +// Order service +service OrderService { + rpc CreateOrder(CreateOrderRequest) returns (CreateOrderResponse); + rpc GetOrder(GetOrderRequest) returns (GetOrderResponse); + rpc UpdateOrderStatus(UpdateOrderStatusRequest) returns (UpdateOrderStatusResponse); +} diff --git a/protoc-gen-kaja/tests/000_big/store/product.proto b/protoc-gen-kaja/tests/000_big/store/product.proto new file mode 100644 index 00000000..d0275e43 --- /dev/null +++ b/protoc-gen-kaja/tests/000_big/store/product.proto @@ -0,0 +1,126 @@ +// This file documents the product catalog. +// It includes a JSDoc-breaking */ character in this comment. +syntax = "proto3"; + +package ecommerce.store; + +import "common/types.proto"; + +// Product category enum +enum Category { + CATEGORY_UNSPECIFIED = 0; + CATEGORY_ELECTRONICS = 1; + CATEGORY_CLOTHING = 2; + CATEGORY_BOOKS = 3; + CATEGORY_HOME = 4; + CATEGORY_SPORTS = 5; +} + +// Product with nested variants +message Product { + string id = 1; + string name = 2; + string description = 3; + Category category = 4; + common.Money price = 5; + repeated string tags = 6; + repeated string image_urls = 7; + common.Status status = 8; + + // Nested variant message (also see top-level Product_Variant) + message Variant { + string id = 1; + string name = 2; + map attributes = 3; + common.Money price = 4; + int32 stock_quantity = 5; + string sku = 6; + } + + repeated Variant variants = 9; + common.Metadata metadata = 10; + common.AuditInfo audit = 11; + // jstype JS_STRING on int64 field + int64 view_count = 12 [jstype = JS_STRING]; + // jstype JS_NUMBER on uint64 field + uint64 serial_number = 13 [jstype = JS_NUMBER]; + // json_name 
with special characters + string internal_code = 14 [json_name = "@code"]; + // TypeScript keywords as field names + string const = 15; + string class = 16; + string typeof = 17; + string delete = 18; +} + +// Top-level message that collides with Product.Variant in generated code +message Product_Variant { + string variant_id = 1; + string product_id = 2; + string label = 3; +} + +// Review with nested rating breakdown +message Review { + string id = 1; + string product_id = 2; + string user_id = 3; + int32 rating = 4; + string title = 5; + string comment = 6; + repeated string image_urls = 7; + bool verified_purchase = 8; + int32 helpful_count = 9; + + message RatingBreakdown { + int32 quality = 1; + int32 value = 2; + int32 accuracy = 3; + } + + RatingBreakdown breakdown = 10; + common.AuditInfo audit = 11; +} + +message CreateProductRequest { + string name = 1; + string description = 2; + Category category = 3; + common.Money price = 4; + repeated string tags = 5; +} + +message CreateProductResponse { + Product product = 1; +} + +message ListProductsRequest { + Category category = 1; + repeated string tags = 2; + common.PageInfo page = 3; +} + +message ListProductsResponse { + repeated Product products = 1; + common.PageInfo page = 2; +} + +message GetProductRequest { + string id = 1; +} + +message GetProductResponse { + Product product = 1; + repeated Review reviews = 2; +} + +// Product service +service ProductService { + rpc CreateProduct(CreateProductRequest) returns (CreateProductResponse); + rpc GetProduct(GetProductRequest) returns (GetProductResponse); + rpc ListProducts(ListProductsRequest) returns (ListProductsResponse); + // Deprecated method + rpc LegacySearch(ListProductsRequest) returns (ListProductsResponse) { + option deprecated = true; + } +} diff --git a/protoc-gen-kaja/tests/001_basic_message/basic_message.proto b/protoc-gen-kaja/tests/001_basic_message/basic_message.proto new file mode 100644 index 00000000..ce5dffc2 --- /dev/null +++ 
b/protoc-gen-kaja/tests/001_basic_message/basic_message.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +message Person { + string name = 1; + int32 age = 2; + string email = 3; +} + +message Address { + string street = 1; + string city = 2; + string country = 3; + string zip_code = 4; +} diff --git a/protoc-gen-kaja/tests/002_scalar_types/scalar_types.proto b/protoc-gen-kaja/tests/002_scalar_types/scalar_types.proto new file mode 100644 index 00000000..57afe885 --- /dev/null +++ b/protoc-gen-kaja/tests/002_scalar_types/scalar_types.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +message ScalarTypes { + double double_field = 1; + float float_field = 2; + int32 int32_field = 3; + int64 int64_field = 4; + uint32 uint32_field = 5; + uint64 uint64_field = 6; + sint32 sint32_field = 7; + sint64 sint64_field = 8; + fixed32 fixed32_field = 9; + fixed64 fixed64_field = 10; + sfixed32 sfixed32_field = 11; + sfixed64 sfixed64_field = 12; + bool bool_field = 13; + string string_field = 14; + bytes bytes_field = 15; +} diff --git a/protoc-gen-kaja/tests/003_enums/enums.proto b/protoc-gen-kaja/tests/003_enums/enums.proto new file mode 100644 index 00000000..38601ec8 --- /dev/null +++ b/protoc-gen-kaja/tests/003_enums/enums.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package enums; + +enum Color { + COLOR_UNKNOWN = 0; + COLOR_RED = 1; + COLOR_GREEN = 2; + COLOR_BLUE = 3; +} + +enum Size { + SIZE_UNKNOWN = 0; + SIZE_SMALL = 1; + SIZE_MEDIUM = 2; + SIZE_LARGE = 3; +} + +message Paint { + string name = 1; + Color color = 2; + Size size = 3; +} diff --git a/protoc-gen-kaja/tests/004_nested_messages/nested_messages.proto b/protoc-gen-kaja/tests/004_nested_messages/nested_messages.proto new file mode 100644 index 00000000..4b97afd9 --- /dev/null +++ b/protoc-gen-kaja/tests/004_nested_messages/nested_messages.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package nested; + +message Outer { + message Middle { + message Inner { + string value = 1; + } + Inner inner = 1; + string name = 2; + 
} + + enum Status { + STATUS_UNKNOWN = 0; + STATUS_ACTIVE = 1; + STATUS_INACTIVE = 2; + } + + Middle middle = 1; + Status status = 2; + string label = 3; +} diff --git a/protoc-gen-kaja/tests/005_repeated_fields/repeated_fields.proto b/protoc-gen-kaja/tests/005_repeated_fields/repeated_fields.proto new file mode 100644 index 00000000..075d8478 --- /dev/null +++ b/protoc-gen-kaja/tests/005_repeated_fields/repeated_fields.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +message Tag { + string key = 1; + string value = 2; +} + +message Collection { + repeated string names = 1; + repeated int32 scores = 2; + repeated bool flags = 3; + repeated Tag tags = 4; + repeated bytes data = 5; +} diff --git a/protoc-gen-kaja/tests/006_map_fields/map_fields.proto b/protoc-gen-kaja/tests/006_map_fields/map_fields.proto new file mode 100644 index 00000000..8acfe37f --- /dev/null +++ b/protoc-gen-kaja/tests/006_map_fields/map_fields.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +message Value { + string data = 1; +} + +message MapContainer { + map<string, string> string_map = 1; + map<string, int32> counts = 2; + map<int32, string> index = 3; + map<string, Value> objects = 4; + map<string, bool> flags = 5; +} diff --git a/protoc-gen-kaja/tests/007_oneof/oneof.proto b/protoc-gen-kaja/tests/007_oneof/oneof.proto new file mode 100644 index 00000000..3dbfd58b --- /dev/null +++ b/protoc-gen-kaja/tests/007_oneof/oneof.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package oneof; + +message Payload { + string text = 1; +} + +message Event { + string id = 1; + oneof body { + string text = 2; + int32 number = 3; + bool flag = 4; + Payload payload = 5; + } +} diff --git a/protoc-gen-kaja/tests/008_service/service.proto b/protoc-gen-kaja/tests/008_service/service.proto new file mode 100644 index 00000000..e5b3839d --- /dev/null +++ b/protoc-gen-kaja/tests/008_service/service.proto @@ -0,0 +1,33 @@ +syntax = "proto3"; + +package myservice; + +message SearchRequest { + string query = 1; + int32 page = 2; + int32 page_size = 3; +} + +message SearchResponse { + repeated 
Result results = 1; + int32 total_count = 2; +} + +message Result { + string title = 1; + string url = 2; + string snippet = 3; +} + +message GetRequest { + string id = 1; +} + +message GetResponse { + Result result = 1; +} + +service SearchService { + rpc Search(SearchRequest) returns (SearchResponse); + rpc Get(GetRequest) returns (GetResponse); +} diff --git a/protoc-gen-kaja/tests/009_imports/main.proto b/protoc-gen-kaja/tests/009_imports/main.proto new file mode 100644 index 00000000..b143c262 --- /dev/null +++ b/protoc-gen-kaja/tests/009_imports/main.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package main; + +import "shared.proto"; + +message Container { + string name = 1; + shared.Status status = 2; + repeated shared.Metadata metadata = 3; +} diff --git a/protoc-gen-kaja/tests/009_imports/shared.proto b/protoc-gen-kaja/tests/009_imports/shared.proto new file mode 100644 index 00000000..a0a8b43d --- /dev/null +++ b/protoc-gen-kaja/tests/009_imports/shared.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package shared; + +enum Status { + STATUS_UNKNOWN = 0; + STATUS_OK = 1; + STATUS_ERROR = 2; +} + +message Metadata { + string key = 1; + string value = 2; +} diff --git a/protoc-gen-kaja/tests/010_well_known_types/well_known_types.proto b/protoc-gen-kaja/tests/010_well_known_types/well_known_types.proto new file mode 100644 index 00000000..784f4e61 --- /dev/null +++ b/protoc-gen-kaja/tests/010_well_known_types/well_known_types.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package wkt; + +import "google/protobuf/timestamp.proto"; + +message Event { + string name = 1; + google.protobuf.Timestamp created_at = 2; + google.protobuf.Timestamp updated_at = 3; +} + +message Schedule { + string title = 1; + google.protobuf.Timestamp start_time = 2; + google.protobuf.Timestamp end_time = 3; + repeated Event events = 4; +} diff --git a/protoc-gen-kaja/tests/011_multiple_services/multiple_services.proto b/protoc-gen-kaja/tests/011_multiple_services/multiple_services.proto 
new file mode 100644 index 00000000..f5eb6391 --- /dev/null +++ b/protoc-gen-kaja/tests/011_multiple_services/multiple_services.proto @@ -0,0 +1,101 @@ +syntax = "proto3"; + +package platform; + +// User management messages +message User { + string id = 1; + string name = 2; + string email = 3; + UserRole role = 4; +} + +enum UserRole { + USER_ROLE_UNSPECIFIED = 0; + USER_ROLE_ADMIN = 1; + USER_ROLE_USER = 2; + USER_ROLE_GUEST = 3; +} + +message CreateUserRequest { + string name = 1; + string email = 2; +} + +message CreateUserResponse { + User user = 1; +} + +message GetUserRequest { + string id = 1; +} + +message GetUserResponse { + User user = 1; +} + +// Product management messages +message Product { + string id = 1; + string name = 2; + double price = 3; + int32 quantity = 4; +} + +message CreateProductRequest { + string name = 1; + double price = 2; + int32 quantity = 3; +} + +message CreateProductResponse { + Product product = 1; +} + +message ListProductsRequest { + int32 page_size = 1; + string page_token = 2; +} + +message ListProductsResponse { + repeated Product products = 1; + string next_page_token = 2; +} + +// Order management messages +message Order { + string id = 1; + string user_id = 2; + repeated OrderItem items = 3; + double total = 4; +} + +message OrderItem { + string product_id = 1; + int32 quantity = 2; + double price = 3; +} + +message CreateOrderRequest { + string user_id = 1; + repeated OrderItem items = 2; +} + +message CreateOrderResponse { + Order order = 1; +} + +// Multiple services in one file +service UserService { + rpc CreateUser(CreateUserRequest) returns (CreateUserResponse); + rpc GetUser(GetUserRequest) returns (GetUserResponse); +} + +service ProductService { + rpc CreateProduct(CreateProductRequest) returns (CreateProductResponse); + rpc ListProducts(ListProductsRequest) returns (ListProductsResponse); +} + +service OrderService { + rpc CreateOrder(CreateOrderRequest) returns (CreateOrderResponse); +} diff --git 
a/protoc-gen-kaja/tests/012_nested_dirs/api/v1/service.proto b/protoc-gen-kaja/tests/012_nested_dirs/api/v1/service.proto new file mode 100644 index 00000000..43b62b1f --- /dev/null +++ b/protoc-gen-kaja/tests/012_nested_dirs/api/v1/service.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; + +package api.v1; + +// Nested directory structure with versioned API +message ApiInfo { + string version = 1; + string name = 2; +} + +message HealthCheckRequest { + string service = 1; +} + +message HealthCheckResponse { + enum Status { + STATUS_UNSPECIFIED = 0; + STATUS_SERVING = 1; + STATUS_NOT_SERVING = 2; + STATUS_UNKNOWN = 3; + } + + Status status = 1; + ApiInfo info = 2; +} + +service HealthService { + rpc Check(HealthCheckRequest) returns (HealthCheckResponse); +} diff --git a/protoc-gen-kaja/tests/012_nested_dirs/root.proto b/protoc-gen-kaja/tests/012_nested_dirs/root.proto new file mode 100644 index 00000000..249e169a --- /dev/null +++ b/protoc-gen-kaja/tests/012_nested_dirs/root.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package test; + +import "api/v1/service.proto"; + +// Test message that uses types from nested directory +message NestedTest { + api.v1.ApiInfo info = 1; + api.v1.HealthCheckResponse.Status status = 2; +} diff --git a/protoc-gen-kaja/tests/013_cross_package/common/types.proto b/protoc-gen-kaja/tests/013_cross_package/common/types.proto new file mode 100644 index 00000000..74539e67 --- /dev/null +++ b/protoc-gen-kaja/tests/013_cross_package/common/types.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package common; + +// Common types shared across packages +message Timestamp { + int64 seconds = 1; + int32 nanos = 2; +} + +message Address { + string street = 1; + string city = 2; + string state = 3; + string zip = 4; + string country = 5; +} + +enum Status { + STATUS_UNSPECIFIED = 0; + STATUS_ACTIVE = 1; + STATUS_INACTIVE = 2; + STATUS_DELETED = 3; +} diff --git a/protoc-gen-kaja/tests/013_cross_package/root.proto 
b/protoc-gen-kaja/tests/013_cross_package/root.proto new file mode 100644 index 00000000..22792574 --- /dev/null +++ b/protoc-gen-kaja/tests/013_cross_package/root.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package test; + +import "user/service.proto"; +import "common/types.proto"; + +// Test message that uses cross-package types +message CrossPackageTest { + user.UserProfile profile = 1; + common.Address address = 2; + common.Status status = 3; +} diff --git a/protoc-gen-kaja/tests/013_cross_package/user/service.proto b/protoc-gen-kaja/tests/013_cross_package/user/service.proto new file mode 100644 index 00000000..11bb86f9 --- /dev/null +++ b/protoc-gen-kaja/tests/013_cross_package/user/service.proto @@ -0,0 +1,42 @@ +syntax = "proto3"; + +package user; + +import "common/types.proto"; + +// User service with cross-package imports +message UserProfile { + string id = 1; + string username = 2; + string email = 3; + common.Address address = 4; + common.Status status = 5; + common.Timestamp created_at = 6; + common.Timestamp updated_at = 7; +} + +message CreateUserRequest { + string username = 1; + string email = 2; + common.Address address = 3; +} + +message CreateUserResponse { + UserProfile profile = 1; +} + +message UpdateUserRequest { + string id = 1; + string username = 2; + string email = 3; + common.Address address = 4; +} + +message UpdateUserResponse { + UserProfile profile = 1; +} + +service UserManagementService { + rpc CreateUser(CreateUserRequest) returns (CreateUserResponse); + rpc UpdateUser(UpdateUserRequest) returns (UpdateUserResponse); +} diff --git a/protoc-gen-kaja/tests/014_complex_nesting/complex_nesting.proto b/protoc-gen-kaja/tests/014_complex_nesting/complex_nesting.proto new file mode 100644 index 00000000..aed20aec --- /dev/null +++ b/protoc-gen-kaja/tests/014_complex_nesting/complex_nesting.proto @@ -0,0 +1,98 @@ +syntax = "proto3"; + +package complex; + +// Complex nested structures with multiple levels +message Organization { + 
string id = 1; + string name = 2; + repeated Department departments = 3; + + message Department { + string id = 1; + string name = 2; + repeated Team teams = 3; + + message Team { + string id = 1; + string name = 2; + repeated Member members = 3; + + message Member { + string id = 1; + string name = 2; + Role role = 3; + Permissions permissions = 4; + + enum Role { + ROLE_UNSPECIFIED = 0; + ROLE_ADMIN = 1; + ROLE_DEVELOPER = 2; + ROLE_VIEWER = 3; + } + + message Permissions { + bool can_read = 1; + bool can_write = 2; + bool can_delete = 3; + map<string, AccessLevel> resource_access = 4; + + enum AccessLevel { + ACCESS_LEVEL_UNSPECIFIED = 0; + ACCESS_LEVEL_NONE = 1; + ACCESS_LEVEL_READ = 2; + ACCESS_LEVEL_WRITE = 3; + ACCESS_LEVEL_ADMIN = 4; + } + } + } + + // Team-level settings + message Settings { + bool notifications_enabled = 1; + map<string, string> preferences = 2; + } + + Settings settings = 4; + } + } + + // Organization-level metadata + message Metadata { + map<string, string> labels = 1; + repeated string tags = 2; + oneof owner { + string user_id = 3; + string team_id = 4; + } + } + + Metadata metadata = 4; +} + +message GetOrganizationRequest { + string id = 1; + bool include_departments = 2; + bool include_teams = 3; +} + +message GetOrganizationResponse { + Organization organization = 1; +} + +message UpdateMemberRequest { + string org_id = 1; + string dept_id = 2; + string team_id = 3; + string member_id = 4; + Organization.Department.Team.Member member = 5; +} + +message UpdateMemberResponse { + Organization.Department.Team.Member member = 1; +} + +service OrganizationService { + rpc GetOrganization(GetOrganizationRequest) returns (GetOrganizationResponse); + rpc UpdateMember(UpdateMemberRequest) returns (UpdateMemberResponse); +} diff --git a/protoc-gen-kaja/tests/015_typescript_keywords/keywords.proto b/protoc-gen-kaja/tests/015_typescript_keywords/keywords.proto new file mode 100644 index 00000000..f11964d9 --- /dev/null +++ b/protoc-gen-kaja/tests/015_typescript_keywords/keywords.proto @@ -0,0 
+1,85 @@ +syntax = "proto3"; + +package keywords; + +// Test TypeScript reserved keywords as field names +message TypeScriptKeywords { + string const = 1; + string let = 2; + string var = 3; + string function = 4; + string class = 5; + string interface = 6; + string type = 7; + string enum = 8; + string namespace = 9; + string module = 10; + string extends = 11; + string implements = 12; + string static = 13; + string private = 14; + string public = 15; + string protected = 16; + string readonly = 17; + string async = 18; + string await = 19; + string yield = 20; + string return = 21; + string break = 22; + string continue = 23; + string throw = 24; + string try = 25; + string catch = 26; + string finally = 27; + string if = 28; + string else = 29; + string switch = 30; + string case = 31; + string default = 32; + string for = 33; + string while = 34; + string do = 35; + string new = 36; + string this = 37; + string super = 38; + string import = 39; + string export = 40; + string from = 41; + string as = 42; + string in = 43; + string of = 44; + string typeof = 45; + string instanceof = 46; + string void = 47; + string null = 48; + string undefined = 49; + string true = 50; + string false = 51; + string delete = 52; + string with = 53; + string debugger = 54; +} + +// Test keywords as message names +message const { + string value = 1; +} + +message let { + string value = 1; +} + +message class { + string value = 1; +} + +// Test keywords as enum names +enum const_enum { + CONST_UNKNOWN = 0; + CONST_VALUE = 1; +} + +enum interface_type { + INTERFACE_UNKNOWN = 0; + INTERFACE_VALUE = 1; +} diff --git a/protoc-gen-kaja/tests/016_field_name_edge_cases/field_names.proto b/protoc-gen-kaja/tests/016_field_name_edge_cases/field_names.proto new file mode 100644 index 00000000..5818a23b --- /dev/null +++ b/protoc-gen-kaja/tests/016_field_name_edge_cases/field_names.proto @@ -0,0 +1,51 @@ +syntax = "proto3"; + +package field_names; + +// Test field names with numbers at 
various positions +message FieldNamesWithNumbers { + // Numbers at the end + string value1 = 1; + string value2 = 2; + string int32s = 3; // Should be "int32s" not "int32S" + string int64s = 4; // Should be "int64s" not "int64S" + string uint32s = 5; + + // Numbers in the middle + string field1name = 6; + string field2_name = 7; + string my2ndField = 8; + + // Starting with capital (should preserve in jsonName) + string CapitalField = 9; + string CamelCaseField = 10; + string ALLCAPS = 11; + + // Already camelCase (no underscore) + string alreadyCamel = 12; + string firstName = 13; + string lastName = 14; + + // Mixed underscore and capital + string Field_With_Underscores = 15; + string CONST_FIELD_NAME = 16; + + // Single letter fields + string a = 17; + string B = 18; + string x = 19; + string Y = 20; + + // Consecutive underscores + string field__double = 21; + string __leading = 22; + string trailing__ = 23; + + // Only underscores + string _ = 24; + string __ = 25; + + // Numbers only after underscore + string field_1 = 26; + string field_2_value = 27; +} diff --git a/protoc-gen-kaja/tests/017_multiple_oneofs/multiple_oneofs.proto b/protoc-gen-kaja/tests/017_multiple_oneofs/multiple_oneofs.proto new file mode 100644 index 00000000..487e92d0 --- /dev/null +++ b/protoc-gen-kaja/tests/017_multiple_oneofs/multiple_oneofs.proto @@ -0,0 +1,73 @@ +syntax = "proto3"; + +package multiple_oneofs; + +// Test message with multiple oneof groups +message MultipleOneofs { + string id = 1; + + oneof authentication { + string password = 2; + string token = 3; + string api_key = 4; + } + + oneof data_format { + string json = 5; + string xml = 6; + bytes binary = 7; + } + + oneof status { + bool active = 8; + bool inactive = 9; + bool pending = 10; + } + + string name = 11; + + oneof metadata { + string description = 12; + int32 version = 13; + } +} + +// Test nested message with oneof +message Container { + string id = 1; + + message Nested { + oneof value { + string text = 1; 
+ int32 number = 2; + } + } + + Nested nested = 2; + + oneof container_type { + string label = 3; + int32 count = 4; + } +} + +// Test oneof with all scalar types +message OneofAllTypes { + oneof value { + double double_val = 1; + float float_val = 2; + int32 int32_val = 3; + int64 int64_val = 4; + uint32 uint32_val = 5; + uint64 uint64_val = 6; + sint32 sint32_val = 7; + sint64 sint64_val = 8; + fixed32 fixed32_val = 9; + fixed64 fixed64_val = 10; + sfixed32 sfixed32_val = 11; + sfixed64 sfixed64_val = 12; + bool bool_val = 13; + string string_val = 14; + bytes bytes_val = 15; + } +} diff --git a/protoc-gen-kaja/tests/018_proto2_required/proto2.proto b/protoc-gen-kaja/tests/018_proto2_required/proto2.proto new file mode 100644 index 00000000..d45f9f81 --- /dev/null +++ b/protoc-gen-kaja/tests/018_proto2_required/proto2.proto @@ -0,0 +1,59 @@ +syntax = "proto2"; + +package proto2_test; + +// Test proto2 with required fields +message RequiredFields { + required string name = 1; + required int32 id = 2; + optional string email = 3; + optional int32 age = 4; + repeated string tags = 5; +} + +// Test proto2 with default values +message DefaultValues { + optional string name = 1 [default = "unknown"]; + optional int32 count = 2 [default = 0]; + optional bool enabled = 3 [default = true]; + optional double rate = 4 [default = 1.5]; + optional Color color = 5 [default = COLOR_RED]; +} + +enum Color { + COLOR_RED = 0; + COLOR_GREEN = 1; + COLOR_BLUE = 2; +} + +// Test proto2 groups (deprecated but valid) +message WithGroup { + optional int32 id = 1; + repeated group Result = 2 { + required string url = 3; + optional string title = 4; + } +} + +// Test proto2 extensions +message Extendable { + optional string name = 1; + extensions 100 to 199; +} + +extend Extendable { + optional int32 custom_field = 100; + optional string custom_string = 101; +} + +// Test nested enums in proto2 +message Container { + enum Status { + STATUS_UNKNOWN = 0; + STATUS_ACTIVE = 1; + 
STATUS_INACTIVE = 2; + } + + optional Status status = 1; + optional int32 value = 2; +} diff --git a/protoc-gen-kaja/tests/019_enum_without_prefix/no_prefix.proto b/protoc-gen-kaja/tests/019_enum_without_prefix/no_prefix.proto new file mode 100644 index 00000000..11fe6621 --- /dev/null +++ b/protoc-gen-kaja/tests/019_enum_without_prefix/no_prefix.proto @@ -0,0 +1,57 @@ +syntax = "proto3"; + +package no_prefix; + +// Enum without common prefix - each value has different prefix +enum Status { + UNKNOWN = 0; + ACTIVE = 1; + INACTIVE = 2; + PENDING = 3; + DONE = 4; +} + +// Enum with no prefix at all +enum SimpleEnum { + A = 0; + B = 1; + C = 2; + D = 3; +} + +// Enum with mixed prefixes +enum MixedPrefix { + STATUS_UNKNOWN = 0; + ACTIVE_STATE = 1; + MODE_INACTIVE = 2; + PENDING = 3; +} + +// Enum with partial common prefix +enum PartialPrefix { + COLOR_RED = 0; + COLOR_GREEN = 1; + BLUE = 2; // No prefix + COLOR_YELLOW = 3; +} + +// Enum with single value +enum SingleValue { + ONLY_VALUE = 0; +} + +// Enum with empty prefix (values are just numbers) +enum NumberEnum { + V0 = 0; + V1 = 1; + V2 = 2; +} + +message UseEnums { + Status status = 1; + SimpleEnum simple = 2; + MixedPrefix mixed = 3; + PartialPrefix partial = 4; + SingleValue single = 5; + NumberEnum numbers = 6; +} diff --git a/protoc-gen-kaja/tests/020_reserved_fields/reserved.proto b/protoc-gen-kaja/tests/020_reserved_fields/reserved.proto new file mode 100644 index 00000000..2419d1b5 --- /dev/null +++ b/protoc-gen-kaja/tests/020_reserved_fields/reserved.proto @@ -0,0 +1,55 @@ +syntax = "proto3"; + +package reserved_test; + +// Test reserved field numbers +message ReservedNumbers { + string field1 = 1; + reserved 2, 3, 4; + string field5 = 5; + reserved 6 to 10; + string field11 = 11; + reserved 15 to max; +} + +// Test reserved field names +message ReservedNames { + string current_field = 1; + reserved "old_field", "deprecated_field"; + string another_field = 2; + reserved "removed_field"; +} + +// Test 
both reserved numbers and names +message ReservedBoth { + string active = 1; + reserved 2, 3; + reserved "old_name", "legacy_field"; + string new_field = 4; + reserved 5 to 10; + reserved "deprecated"; +} + +// Test reserved in nested messages +message Container { + string id = 1; + + message Nested { + string value = 1; + reserved 2 to 5; + reserved "old_value"; + string new_value = 6; + } + + Nested nested = 2; + reserved 3, 4; +} + +// Test reserved enum values +enum StatusWithReserved { + STATUS_UNKNOWN = 0; + STATUS_ACTIVE = 1; + reserved 2, 3, 4; + reserved "STATUS_DELETED", "STATUS_ARCHIVED"; + STATUS_PENDING = 5; +} diff --git a/protoc-gen-kaja/tests/021_comment_edge_cases/comments.proto b/protoc-gen-kaja/tests/021_comment_edge_cases/comments.proto new file mode 100644 index 00000000..adc5fd1d --- /dev/null +++ b/protoc-gen-kaja/tests/021_comment_edge_cases/comments.proto @@ -0,0 +1,100 @@ +syntax = "proto3"; + +package comments; + +/** + * Multi-line comment with stars + * Second line + * Third line + */ +message MultiLineComment { + string value = 1; +} + +// Single line comment +message SingleLineComment { + string value = 1; +} + +message FieldComments { + // Leading comment for field1 + string field1 = 1; + + string field2 = 2; // Trailing comment for field2 + + /** + * JSDoc style comment + * @param {string} value - The value + * @returns {boolean} Success + */ + string field3 = 3; + + // Comment with special characters: @#$%^&*() + string field4 = 4; + + // Comment with "quotes" and 'apostrophes' + string field5 = 5; + + // Comment with backslashes \ and forward slashes / + string field6 = 6; + + /* C-style comment */ + string field7 = 7; + + /* + * Multi-line C-style + * with asterisks + */ + string field8 = 8; + + // TODO: implement this field + string field9 = 9; + + // FIXME: this needs review + string field10 = 10; + + // Comment with code: `someCode()` + string field11 = 11; + + // Comment with markdown **bold** and *italic* + string field12 = 
12; + + // Comment with emoji 🚀 ✨ 🎉 + string field13 = 13; + + // Comment with URL: https://example.com + string field14 = 14; + + // Comment with XML: content + string field15 = 15; + + // Comment ending with blank line + + string field16 = 16; +} + +// Enum with comments +enum CommentedEnum { + // Unknown value + UNKNOWN = 0; + + // First value + VALUE1 = 1; // Trailing + + /** + * Second value + * with multi-line comment + */ + VALUE2 = 2; +} + +message NestedComments { + // Outer comment + message Inner { + // Inner field comment + string value = 1; + } + + // Field using nested type + Inner inner = 1; +} diff --git a/protoc-gen-kaja/tests/022_field_numbers_gaps/gaps.proto b/protoc-gen-kaja/tests/022_field_numbers_gaps/gaps.proto new file mode 100644 index 00000000..c20a1d87 --- /dev/null +++ b/protoc-gen-kaja/tests/022_field_numbers_gaps/gaps.proto @@ -0,0 +1,62 @@ +syntax = "proto3"; + +package gaps; + +// Test non-sequential field numbers +message FieldNumberGaps { + string field1 = 1; + string field100 = 100; + string field50 = 50; + string field2 = 2; + string field1000 = 1000; + string field500 = 500; +} + +// Test field numbers starting high +message HighFieldNumbers { + string field536870911 = 536870911; // Max field number + string field1000 = 1000; + string field10000 = 10000; +} + +// Test sparse field numbers +message SparseFields { + string a = 1; + string b = 10; + string c = 20; + string d = 30; + string e = 100; + string f = 200; + string g = 300; + string h = 1000; + string i = 10000; + string j = 100000; +} + +// Test reversed order field numbers (in proto definition) +message ReversedOrder { + string last = 100; + string middle = 50; + string first = 1; + string another = 75; + string yet_another = 25; +} + +// Test oneof with gaps +message OneofWithGaps { + string id = 1; + + oneof value { + string text = 10; + int32 number = 50; + bool flag = 100; + } + + string name = 200; +} + +// Test map with high field number +message MapWithHighNumber 
{ + string id = 1; + map data = 500; +} diff --git a/protoc-gen-kaja/tests/023_nested_enums/nested_enums.proto b/protoc-gen-kaja/tests/023_nested_enums/nested_enums.proto new file mode 100644 index 00000000..8f62818f --- /dev/null +++ b/protoc-gen-kaja/tests/023_nested_enums/nested_enums.proto @@ -0,0 +1,107 @@ +syntax = "proto3"; + +package nested_enums; + +// Test enum nested in message +message Container { + enum Status { + STATUS_UNKNOWN = 0; + STATUS_ACTIVE = 1; + STATUS_INACTIVE = 2; + } + + Status status = 1; + string name = 2; +} + +// Test multiple nested enums +message MultipleNestedEnums { + enum Type { + TYPE_UNKNOWN = 0; + TYPE_A = 1; + TYPE_B = 2; + } + + enum Priority { + PRIORITY_LOW = 0; + PRIORITY_MEDIUM = 1; + PRIORITY_HIGH = 2; + } + + Type type = 1; + Priority priority = 2; +} + +// Test deeply nested enums +message Level1 { + message Level2 { + message Level3 { + enum DeepEnum { + DEEP_UNKNOWN = 0; + DEEP_VALUE = 1; + } + + DeepEnum value = 1; + } + + Level3 level3 = 1; + } + + Level2 level2 = 1; +} + +// Test enum in nested message used in parent +message Parent { + message Child { + enum ChildEnum { + CHILD_UNKNOWN = 0; + CHILD_VALUE = 1; + } + } + + Child.ChildEnum status = 1; + string name = 2; +} + +// Test referencing nested enum from another message +message UseNestedEnum { + Container.Status container_status = 1; + MultipleNestedEnums.Type type = 2; + MultipleNestedEnums.Priority priority = 3; + Parent.Child.ChildEnum child_status = 4; +} + +// Test nested enum with no prefix +message NoPrefix { + enum Simple { + A = 0; + B = 1; + C = 2; + } + + Simple value = 1; +} + +// Test sibling messages with same enum name +message Sibling1 { + enum Status { + STATUS_OK = 0; + STATUS_ERROR = 1; + } + + Status status = 1; +} + +message Sibling2 { + enum Status { + STATUS_PENDING = 0; + STATUS_DONE = 1; + } + + Status status = 1; +} + +message UseBothSiblings { + Sibling1.Status status1 = 1; + Sibling2.Status status2 = 2; +} diff --git 
a/protoc-gen-kaja/tests/024_camelcase_fields/camelcase.proto b/protoc-gen-kaja/tests/024_camelcase_fields/camelcase.proto new file mode 100644 index 00000000..ee085990 --- /dev/null +++ b/protoc-gen-kaja/tests/024_camelcase_fields/camelcase.proto @@ -0,0 +1,96 @@ +syntax = "proto3"; + +package camelcase; + +// Test fields that are already camelCase +message CamelCaseFields { + string firstName = 1; + string lastName = 2; + string emailAddress = 3; + int32 userId = 4; + bool isActive = 5; + string fullName = 6; +} + +// Test mixed snake_case and camelCase +message MixedCase { + string snake_case = 1; + string camelCase = 2; + string PascalCase = 3; + string SCREAMING_SNAKE = 4; + string mixedWith_underscores = 5; +} + +// Test edge cases in camelCase conversion +message CamelCaseEdgeCases { + // Numbers after underscore + string field_1value = 1; // Should be field1value + string field_2_value = 2; // Should be field2Value + + // Multiple consecutive underscores + string field__name = 3; + + // Leading/trailing underscores + string _private = 4; + string trailing_ = 5; + + // All caps with underscores + string HTTP_URL = 6; + string API_KEY = 7; + + // Mixed case combinations + string getHTTPResponseCode = 8; + string URLPath = 9; + string xmlHTTPRequest = 10; + + // Single character segments + string a_b_c = 11; + string x1_y2_z3 = 12; +} + +// Test jsonName override +message JsonNameOverride { + string weird_name = 1 [json_name = "WeirdName"]; + string custom = 2 [json_name = "CUSTOM"]; + string override_field = 3 [json_name = "overrideField"]; +} + +// Test service method names +service CamelCaseService { + rpc GetUser(GetUserRequest) returns (GetUserResponse); + rpc listItems(ListItemsRequest) returns (ListItemsResponse); + rpc CreateOrUpdate(CreateOrUpdateRequest) returns (CreateOrUpdateResponse); + rpc HTTPGet(HTTPGetRequest) returns (HTTPGetResponse); +} + +message GetUserRequest { + string user_id = 1; +} + +message GetUserResponse { + string user_name = 1; 
+} + +message ListItemsRequest { + int32 page_size = 1; +} + +message ListItemsResponse { + repeated string items = 1; +} + +message CreateOrUpdateRequest { + string id = 1; +} + +message CreateOrUpdateResponse { + bool created = 1; +} + +message HTTPGetRequest { + string url = 1; +} + +message HTTPGetResponse { + string body = 1; +} diff --git a/protoc-gen-kaja/tests/025_empty_messages/empty.proto b/protoc-gen-kaja/tests/025_empty_messages/empty.proto new file mode 100644 index 00000000..f6e72866 --- /dev/null +++ b/protoc-gen-kaja/tests/025_empty_messages/empty.proto @@ -0,0 +1,55 @@ +syntax = "proto3"; + +package empty; + +// Empty message (no fields) +message EmptyMessage { +} + +// Message with only reserved fields +message OnlyReserved { + reserved 1, 2, 3; + reserved "old_field"; +} + +// Service with empty messages +service EmptyService { + rpc DoNothing(EmptyMessage) returns (EmptyMessage); + rpc SendEmpty(Request) returns (EmptyMessage); + rpc ReceiveEmpty(EmptyMessage) returns (Response); +} + +message Request { + string query = 1; +} + +message Response { + string result = 1; +} + +// Nested empty messages +message Container { + message Empty { + } + + Empty empty = 1; + string value = 2; +} + +// Map with empty message value +message MapWithEmpty { + map data = 1; +} + +// Repeated empty messages +message RepeatedEmpty { + repeated EmptyMessage items = 1; +} + +// Oneof with empty message +message OneofWithEmpty { + oneof value { + EmptyMessage empty = 1; + string text = 2; + } +} diff --git a/protoc-gen-kaja/tests/026_circular_deps/circular.proto b/protoc-gen-kaja/tests/026_circular_deps/circular.proto new file mode 100644 index 00000000..4580bda1 --- /dev/null +++ b/protoc-gen-kaja/tests/026_circular_deps/circular.proto @@ -0,0 +1,69 @@ +syntax = "proto3"; + +package circular; + +// Direct self-reference +message SelfReference { + string id = 1; + SelfReference child = 2; +} + +// Mutual reference (A -> B -> A) +message PersonNode { + string name = 
1; + AddressNode address = 2; +} + +message AddressNode { + string street = 1; + PersonNode resident = 2; +} + +// Circular through repeated field +message TreeNode { + string value = 1; + repeated TreeNode children = 2; + TreeNode parent = 3; +} + +// Circular through map +message GraphNode { + string id = 1; + map neighbors = 2; +} + +// Three-way circular (A -> B -> C -> A) +message NodeA { + string id = 1; + NodeB next = 2; +} + +message NodeB { + string id = 1; + NodeC next = 2; +} + +message NodeC { + string id = 1; + NodeA next = 2; +} + +// Nested circular +message Outer { + message Inner { + string value = 1; + Outer outer = 2; + } + + Inner inner = 1; +} + +// Circular with oneof +message OneofCircular { + string id = 1; + + oneof value { + string text = 2; + OneofCircular nested = 3; + } +} diff --git a/protoc-gen-kaja/tests/027_optional_proto3/optional.proto b/protoc-gen-kaja/tests/027_optional_proto3/optional.proto new file mode 100644 index 00000000..25c748dd --- /dev/null +++ b/protoc-gen-kaja/tests/027_optional_proto3/optional.proto @@ -0,0 +1,85 @@ +syntax = "proto3"; + +package optional; + +// Test proto3 optional fields (proto3.15+ feature) +message OptionalFields { + // Regular required fields (proto3 default) + string required_string = 1; + int32 required_int = 2; + + // Optional scalar fields + optional string optional_string = 3; + optional int32 optional_int = 4; + optional bool optional_bool = 5; + optional double optional_double = 6; + + // Optional message field + optional Nested optional_message = 7; + + // Optional enum field + optional Status optional_enum = 8; + + // Mix of optional and required + string required2 = 9; + optional string optional2 = 10; + int32 required3 = 11; + optional int32 optional3 = 12; +} + +message Nested { + string value = 1; +} + +enum Status { + STATUS_UNKNOWN = 0; + STATUS_ACTIVE = 1; + STATUS_INACTIVE = 2; +} + +// All optional fields +message AllOptional { + optional string field1 = 1; + optional int32 
field2 = 2; + optional bool field3 = 3; + optional bytes field4 = 4; + optional Nested field5 = 5; +} + +// Optional in nested message +message Container { + message Inner { + optional string optional_value = 1; + string required_value = 2; + } + + Inner inner = 1; + optional string optional_name = 2; +} + +// Optional with oneof (edge case) +message OptionalWithOneof { + optional string optional_field = 1; + + oneof choice { + string text = 2; + int32 number = 3; + } + + optional int32 another_optional = 4; +} + +// Service using optional fields +service OptionalService { + rpc GetData(OptionalRequest) returns (OptionalResponse); +} + +message OptionalRequest { + optional string query = 1; + optional int32 limit = 2; +} + +message OptionalResponse { + optional string result = 1; + optional int32 count = 2; +} diff --git a/protoc-gen-kaja/tests/028_invalid_syntax/error.proto b/protoc-gen-kaja/tests/028_invalid_syntax/error.proto new file mode 100644 index 00000000..d4b92a80 --- /dev/null +++ b/protoc-gen-kaja/tests/028_invalid_syntax/error.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package test; + +// Invalid: missing import for timestamp +message Event { + string id = 1; + google.protobuf.Timestamp created_at = 2; +} diff --git a/protoc-gen-kaja/tests/029_kaja_specific_error/test.proto b/protoc-gen-kaja/tests/029_kaja_specific_error/test.proto new file mode 100644 index 00000000..ff31b214 --- /dev/null +++ b/protoc-gen-kaja/tests/029_kaja_specific_error/test.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package test; + +// This should work fine in both generators +message SimpleMessage { + string id = 1; + string name = 2; +} diff --git a/protoc-gen-kaja/tests/030_nested_with_keyword/test.proto b/protoc-gen-kaja/tests/030_nested_with_keyword/test.proto new file mode 100644 index 00000000..462602fc --- /dev/null +++ b/protoc-gen-kaja/tests/030_nested_with_keyword/test.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package test; + +// Test nested message with 
reserved keyword name +message Outer { + message class { + string value = 1; + } + + message const { + int32 number = 1; + } + + class my_class = 1; + const my_const = 2; +} + +// Test referencing nested keyword types from another message +message Another { + Outer.class imported_class = 1; + Outer.const imported_const = 2; +} + +// Test multiple levels of nesting with keywords +message Level1 { + message interface { + message type { + string data = 1; + } + type nested = 1; + } + interface my_interface = 1; +} diff --git a/protoc-gen-kaja/tests/031_merged_keyword/test.proto b/protoc-gen-kaja/tests/031_merged_keyword/test.proto new file mode 100644 index 00000000..754c9c09 --- /dev/null +++ b/protoc-gen-kaja/tests/031_merged_keyword/test.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package test; + +// Test case where individual component is NOT a keyword, +// but when merged with underscore, creates a keyword + +message from { + message of { + // "from_of" is NOT a reserved keyword, should stay "from_of" + string value = 1; + } +} + +// Top level keywords should still be escaped +message class { + string data = 1; +} + +// Test case where parent is safe but merged name might look like keyword +message my { + message class { + // "my_class" is NOT a keyword (only "class" is), should be "my_class" + string value = 1; + } +} diff --git a/protoc-gen-kaja/tests/032_reserved_properties/test.proto b/protoc-gen-kaja/tests/032_reserved_properties/test.proto new file mode 100644 index 00000000..d06f6f7c --- /dev/null +++ b/protoc-gen-kaja/tests/032_reserved_properties/test.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; + +package test; + +// Test field names that are reserved object properties +message ReservedProps { + // __proto__ is a reserved object property + string __proto__ = 1; + + // toString is a reserved object property + int32 to_string = 2; + + // Normal field for comparison + string normal = 3; +} + +// Test in nested message too +message Container { + message 
Inner { + string __proto__ = 1; + int32 to_string = 2; + } + + Inner data = 1; +} diff --git a/protoc-gen-kaja/tests/033_service_reserved_methods/test.proto b/protoc-gen-kaja/tests/033_service_reserved_methods/test.proto new file mode 100644 index 00000000..6abdf9da --- /dev/null +++ b/protoc-gen-kaja/tests/033_service_reserved_methods/test.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package test; + +message Request {} +message Response {} + +// Test service with methods that are reserved class properties +service TestService { + // These method names are reserved class properties and should be escaped + rpc Name(Request) returns (Response); + rpc Constructor(Request) returns (Response); + rpc Close(Request) returns (Response); + rpc ToString(Request) returns (Response); + + // Normal method for comparison + rpc GetData(Request) returns (Response); +} diff --git a/protoc-gen-kaja/tests/034_grpc_reserved_methods/test.proto b/protoc-gen-kaja/tests/034_grpc_reserved_methods/test.proto new file mode 100644 index 00000000..41ddb210 --- /dev/null +++ b/protoc-gen-kaja/tests/034_grpc_reserved_methods/test.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package test; + +// Test that gRPC client reserved method names are properly escaped +service TestService { + // These method names collide with gRPC client methods and should be escaped + rpc MakeUnaryRequest(EmptyRequest) returns (EmptyResponse); + rpc MakeClientStreamRequest(EmptyRequest) returns (EmptyResponse); + rpc MakeServerStreamRequest(EmptyRequest) returns (EmptyResponse); + rpc MakeBidiStreamRequest(EmptyRequest) returns (EmptyResponse); + rpc GetChannel(EmptyRequest) returns (EmptyResponse); + rpc WaitForReady(EmptyRequest) returns (EmptyResponse); + + // These collide with generic client properties + rpc Methods(EmptyRequest) returns (EmptyResponse); + rpc TypeName(EmptyRequest) returns (EmptyResponse); + rpc Options(EmptyRequest) returns (EmptyResponse); +} + +message EmptyRequest {} +message EmptyResponse {} 
diff --git a/protoc-gen-kaja/tests/035_proto_transport_methods/test.proto b/protoc-gen-kaja/tests/035_proto_transport_methods/test.proto new file mode 100644 index 00000000..fdd70c13 --- /dev/null +++ b/protoc-gen-kaja/tests/035_proto_transport_methods/test.proto @@ -0,0 +1,21 @@ +syntax = "proto3"; + +package test; + +// Test that __proto__ and _transport are properly escaped +// These need to match the reserved names AFTER lowerCamelCase transformation +service TestService { + // After lowerCamelCase, this becomes "__proto__" which is reserved + // Wait, that won't work because underscores are stripped... + // Let me try a different approach: names that are ALREADY in lowercase + rpc __proto__(EmptyRequest) returns (EmptyResponse); + + // After lowerCamelCase, becomes "_transport" which is reserved + rpc _transport(EmptyRequest) returns (EmptyResponse); + + // Normal method for comparison + rpc GetData(EmptyRequest) returns (EmptyResponse); +} + +message EmptyRequest {} +message EmptyResponse {} diff --git a/protoc-gen-kaja/tests/036_methods_clash/test.proto b/protoc-gen-kaja/tests/036_methods_clash/test.proto new file mode 100644 index 00000000..0dd0cfcb --- /dev/null +++ b/protoc-gen-kaja/tests/036_methods_clash/test.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package test; + +// Test service with method names that clash with ServiceType interface properties +// The TypeScript plugin escapes these because they conflict with the client class properties +service TestService { + // "methods" is a property of ServiceType interface, should be escaped to "methods$" + rpc methods(Request) returns (Response); + + // "typeName" is a property of ServiceType interface, should be escaped to "typeName$" + rpc typeName(Request) returns (Response); + + // "name" is a property of the client class, should be escaped to "name$" + rpc name(Request) returns (Response); + + // "constructor" is a reserved JavaScript class property, should be escaped to "constructor$" + rpc 
constructor(Request) returns (Response); + + // Normal method for comparison + rpc getData(Request) returns (Response); +} + +message Request { + string id = 1; +} + +message Response { + string result = 1; +} diff --git a/protoc-gen-kaja/tests/037_service_detached_comments/test.proto b/protoc-gen-kaja/tests/037_service_detached_comments/test.proto new file mode 100644 index 00000000..394caf5f --- /dev/null +++ b/protoc-gen-kaja/tests/037_service_detached_comments/test.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package test; + +// Service with detached comments between methods +service TestService { + // Comment for first method + rpc First(Request) returns (Response); + + // This is a detached comment between methods + // It should appear in the generated client as standalone comments + + // Comment for second method + rpc Second(Request) returns (Response); + + // Another detached comment + // This one has multiple lines + // And should also be preserved + + // Comment for third method + rpc Third(Request) returns (Response); +} + +message Request { + string id = 1; +} + +message Response { + string result = 1; +} diff --git a/protoc-gen-kaja/tests/038_oneof_reserved_names/test.proto b/protoc-gen-kaja/tests/038_oneof_reserved_names/test.proto new file mode 100644 index 00000000..f3874ed8 --- /dev/null +++ b/protoc-gen-kaja/tests/038_oneof_reserved_names/test.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package test; + +message TestMessage { + oneof value { + string constructor = 1; + int32 toString = 2; + bool name = 3; + } +} diff --git a/protoc-gen-kaja/tests/039_enum_reserved_values/test.proto b/protoc-gen-kaja/tests/039_enum_reserved_values/test.proto new file mode 100644 index 00000000..e899aef5 --- /dev/null +++ b/protoc-gen-kaja/tests/039_enum_reserved_values/test.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package test; + +// Enum with reserved TypeScript names as values +enum Status { + // TypeScript reserved keyword + default = 0; + + // Another 
reserved word + typeof = 1; + + // Reserved type name + Array = 2; + + // Normal value + ACTIVE = 3; +} diff --git a/protoc-gen-kaja/tests/040_map_with_reserved_fields/test.proto b/protoc-gen-kaja/tests/040_map_with_reserved_fields/test.proto new file mode 100644 index 00000000..01e423a1 --- /dev/null +++ b/protoc-gen-kaja/tests/040_map_with_reserved_fields/test.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package test; + +message Value { + string constructor = 1; + int32 __proto__ = 2; +} + +message Container { + map items = 1; + map indexed = 2; +} diff --git a/protoc-gen-kaja/tests/041_nested_type_reserved/test.proto b/protoc-gen-kaja/tests/041_nested_type_reserved/test.proto new file mode 100644 index 00000000..f6ca7bd8 --- /dev/null +++ b/protoc-gen-kaja/tests/041_nested_type_reserved/test.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package test; + +message Outer { + message constructor { + string value = 1; + } + + constructor nested = 1; +} + +// Test top-level message with reserved type name +message Array { + string value = 1; +} diff --git a/protoc-gen-kaja/tests/042_compound_reserved/test.proto b/protoc-gen-kaja/tests/042_compound_reserved/test.proto new file mode 100644 index 00000000..5831fef1 --- /dev/null +++ b/protoc-gen-kaja/tests/042_compound_reserved/test.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package test; + +message to { + message String { + string value = 1; + } + + String data = 1; +} diff --git a/protoc-gen-kaja/tests/043_nested_reserved_merge/test.proto b/protoc-gen-kaja/tests/043_nested_reserved_merge/test.proto new file mode 100644 index 00000000..25fe8d51 --- /dev/null +++ b/protoc-gen-kaja/tests/043_nested_reserved_merge/test.proto @@ -0,0 +1,42 @@ +syntax = "proto3"; + +package test; + +// Test case: Nested types that merge to reserved TypeScript type names +// When Outer.Array merges to "Outer_Array", it should NOT be escaped +// But when just "Array" it becomes "Array" which IS a reserved type name + +// This nested message 
should produce "String_" type after merging +message String { + message _ { + int32 value = 1; + } +} + +// This should produce "Array_Value" after merging (not reserved) +message Array { + message Value { + string data = 1; + } +} + +// This creates "Number_" after merging (reserved!) +message Number { + message _ { + int32 x = 1; + } +} + +// Test with deeply nested: "Boolean_True_" +message Boolean { + message True { + message _ { + bool flag = 1; + } + } +} + +// Service names also go through createLocalTypeName +service Object { + rpc DoSomething(String) returns (String); +} diff --git a/protoc-gen-kaja/tests/044_oneof_proto/test.proto b/protoc-gen-kaja/tests/044_oneof_proto/test.proto new file mode 100644 index 00000000..477717e0 --- /dev/null +++ b/protoc-gen-kaja/tests/044_oneof_proto/test.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package test; + +message TestMessage { + oneof __proto__ { + string text = 1; + int32 number = 2; + } + + oneof toString { + bool flag = 3; + double value = 4; + } +} diff --git a/protoc-gen-kaja/tests/045_service_reserved_type_import/service.proto b/protoc-gen-kaja/tests/045_service_reserved_type_import/service.proto new file mode 100644 index 00000000..3cd27981 --- /dev/null +++ b/protoc-gen-kaja/tests/045_service_reserved_type_import/service.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package api; + +import "types.proto"; + +// Service that uses the reserved-name types from types.proto +service TypeService { + // Uses String from types.proto + rpc GetString(types.String) returns (types.String); + + // Uses Array from types.proto + rpc GetArray(types.Array) returns (types.Array); + + // Uses Number from types.proto + rpc GetNumber(types.Number) returns (types.Number); +} diff --git a/protoc-gen-kaja/tests/045_service_reserved_type_import/types.proto b/protoc-gen-kaja/tests/045_service_reserved_type_import/types.proto new file mode 100644 index 00000000..b3de0e8b --- /dev/null +++ 
b/protoc-gen-kaja/tests/045_service_reserved_type_import/types.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package types; + +// This message has a reserved TypeScript type name +message String { + string value = 1; +} + +// Another reserved type name +message Array { + repeated string items = 1; +} + +// And another +message Number { + int32 value = 1; +} diff --git a/protoc-gen-kaja/tests/046_nested_import_reserved/service.proto b/protoc-gen-kaja/tests/046_nested_import_reserved/service.proto new file mode 100644 index 00000000..21dd32bb --- /dev/null +++ b/protoc-gen-kaja/tests/046_nested_import_reserved/service.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package api; + +import "types.proto"; + +// Service using nested reserved types from another package +service NestedTypeService { + // Uses nested String from Container + rpc GetString(types.Container.String) returns (types.Container.String); + + // Uses nested Array from Container + rpc GetArray(types.Container.Array) returns (types.Container.Array); + + // Uses nested Number from Container + rpc GetNumber(types.Container.Number) returns (types.Container.Number); + + // Uses top-level Boolean + rpc GetBoolean(types.Boolean) returns (types.Boolean); + + // Mix of nested and top-level + rpc Transform(types.Container.String) returns (types.Boolean); +} diff --git a/protoc-gen-kaja/tests/046_nested_import_reserved/types.proto b/protoc-gen-kaja/tests/046_nested_import_reserved/types.proto new file mode 100644 index 00000000..ee9e07bb --- /dev/null +++ b/protoc-gen-kaja/tests/046_nested_import_reserved/types.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package types; + +// Outer message with nested reserved type names +message Container { + // Nested message with a reserved TypeScript type name + message String { + string value = 1; + } + + // Another nested reserved name + message Array { + repeated string items = 1; + } + + // Nested Number + message Number { + int32 value = 1; + } + + // A field using one of 
the nested types + String str = 1; + Array arr = 2; +} + +// Top-level reserved type +message Boolean { + bool value = 1; +} diff --git a/protoc-gen-kaja/tests/047_double_nested_service/service.proto b/protoc-gen-kaja/tests/047_double_nested_service/service.proto new file mode 100644 index 00000000..ddb67df0 --- /dev/null +++ b/protoc-gen-kaja/tests/047_double_nested_service/service.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package test; + +import "types.proto"; + +service TestService { + // Uses doubly-nested type + rpc DoSomething(Outer.Middle.Inner) returns (Outer.Middle.Inner); + + // Uses singly-nested type + rpc DoAnother(Outer.Middle) returns (Outer.Middle); +} diff --git a/protoc-gen-kaja/tests/047_double_nested_service/types.proto b/protoc-gen-kaja/tests/047_double_nested_service/types.proto new file mode 100644 index 00000000..16e83f2e --- /dev/null +++ b/protoc-gen-kaja/tests/047_double_nested_service/types.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package test; + +message Outer { + message Middle { + message Inner { + string value = 1; + } + Inner inner = 1; + } + Middle middle = 1; +} diff --git a/protoc-gen-kaja/tests/048_method_custom_options/test.proto b/protoc-gen-kaja/tests/048_method_custom_options/test.proto new file mode 100644 index 00000000..63944545 --- /dev/null +++ b/protoc-gen-kaja/tests/048_method_custom_options/test.proto @@ -0,0 +1,37 @@ +syntax = "proto3"; + +package test; + +import "google/protobuf/descriptor.proto"; + +// Define custom method options +extend google.protobuf.MethodOptions { + string api_version = 51001; + bool deprecated_api = 51002; + int32 rate_limit = 51003; +} + +message Request { + string query = 1; +} + +message Response { + string result = 1; +} + +service TestService { + // Method with custom options + rpc GetData(Request) returns (Response) { + option (api_version) = "v2.0"; + option (deprecated_api) = true; + option (rate_limit) = 100; + } + + // Method without custom options + rpc PostData(Request) 
returns (Response); + + // Method with only one custom option + rpc UpdateData(Request) returns (Response) { + option (api_version) = "v1.5"; + } +} diff --git a/protoc-gen-kaja/tests/049_all_wkt/test.proto b/protoc-gen-kaja/tests/049_all_wkt/test.proto new file mode 100644 index 00000000..14d8818c --- /dev/null +++ b/protoc-gen-kaja/tests/049_all_wkt/test.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package test; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +message AllWKT { + google.protobuf.Any any_field = 1; + google.protobuf.Duration duration_field = 2; + google.protobuf.Empty empty_field = 3; + google.protobuf.FieldMask mask_field = 4; + google.protobuf.Struct struct_field = 5; + google.protobuf.Timestamp timestamp_field = 6; + + google.protobuf.DoubleValue double_value = 7; + google.protobuf.FloatValue float_value = 8; + google.protobuf.Int64Value int64_value = 9; + google.protobuf.UInt64Value uint64_value = 10; + google.protobuf.Int32Value int32_value = 11; + google.protobuf.UInt32Value uint32_value = 12; + google.protobuf.BoolValue bool_value = 13; + google.protobuf.StringValue string_value = 14; + google.protobuf.BytesValue bytes_value = 15; +} diff --git a/protoc-gen-kaja/tests/050_escaped_collision/test.proto b/protoc-gen-kaja/tests/050_escaped_collision/test.proto new file mode 100644 index 00000000..969b7cd4 --- /dev/null +++ b/protoc-gen-kaja/tests/050_escaped_collision/test.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package test; + +message Request {} +message Response {} + +// Test service where method names collide after escaping +service TestService { + // This becomes "name$" after escaping + rpc Name(Request) returns (Response); + + // This should ALSO become "name$" but would collide! 
+ // Wait, proto doesn't allow $ in names... + // Let me try a different approach: what if we have both Name and NAME? + // After camelCase: name and name - collision before escaping + + // Let me try: constructor and Constructor$ (if allowed) + rpc Constructor(Request) returns (Response); +} diff --git a/protoc-gen-kaja/tests/051_service_reserved_type_names/test.proto b/protoc-gen-kaja/tests/051_service_reserved_type_names/test.proto new file mode 100644 index 00000000..c30f13e5 --- /dev/null +++ b/protoc-gen-kaja/tests/051_service_reserved_type_names/test.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; +package test; + +message Request {} +message Response {} + +// Service names that are reserved TypeScript type names should be escaped +service Array { + rpc Get(Request) returns (Response); +} + +service String { + rpc Set(Request) returns (Response); +} + +service Object { + rpc Create(Request) returns (Response); +} diff --git a/protoc-gen-kaja/tests/052_package_reserved/test.proto b/protoc-gen-kaja/tests/052_package_reserved/test.proto new file mode 100644 index 00000000..0ed3fa94 --- /dev/null +++ b/protoc-gen-kaja/tests/052_package_reserved/test.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +// Package with a reserved TypeScript keyword +package const; + +message TestMessage { + string value = 1; +} + +service TestService { + rpc GetTest(TestMessage) returns (TestMessage); +} diff --git a/protoc-gen-kaja/tests/053_json_name/test.proto b/protoc-gen-kaja/tests/053_json_name/test.proto new file mode 100644 index 00000000..724d0724 --- /dev/null +++ b/protoc-gen-kaja/tests/053_json_name/test.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package test; + +message TestMessage { + // Field with json_name option that's a reserved word + string value = 1 [json_name = "constructor"]; + + // Another reserved property + int32 number = 2 [json_name = "__proto__"]; + + // Reserved keyword + bool flag = 3 [json_name = "typeof"]; +} diff --git 
a/protoc-gen-kaja/tests/054_double_underscore/test.proto b/protoc-gen-kaja/tests/054_double_underscore/test.proto new file mode 100644 index 00000000..cb4747b0 --- /dev/null +++ b/protoc-gen-kaja/tests/054_double_underscore/test.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package test; + +message TestMessage { + // Field with double underscore prefix + string __value = 1; + + // Triple underscore + int32 ___number = 2; + + // Mix + bool __proto__ = 3; +} diff --git a/protoc-gen-kaja/tests/055_special_comment/test.proto b/protoc-gen-kaja/tests/055_special_comment/test.proto new file mode 100644 index 00000000..45092db5 --- /dev/null +++ b/protoc-gen-kaja/tests/055_special_comment/test.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package test; + +message TestMessage { + // Comment with */ that could break JSDoc + string value = 1; + + /* Multi-line comment with */ in the middle + that spans lines */ + int32 number = 2; + + // Comment with