diff --git a/docs/cli/imports.md b/docs/cli/imports.md new file mode 100644 index 00000000..a11a3f96 --- /dev/null +++ b/docs/cli/imports.md @@ -0,0 +1,717 @@ +# Imports Command – Comprehensive Reference + +The `raiden imports` command fetches Supabase resources (tables, roles, RPC functions, storage buckets, and types) and generates corresponding Go source files in the local project. This document covers the full architecture, execution flow, data model, flags, error handling, and key implementation details. + +--- + +## Table of Contents + +1. [Overview](#overview) +2. [Usage & Flags](#usage--flags) +3. [Execution Flow](#execution-flow) +4. [Phase-by-Phase Breakdown](#phase-by-phase-breakdown) +5. [Architecture Diagrams](#architecture-diagrams) +6. [Key Data Structures](#key-data-structures) +7. [Resource Loading (BFF vs Service Mode)](#resource-loading-bff-vs-service-mode) +8. [State Management](#state-management) +9. [Comparison & Diff Logic](#comparison--diff-logic) +10. [Code Generation](#code-generation) +11. [File Map](#file-map) +12. [Error Handling](#error-handling) +13. [Troubleshooting](#troubleshooting) + +--- + +## Overview + +`raiden imports` synchronises the local project with the remote Supabase database by: + +1. Fetching remote resources (tables, roles, functions, storages, types, policies, indexes). +2. Loading the local state (`.raiden/` binary state files). +3. Comparing remote vs local to detect drift. +4. Generating/updating Go source files under `internal/` (models, roles, rpc, storages, types). +5. Persisting the updated state. + +The command uses a **two-stage build** approach: the Raiden CLI first code-generates a temporary `cmd/import/main.go` binary that embeds the user's registered models and bootstrap code, then compiles and executes that binary to perform the actual import. 
+
+---
+
+## Usage & Flags
+
+```bash
+raiden imports [flags]
+```
+
+### Resource Selection Flags
+
+| Flag | Short | Description |
+|------|-------|-------------|
+| `--models-only` | `-m` | Import tables/models only |
+| `--rpc-only` | | Import RPC functions only |
+| `--roles-only` | `-r` | Import roles only |
+| `--storages-only` | | Import storage buckets only |
+| `--policy-only` | | Import policies only |
+| `--schema` | `-s` | Comma-separated list of schemas to include (default: `public`) |
+
+When **no** resource flag is set, all resources are imported.
+
+### Behaviour Flags
+
+| Flag | Description |
+|------|-------------|
+| `--force` | Skip diff checks and overwrite local state unconditionally |
+| `--dry-run` | Simulate the import without writing files; reports what would change |
+| `--update-state-only` | Update the local `.raiden/` state without generating code (see Phase 9) |
+| `--generate-controller` | Generate REST controller stubs for imported tables (BFF mode only) |
+
+### Debug Flags
+
+| Flag | Description |
+|------|-------------|
+| `--debug` | Enable debug-level logging |
+| `--trace` | Enable trace-level logging (most verbose) |
+
+### Examples
+
+```bash
+# Import everything from public schema
+raiden imports
+
+# Import only models from auth and public schemas
+raiden imports --models-only --schema auth,public
+
+# Dry run to preview changes
+raiden imports --dry-run
+
+# Force overwrite without diff checks
+raiden imports --force
+
+# Import models and auto-generate controllers
+raiden imports --models-only --generate-controller
+
+# Import with debug logging
+raiden imports --debug
+```
+
+---
+
+## Execution Flow
+
+The import process spans four layers: the CLI command, the build-and-exec wrapper, the compiled import binary, and the core import logic.
+
+### High-Level Flow
+
+```
+User runs: raiden imports --schema public,auth
+         │
+         ▼
+┌──────────────────────────────────────────────────────┐
+│ Layer 1: CLI Command (cmd/raiden/commands/import.go) │
+│                                                      │
+│ 1. PreRun: verify configs/app.yaml exists            │
+│ 2. Version check (auto-update if needed)             │
+│ 3. 
Load config: raiden.LoadConfig() │ +│ 4. generate.Run() — refresh internal/bootstrap │ +│ 5. imports.Run() — build & exec import binary │ +└──────────────────────┬───────────────────────────────┘ + │ + ▼ +┌──────────────────────────────────────────────────────┐ +│ Layer 2: Build & Exec (pkg/cli/imports/command.go) │ +│ │ +│ 1. Build: go build -o build/import cmd/import/main.go│ +│ 2. Assemble args from flags │ +│ 3. Exec: build/import --schema public,auth │ +└──────────────────────┬───────────────────────────────┘ + │ + ▼ +┌──────────────────────────────────────────────────────┐ +│ Layer 3: Import Binary (cmd/import/main.go) │ +│ (code-generated by pkg/generator/import.go) │ +│ │ +│ 1. Load config │ +│ 2. bootstrap.RegisterModels/Types/Rpc/Roles/Storages │ +│ 3. generate.Run() — pre-generate pass │ +│ 4. resource.Import(flags, config) ← CORE │ +│ 5. generate.Run() — post-generate (bootstrap regen) │ +└──────────────────────┬───────────────────────────────┘ + │ + ▼ +┌──────────────────────────────────────────────────────┐ +│ Layer 4: Core Import (pkg/resource/import.go) │ +│ │ +│ importJob.run() executes 9 phases: │ +│ │ +│ Phase 1: loadNativeRoles │ +│ Phase 2: loadRemoteResource │ +│ Phase 3: prepareRemoteResource │ +│ Phase 4: loadLocalState │ +│ Phase 5: extractAppResources │ +│ Phase 6: collectValidationTags │ +│ Phase 7: performComparisons │ +│ Phase 8: computeReport │ +│ Phase 9: handleOutput │ +└──────────────────────────────────────────────────────┘ +``` + +--- + +## Phase-by-Phase Breakdown + +### Phase 1: Load Native Roles + +**Function:** `importJob.loadNativeRoles()` +**Source:** `pkg/resource/import.go` + +Loads the built-in PostgreSQL/Supabase roles (e.g., `postgres`, `supabase_admin`, `pg_*`) into a map. These are used later to distinguish user-defined roles from system roles. + +**Data source:** `pkg/postgres/roles.NativeRoles` — a hard-coded list of known native roles. 
+ +--- + +### Phase 2: Load Remote Resource + +**Function:** `importJob.loadRemoteResource()` +**Source:** `pkg/resource/load.go :: Load()` + +Fetches all resources from the Supabase database **concurrently** using goroutines. The resources loaded depend on the flags and the server mode (BFF vs Service). + +Resources fetched: +- Tables (with columns, primary keys, relationships) +- Roles and role memberships +- Functions (RPC) +- Storage buckets +- Indexes +- Relationship actions (ON DELETE, ON UPDATE) +- Policies (ACL) +- Types (custom PostgreSQL types) + +After loading, post-processing attaches: +- Indexes and relation actions to their respective tables via `tables.AttachIndexAndAction()` +- Inherited roles via `roles.AttachInherithRole()` + +--- + +### Phase 3: Prepare Remote Resource + +**Function:** `importJob.prepareRemoteResource()` +**Source:** `pkg/resource/import.go` + +Filters the fetched resources to only include what the user requested: + +1. **Schema filter:** Tables and functions are filtered by `--schema` (defaults to `public`). +2. **Allowed tables filter** (BFF mode only): When `config.AllowedTables` is set, further restricts which tables are included. Relationships referencing excluded tables are also removed. +3. **Native role removal:** System roles are removed from the role list, keeping only user-defined roles. +4. **Relation validation:** Logs warnings for relationships referencing tables outside the import set. + +--- + +### Phase 4: Load Local State + +**Function:** `importJob.loadLocalState()` +**Source:** `pkg/state/state.go :: Load()` + +Reads the local state from `.raiden/` directory. The state is stored as binary GOB files and tracks what was previously imported: +- Tables with their columns, relations, and policies +- Roles +- RPC functions +- Storage buckets +- Types + +If no state exists (first import), this returns an empty state. 
+ +--- + +### Phase 5: Extract App Resources + +**Function:** `importJob.extractAppResources()` +**Source:** `pkg/resource/common.go :: extractAppResource()` + +Compares the local state against the currently registered Go resources (from `bootstrap.Register*()` calls) and categorises each resource as either **New** or **Existing**. + +The output for each resource type contains: +- `New` — resources in local state but not yet registered in Go code +- `Existing` — resources that exist in both state and Go code + +This is used later for comparison and reporting. + +--- + +### Phase 6: Collect Validation Tags + +**Function:** `importJob.collectValidationTags()` +**Source:** `pkg/resource/import.go` + +Preserves existing model validation tags (e.g., `validate:"required"`) so they aren't lost during code regeneration. Tags are collected from both new and existing table extractions and stored in `mapModelValidationTags`. + +Only runs when importing models (`--models-only` or all resources). + +--- + +### Phase 7: Perform Comparisons + +**Function:** `importJob.performComparisons()` +**Source:** `pkg/resource/import.go` + +**Skipped entirely when `--force` is used.** + +Runs diff checks between remote (Supabase) and local (existing Go code) resources: + +| Comparison | Package | What it checks | +|-----------|---------|---------------| +| `types.Compare()` | `pkg/resource/types` | Custom type definitions | +| `tables.Compare()` | `pkg/resource/tables` | Table structure, columns, constraints | +| `roles.Compare()` | `pkg/resource/roles` | Role configuration | +| `rpc.Compare()` | `pkg/resource/rpc` | Function signatures, parameters | +| `storages.Compare()` | `pkg/resource/storages` | Bucket configuration | + +Each comparison: +- Returns an **error** if drift is detected and conflicts exist. +- In `--dry-run` mode, errors are collected (not returned) for reporting. +- Only runs for the resource types being imported and when existing resources are present. 
+ +--- + +### Phase 8: Compute Report + +**Function:** `importJob.computeReport()` +**Source:** `pkg/resource/import.go` + +Counts how many **new** resources would be added for each type by calling `GetNewCountData()` from each resource package. The report contains counts for: + +```go +type ImportReport struct { + Table int + Role int + Rpc int + Storage int + Types int + Policies int +} +``` + +--- + +### Phase 9: Handle Output + +**Function:** `importJob.handleOutput()` +**Source:** `pkg/resource/import.go` + +Decides what to do based on flags: + +| Condition | Action | +|-----------|--------| +| `--dry-run` with errors | Print collected errors, skip report | +| `--dry-run` without errors | Print report showing what would be imported | +| `--update-state-only` | Update `.raiden/` state without generating code | +| Normal (no flags) | Run `generateImportResource()` → generate Go files + update state | + +--- + +## Architecture Diagrams + +### Sequence Diagram + +See [imports-sequence.md](./imports-sequence.md) for the full Mermaid sequence diagram. + +### Structural Diagram + +See [imports-structure.md](./imports-structure.md) for the class-style package relationship diagram. 
+ +### Dependency Injection + +The `importJob` struct uses a `importDeps` struct for dependency injection, making each phase testable: + +``` +importDeps { + loadNativeRoles → pkg/postgres/roles.NativeRoles + loadRemote → pkg/resource.Load() + loadState → pkg/state.Load() + extractApp → extractAppResource() + compareTypes → pkg/resource/types.Compare() + compareTables → pkg/resource/tables.Compare() + compareRoles → pkg/resource/roles.Compare() + compareRpc → pkg/resource/rpc.Compare() + compareStorages → pkg/resource/storages.Compare() + updateStateOnly → updateStateOnly() + generate → generateImportResource() + printReport → PrintImportReport() +} +``` + +--- + +## Key Data Structures + +### Resource (remote data container) + +```go +// pkg/resource/load.go +type Resource struct { + Tables []objects.Table + Policies objects.Policies + Roles []objects.Role + RoleMemberships []objects.RoleMembership + Functions []objects.Function + Storages []objects.Bucket + Indexes []objects.Index + RelationActions []objects.TablesRelationshipAction + Types []objects.Type +} +``` + +### Flags (import options) + +```go +// pkg/resource/common.go +type Flags struct { + ProjectPath string + RpcOnly bool + RolesOnly bool + ModelsOnly bool + StoragesOnly bool + AllowedSchema string + DebugMode bool + TraceMode bool + Generate generate.Flags + GenerateController bool + ForceImport bool + UpdateStateOnly bool + DryRun bool +} +``` + +`Flags.All()` returns `true` when no resource-specific flag is set (imports everything). 
+ +### State (local persistence) + +```go +// pkg/state/state.go +type State struct { + Tables []TableState + Roles []RoleState + Rpc []RpcState + Storage []StorageState + Types []TypeState +} + +type TableState struct { + Table objects.Table + Relation []Relation + ModelPath string + ModelStruct string + LastUpdate time.Time + Policies []objects.Policy +} +``` + +### importJob (workflow state) + +```go +// pkg/resource/import.go +type importJob struct { + flags *Flags + config *raiden.Config + deps importDeps + mapNativeRole map[string]raiden.Role + resource *Resource + localState *state.State + importState state.LocalState + appTables state.ExtractTableResult + appRoles state.ExtractRoleResult + appRpcFunctions state.ExtractRpcResult + appStorage state.ExtractStorageResult + appTypes state.ExtractTypeResult + nativeStateRoles []state.RoleState + dryRunErrors []string + mapModelValidationTags map[string]state.ModelValidationTag + report ImportReport + reportComputed bool + reportPrinted bool + skipReport bool +} +``` + +--- + +## Resource Loading (BFF vs Service Mode) + +The `Load()` function behaves differently based on `config.Mode`: + +### BFF Mode (`raiden.BffMode`) + +Uses the **Supabase REST API** (`pkg/supabase`) to fetch resources: + +``` +Supabase API ──► supabase.GetTables() + ► supabase.GetFunctions() + ► supabase.GetRoles() + ► supabase.GetBuckets() + ► supabase.GetPolicies() + ► supabase.GetTypes() + ► supabase.GetIndexes() + ► supabase.GetRoleMemberships() + ► supabase.GetTableRelationshipActions() +``` + +**Selective loading:** Only loads resource types matching the flags. For example, `--rpc-only` skips loading tables, roles, and storages (but loads types and tables for function parameter resolution). 
+ +### Service Mode + +Uses **PgMeta** (`pkg/connector/pgmeta`) for direct PostgreSQL metadata access: + +``` +PgMeta ──► pgmeta.GetTables() + ► pgmeta.GetFunctions() + ► pgmeta.GetIndexes() + ► pgmeta.GetTableRelationshipActions() + ► pgmeta.GetTypes() +``` + +Roles are still loaded via the Supabase API. Storage buckets are **not loaded** in Service mode. + +### Concurrency + +All resource loads run concurrently as goroutines, sending results through a shared channel. The `Load()` function collects results using a type switch: + +```go +for result := range loadChan { + switch rs := result.(type) { + case []objects.Table: resource.Tables = rs + case []objects.Role: resource.Roles = rs + case []objects.Function: resource.Functions = rs + case error: return nil, rs + // ... + } +} +``` + +--- + +## State Management + +### State Location + +State files are stored in the `.raiden/` directory at the project root, using Go's GOB binary encoding. + +### State Lifecycle + +``` +First Import: + No state exists → all resources treated as "new" + → generate files → persist state + +Subsequent Imports: + Load state → extract registered resources + → classify as New/Existing + → compare Existing against remote + → generate only what changed → persist updated state +``` + +### LocalState Operations + +The `LocalState` struct provides thread-safe operations: + +- `AddTable(TableState)` — adds or updates a table in state +- `AddRole(RoleState)` — adds or updates a role in state +- `AddRpc(RpcState)` — adds or updates an RPC function in state +- `AddStorage(StorageState)` — adds or updates a storage bucket in state +- `AddType(TypeState)` — adds or updates a custom type in state +- `Persist()` — writes state to disk + +### State Update During Generation + +During code generation, each generated file emits a message through `stateChan`. The `UpdateLocalStateFromImport()` goroutine listens on this channel and updates the `LocalState` in real-time. 
When the channel closes (generation complete), it calls `Persist()`. + +--- + +## Comparison & Diff Logic + +Each resource type has its own comparison package under `pkg/resource/`: + +| Package | Compare Function | What It Checks | +|---------|-----------------|----------------| +| `pkg/resource/types` | `Compare(remote, existing)` | Type name, attributes, schema | +| `pkg/resource/tables` | `Compare(mode, remote, existing)` | Columns, types, constraints, defaults, nullability | +| `pkg/resource/roles` | `Compare(remote, existing)` | Role name, permissions, inheritance | +| `pkg/resource/rpc` | `Compare(remote, existing)` | Function name, parameters, return type, schema | +| `pkg/resource/storages` | `Compare(remote, existing)` | Bucket name, public flag, size limits | + +### Comparison Modes + +`tables.Compare()` accepts a mode parameter: +- `CompareModeImport` — used during imports (remote is source of truth) +- Other modes may exist for apply operations + +### Force Import + +When `--force` is set, **all comparisons are skipped**. The remote state overwrites local state unconditionally. Use with caution. + +### Dry Run + +When `--dry-run` is set, comparison errors are **collected** instead of returned. 
The command prints either: +- A success report with counts of new resources +- An error summary if drift/conflicts exist + +--- + +## Code Generation + +### Generated File Structure + +``` +project/ +├── internal/ +│ ├── bootstrap/ # Auto-generated registration code +│ │ ├── models.go +│ │ ├── roles.go +│ │ ├── rpc.go +│ │ ├── storages.go +│ │ └── types.go +│ ├── models/ # Table model structs +│ │ ├── users.go +│ │ └── orders.go +│ ├── roles/ # Role definitions +│ │ └── custom_role.go +│ ├── rpc/ # RPC function wrappers +│ │ └── get_stats.go +│ ├── storages/ # Storage bucket definitions +│ │ └── avatars.go +│ └── types/ # Custom PostgreSQL types +│ └── status_enum.go +├── cmd/ +│ └── import/ +│ └── main.go # Code-generated import binary +└── .raiden/ # Binary state files (GOB encoded) +``` + +### Generation Pipeline + +The `generateImportResource()` function runs all generators concurrently in a single goroutine: + +1. **Types** → `generator.GenerateTypes()` — custom PostgreSQL type enums/structs +2. **Models** → `generator.GenerateModels()` — table struct definitions with column tags +3. **Controllers** → `generator.GenerateRestControllers()` — REST controller stubs (optional, BFF only) +4. **Roles** → `generator.GenerateRoles()` — role struct definitions +5. **RPC** → `generator.GenerateRpc()` — function parameter/return type structs +6. **Storages** → `generator.GenerateStorages()` — bucket definitions with policies + +### Capture Function Pattern + +Each generator wraps the standard `generator.Generate()` with `ImportDecorateFunc()`, which: +1. Generates the file content normally +2. Finds the matching resource item +3. 
Sends the item + output path to `stateChan` for state tracking + +--- + +## File Map + +| File | Role | +|------|------| +| `cmd/raiden/commands/import.go` | CLI command definition, flag binding, orchestration | +| `pkg/cli/imports/command.go` | Builds and executes the import binary subprocess | +| `pkg/generator/import.go` | Code-generates `cmd/import/main.go` template | +| `pkg/resource/import.go` | **Core logic** — `importJob` struct, all 9 phases, code generation | +| `pkg/resource/load.go` | `Resource` struct, `Load()`, concurrent resource fetching | +| `pkg/resource/common.go` | `Flags` struct, filter functions, `extractAppResource()` | +| `pkg/state/state.go` | `State` / `LocalState` structs, GOB persistence | +| `pkg/state/table.go` | Table state extraction and comparison | +| `pkg/state/role.go` | Role state extraction | +| `pkg/state/rpc.go` | RPC state extraction | +| `pkg/state/storage.go` | Storage state extraction | +| `pkg/state/type.go` | Type state extraction | +| `pkg/resource/tables/` | Table comparison, index/action attachment, model input building | +| `pkg/resource/roles/` | Role comparison, inheritance attachment | +| `pkg/resource/rpc/` | RPC function comparison | +| `pkg/resource/storages/` | Storage comparison, input building | +| `pkg/resource/types/` | Type comparison | +| `pkg/resource/policies/` | Policy ACL expression cleanup | + +### Test Files + +| File | Coverage | +|------|----------| +| `pkg/resource/import_test.go` | Integration tests for the full import flow | +| `pkg/resource/import_internal_test.go` | White-box tests for internal helpers | +| `pkg/generator/import_test.go` | Tests for import binary code generation | + +--- + +## Error Handling + +### Panic Recovery + +The `runImport()` function includes a deferred panic recovery: + +```go +defer func() { + if r := recover(); r != nil { + err = fmt.Errorf("import panic: %v", r) + } +}() +``` + +### Error Propagation + +- **Phase errors** propagate up immediately, aborting the 
import. +- **Comparison errors** are either returned (normal mode) or collected (dry-run mode). +- **Generation errors** are sent through `errChan` and returned on the first error. +- **State persistence errors** are returned via the `doneListen` channel. + +### Common Error Scenarios + +| Error | Cause | Resolution | +|-------|-------|------------| +| `missing config file` | `configs/app.yaml` not found | Run `raiden configure` | +| `error building binary` | Compilation of `cmd/import/main.go` failed | Check Go code syntax, run `go build ./...` | +| Comparison error | Remote schema differs from local state | Use `--force` to overwrite, or reconcile manually | +| Load error | Network/auth failure when fetching from Supabase | Check config credentials and connectivity | + +--- + +## Troubleshooting + +### Debug Logging + +```bash +# See which tables/functions are included/excluded +raiden imports --debug + +# See full filter details and schema matching +raiden imports --trace +``` + +### Preview Without Changes + +```bash +raiden imports --dry-run +``` + +### Reset Local State + +Delete the `.raiden/` directory to force a clean import: + +```bash +rm -rf .raiden/ +raiden imports +``` + +### Schema Filtering Issues + +If tables are missing, verify the schema: + +```bash +# Import from multiple schemas +raiden imports --schema public,auth,storage + +# Debug to see what's being filtered +raiden imports --schema public,auth --debug +``` + +### Force Overwrite + +When comparison errors block import (e.g., after manual DB changes): + +```bash +raiden imports --force +``` diff --git a/openspec/changes/archive/2026-02-24-fix-apply-false-changes/proposal.md b/openspec/changes/archive/2026-02-24-fix-apply-false-changes/proposal.md new file mode 100644 index 00000000..a138a08c --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-apply-false-changes/proposal.md @@ -0,0 +1,20 @@ +# Change: Fix apply dry-run false-change detection + +## Why +After running `import --force` 
followed by `apply --dry-run`, the system incorrectly +reports relation creates/deletes, policy table reassignments, and RPC updates even +though no code was modified. This makes the dry-run output noisy and untrustworthy. + +## What Changes +- Fix relation comparison to skip duplicate FK constraints for already-matched columns +- Fix relation comparison to skip cross-schema FK references (e.g., `auth.users`) +- Fix relation index creation check to only fire when target has index but source doesn't +- Fix policy comparison to match by schema+table+name instead of name only +- Fix RPC state extraction to preserve stored CompleteStatement from previous import + +## Impact +- Affected specs: cli-imports +- Affected code: + - `pkg/resource/tables/compare.go` (relation comparison) + - `pkg/resource/policies/compare.go` (policy comparison) + - `pkg/state/rpc.go` (RPC state extraction) diff --git a/openspec/changes/archive/2026-02-24-fix-apply-false-changes/specs/cli-imports/spec.md b/openspec/changes/archive/2026-02-24-fix-apply-false-changes/specs/cli-imports/spec.md new file mode 100644 index 00000000..242d9e23 --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-apply-false-changes/specs/cli-imports/spec.md @@ -0,0 +1,41 @@ +## MODIFIED Requirements + +### Requirement: Relation Comparison Accuracy +The relation comparison SHALL correctly match source and target relations even when the +database contains duplicate FK constraints for the same column or cross-schema FK references. 
+ +#### Scenario: Duplicate FK constraints on same column +- **WHEN** the remote database has two FK constraints for the same source column (e.g., custom-named `fk_mc_division` and default-named `master_creators_division_id_fkey`) +- **AND** the local code has one FK for that column +- **THEN** the matched constraint SHALL be recognized as identical +- **AND** the duplicate constraint SHALL NOT be flagged as a delete + +#### Scenario: Cross-schema FK reference +- **WHEN** the remote database has a FK referencing a table in a different schema (e.g., `public.user_brands.user_id → auth.users.id`) +- **AND** the local code does not represent this FK (because the target table is not in the imported model set) +- **THEN** the cross-schema FK SHALL NOT be flagged as a delete + +#### Scenario: Index creation check +- **WHEN** both local and remote sides have no index for a relation +- **THEN** no index creation SHALL be proposed +- **WHEN** the remote has an index but the local does not +- **THEN** an index creation item SHALL be proposed + +### Requirement: Policy Comparison Accuracy +The policy comparison SHALL match policies by their full identity (schema, table, and name) +rather than by name alone, to prevent cross-table mismatches when multiple tables share +the same policy name. + +#### Scenario: Same-named policies on different tables +- **WHEN** multiple tables have policies with the same name (e.g., "admin full access" on `products` and `product_interaction_performance`) +- **THEN** each policy SHALL be compared only with its corresponding policy on the same table +- **AND** no false table-change diffs SHALL be reported + +### Requirement: RPC Comparison Accuracy +The RPC state extraction SHALL preserve the CompleteStatement from the previous import +rather than rebuilding it from the Go struct template, to avoid format-only differences +between `BuildRpc()` output and `pg_get_functiondef()` output. 
+ +#### Scenario: No code changes after import +- **WHEN** a user runs import and then apply without modifying any RPC code +- **THEN** no RPC updates SHALL be detected diff --git a/openspec/changes/archive/2026-02-24-fix-apply-false-changes/tasks.md b/openspec/changes/archive/2026-02-24-fix-apply-false-changes/tasks.md new file mode 100644 index 00000000..c4850a0f --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-apply-false-changes/tasks.md @@ -0,0 +1,16 @@ +## 1. Investigation +- [x] 1.1 Trace relation false creates/deletes — root cause: duplicate FK constraints + cross-schema FKs +- [x] 1.2 Trace policy false table reassignment — root cause: CompareList maps by name only, not schema+table+name +- [x] 1.3 Trace RPC false updates — root cause: BindRpcFunction overwrites state CompleteStatement with BuildRpc() output + +## 2. Implementation +- [x] 2.1 Add `matchedSourceCols` tracking to skip duplicate FK deletes in `compareRelations` +- [x] 2.2 Add cross-schema FK filter to skip FKs where TargetTableSchema != SourceSchema +- [x] 2.3 Change index creation condition from `t.Index == nil && sc.Index == nil` to `t.Index != nil && sc.Index == nil` +- [x] 2.4 Fix policy `CompareList` to use schema+table+name key instead of name only +- [x] 2.5 Preserve state CompleteStatement in `ExtractRpc` for existing functions + +## 3. 
Validation +- [x] 3.1 All existing tests pass +- [x] 3.2 Pivot project: `import` shows zero conflicts +- [x] 3.3 Pivot project: `apply --dry-run` shows only genuinely new resources diff --git a/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/proposal.md b/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/proposal.md new file mode 100644 index 00000000..5a59565b --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/proposal.md @@ -0,0 +1,11 @@ +# Change: Fix model relation TargetTableName derivation + +## Why +`addModelRelation` in `pkg/state/table.go` derives `TargetTableName` from the struct **field name** (`utils.ToSnakeCase(field.Name)`) instead of the referenced **type name**. When a field name differs from the type name (e.g., field `MasterCreatorBrand` of type `*MasterCreators`), the relation points to a non-existent table (`master_creator_brand` instead of `master_creators`). This causes `validateTableRelations` during `apply` to fail with "target column id is not exist in table master_creator_brand". + +## What Changes +- Fix `addModelRelation` in `pkg/state/table.go` to resolve `TargetTableName` from the field's type name using `findTypeName()`, matching the behavior of `addStateRelation` + +## Impact +- Affected specs: cli-imports +- Affected code: `pkg/state/table.go` diff --git a/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/specs/cli-imports/spec.md b/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/specs/cli-imports/spec.md new file mode 100644 index 00000000..9c71240b --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/specs/cli-imports/spec.md @@ -0,0 +1,61 @@ +## MODIFIED Requirements + +### Requirement: Comparison and Diff Checks + +The system SHALL compare remote (Supabase) resources against existing local resources to detect drift, unless `--force` is set. 
Pointer-typed fields (e.g., `*string`) SHALL be compared by dereferenced value, not by pointer address. Slice fields SHALL be compared by element values, not by slice indices. Relation action comparisons SHALL only flag a conflict when action data is available from both sides, or when running in apply mode. Relation matching SHALL fall back to `schema.table.column` lookup when constraint name lookup fails. Cross-schema FK references SHALL be filtered out before comparison when the target table is not in the local model set. RPC `CompleteStatement` comparison during import SHALL use the stored state value, not the rebuilt value from `BuildRpc()`. Model relation `TargetTableName` SHALL be derived from the referenced type name, not from the struct field name, to ensure it matches the actual database table. + +#### Scenario: Normal comparison +- **WHEN** `--force` is not set and existing resources are present +- **THEN** the system SHALL run comparison checks for types, tables, roles, RPC functions, and storages + +#### Scenario: Comparison error in normal mode +- **WHEN** a comparison detects conflicting changes +- **THEN** the system SHALL return an error and abort the import + +#### Scenario: Comparison error in dry-run mode +- **WHEN** `--dry-run` is set and a comparison detects conflicts +- **THEN** the system SHALL collect the error message without aborting, and report it at the end + +#### Scenario: Skip comparisons with force flag +- **WHEN** `--force` is set +- **THEN** all comparison checks SHALL be skipped and remote state SHALL overwrite local unconditionally + +#### Scenario: Skip comparison when no existing resources +- **WHEN** the Existing set for a resource type is empty (first import or new resource type) +- **THEN** the comparison for that resource type SHALL be skipped + +#### Scenario: No false conflict on identical pointer-typed fields +- **WHEN** a type's `Comment` field has the same string value on both remote and local but different pointer 
addresses +- **THEN** the comparison SHALL report no conflict + +#### Scenario: No false conflict on identical enum values +- **WHEN** a type's `Enums` slice has the same string values on both remote and local +- **THEN** the comparison SHALL report no conflict regardless of slice allocation + +#### Scenario: No false conflict on identical attribute values +- **WHEN** a type's `Attributes` slice has the same `Name` and `TypeID` values on both remote and local +- **THEN** the comparison SHALL report no conflict regardless of slice allocation + +#### Scenario: No false conflict on missing remote relation action during import +- **WHEN** a relation's remote `Action` is nil (not attached) but local `Action` is populated, and the comparison mode is import +- **THEN** the comparison SHALL NOT flag this as a conflict + +#### Scenario: Flag missing action as conflict in apply mode +- **WHEN** a relation's local `Action` is populated but remote `Action` is nil, and the comparison mode is apply +- **THEN** the comparison SHALL flag this as a conflict for `OnUpdate` and `OnDelete` actions + +#### Scenario: No false conflict on constraint name mismatch +- **WHEN** a relation exists in both remote and local with different constraint names but identical `schema.table.column` reference +- **THEN** the comparison SHALL match them via fallback lookup and report no conflict + +#### Scenario: No false conflict on cross-schema FK references +- **WHEN** a remote table has a FK referencing a table in a different schema (e.g., `auth.users`) that is not in the local model set +- **THEN** the comparison SHALL exclude that relationship before comparison + +#### Scenario: No false conflict on RPC CompleteStatement formatting +- **WHEN** an RPC function's `CompleteStatement` from `pg_get_functiondef()` differs from the `BuildRpc()` rebuilt version only in formatting (param prefix, default quoting, search_path) +- **THEN** the import comparison SHALL use the stored state `CompleteStatement` 
and report no conflict + +#### Scenario: Correct TargetTableName for model relations +- **WHEN** a model struct field references a type with a different name than the field (e.g., field `MasterCreatorBrand` of type `*MasterCreators`) +- **THEN** the relation `TargetTableName` SHALL be derived from the type name (`master_creators`), not the field name (`master_creator_brand`) diff --git a/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/tasks.md b/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/tasks.md new file mode 100644 index 00000000..ff451647 --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-model-relation-target-table/tasks.md @@ -0,0 +1,7 @@ +## 1. Implementation +- [x] 1.1 Fix `addModelRelation` to use `raiden.GetTableName()` on the field's type for `TargetTableName` instead of `field.Name` + +## 2. Testing +- [x] 2.1 Run state tests (`go test ./pkg/state/`) +- [x] 2.2 Run resource tests (`go test ./pkg/resource/...`) +- [x] 2.3 Validate in pivot project with `apply --dry-run` diff --git a/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/proposal.md b/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/proposal.md new file mode 100644 index 00000000..71d7714d --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/proposal.md @@ -0,0 +1,31 @@ +# Change: Fix false-positive conflicts during import comparison + +## Why +The import comparison logic produces false-positive conflicts that block `raiden imports` after a clean `--force` import across all three resource comparison subsystems: + +1. **Type comparison**: Compares `*string` pointer addresses instead of values for `Comment` fields; iterates slice indices instead of values for `Enums` and `Attributes`. +2. 
**Table relation comparison**: Constraint name mismatch between remote (real DB names like `fk_mc_division`) and local (generated names like `public_table_col_fkey`) causes relations to be flagged as new. Missing remote `Action` data treated as conflict during import. Cross-schema FK references (e.g., `auth.users`) not filtered before comparison. +3. **RPC function comparison**: `BindRpcFunction` overwrites state's stored `CompleteStatement` with rebuilt one from `BuildRpc()`, which differs from `pg_get_functiondef()` in parameter prefix (`in_`), default value quoting (`'null'::uuid` vs `null::uuid`), and `search_path` inclusion. + +## What Changes +### Type comparison (`pkg/resource/types/compare.go`) +- Fix `Comment` pointer comparison to dereference and compare string values +- Remove duplicate `Comment` comparison block +- Fix `Enums` and `Attributes` range loops to iterate values instead of indices + +### Table relation comparison (`pkg/resource/tables/compare.go`) +- Add secondary index `mapTargetByCol` keyed by `schema.table.column` for fallback relation matching when constraint name lookup fails +- Guard relation `Action` nil check to only flag as conflict in apply mode, not import mode + +### Table state extraction (`pkg/state/table.go`) +- Add fallback relation lookup by `SourceTableName+SourceColumnName` in `buildTableRelation` when constraint name lookup fails + +### Cross-schema relation filtering (`pkg/resource/import.go`) +- Filter remote relationships to exclude references to tables not in the local model set before comparison, mirroring generator behavior that skips cross-schema FKs + +### RPC comparison (`pkg/resource/import.go`) +- Restore state's stored `CompleteStatement` before comparison instead of using rebuilt one from `BuildRpc()`, so import only detects real remote changes + +## Impact +- Affected specs: cli-imports +- Affected code: `pkg/resource/types/compare.go`, `pkg/resource/tables/compare.go`, `pkg/state/table.go`, 
`pkg/resource/import.go` diff --git a/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/specs/cli-imports/spec.md b/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/specs/cli-imports/spec.md new file mode 100644 index 00000000..de422c39 --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/specs/cli-imports/spec.md @@ -0,0 +1,57 @@ +## MODIFIED Requirements + +### Requirement: Comparison and Diff Checks + +The system SHALL compare remote (Supabase) resources against existing local resources to detect drift, unless `--force` is set. Pointer-typed fields (e.g., `*string`) SHALL be compared by dereferenced value, not by pointer address. Slice fields SHALL be compared by element values, not by slice indices. Relation action comparisons SHALL only flag a conflict when action data is available from both sides, or when running in apply mode. Relation matching SHALL fall back to `schema.table.column` lookup when constraint name lookup fails. Cross-schema FK references SHALL be filtered out before comparison when the target table is not in the local model set. RPC `CompleteStatement` comparison during import SHALL use the stored state value, not the rebuilt value from `BuildRpc()`. 
+ +#### Scenario: Normal comparison +- **WHEN** `--force` is not set and existing resources are present +- **THEN** the system SHALL run comparison checks for types, tables, roles, RPC functions, and storages + +#### Scenario: Comparison error in normal mode +- **WHEN** a comparison detects conflicting changes +- **THEN** the system SHALL return an error and abort the import + +#### Scenario: Comparison error in dry-run mode +- **WHEN** `--dry-run` is set and a comparison detects conflicts +- **THEN** the system SHALL collect the error message without aborting, and report it at the end + +#### Scenario: Skip comparisons with force flag +- **WHEN** `--force` is set +- **THEN** all comparison checks SHALL be skipped and remote state SHALL overwrite local unconditionally + +#### Scenario: Skip comparison when no existing resources +- **WHEN** the Existing set for a resource type is empty (first import or new resource type) +- **THEN** the comparison for that resource type SHALL be skipped + +#### Scenario: No false conflict on identical pointer-typed fields +- **WHEN** a type's `Comment` field has the same string value on both remote and local but different pointer addresses +- **THEN** the comparison SHALL report no conflict + +#### Scenario: No false conflict on identical enum values +- **WHEN** a type's `Enums` slice has the same string values on both remote and local +- **THEN** the comparison SHALL report no conflict regardless of slice allocation + +#### Scenario: No false conflict on identical attribute values +- **WHEN** a type's `Attributes` slice has the same `Name` and `TypeID` values on both remote and local +- **THEN** the comparison SHALL report no conflict regardless of slice allocation + +#### Scenario: No false conflict on missing remote relation action during import +- **WHEN** a relation's remote `Action` is nil (not attached) but local `Action` is populated, and the comparison mode is import +- **THEN** the comparison SHALL NOT flag this as a 
conflict + +#### Scenario: Flag missing action as conflict in apply mode +- **WHEN** a relation's local `Action` is populated but remote `Action` is nil, and the comparison mode is apply +- **THEN** the comparison SHALL flag this as a conflict for `OnUpdate` and `OnDelete` actions + +#### Scenario: No false conflict on constraint name mismatch +- **WHEN** a relation exists in both remote and local with different constraint names but identical `schema.table.column` reference +- **THEN** the comparison SHALL match them via fallback lookup and report no conflict + +#### Scenario: No false conflict on cross-schema FK references +- **WHEN** a remote table has a FK referencing a table in a different schema (e.g., `auth.users`) that is not in the local model set +- **THEN** the comparison SHALL exclude that relationship before comparison + +#### Scenario: No false conflict on RPC CompleteStatement formatting +- **WHEN** an RPC function's `CompleteStatement` from `pg_get_functiondef()` differs from the `BuildRpc()` rebuilt version only in formatting (param prefix, default quoting, search_path) +- **THEN** the import comparison SHALL use the stored state `CompleteStatement` and report no conflict diff --git a/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/tasks.md b/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/tasks.md new file mode 100644 index 00000000..1bbbe97f --- /dev/null +++ b/openspec/changes/archive/2026-02-24-fix-type-comparison-false-conflicts/tasks.md @@ -0,0 +1,22 @@ +## 1. Type comparison fixes +- [x] 1.1 Fix `Comment` pointer comparison in `CompareItem()` to dereference `*string` values +- [x] 1.2 Remove duplicate `Comment` comparison block (lines 106-115) +- [x] 1.3 Fix `Enums` range loop to iterate values (`for _, se := range`) instead of indices +- [x] 1.4 Fix `Attributes` range loop to iterate values and compare `Name`/`TypeID` fields + +## 2. 
Table relation comparison fixes +- [x] 2.1 Add `mapTargetByCol` secondary index in `compareRelations()` for fallback matching by `schema.table.column` +- [x] 2.2 Guard relation `Action` nil check to only flag diff in apply mode (not import) +- [x] 2.3 Add fallback relation lookup in `buildTableRelation()` by `SourceTableName+SourceColumnName` +- [x] 2.4 Add cross-schema relation filtering in `compareTables()` to exclude FKs referencing tables not in local model set + +## 3. RPC comparison fix +- [x] 3.1 Restore state `CompleteStatement` in `importJob.compareRpc()` before comparison instead of using rebuilt value from `BuildRpc()` + +## 4. Testing +- [x] 4.1 Run existing type comparison tests (`go test ./pkg/resource/types/`) +- [x] 4.2 Run table comparison tests (`go test ./pkg/resource/tables/`) +- [x] 4.3 Run state tests (`go test ./pkg/state/`) +- [x] 4.4 Run import tests (`go test ./pkg/resource/`) +- [x] 4.5 Run RPC tests (`go test ./pkg/resource/rpc/`) +- [x] 4.6 Validate in pivot project — zero false-positive conflicts diff --git a/openspec/specs/cli-imports/spec.md b/openspec/specs/cli-imports/spec.md new file mode 100644 index 00000000..a8062600 --- /dev/null +++ b/openspec/specs/cli-imports/spec.md @@ -0,0 +1,423 @@ +# CLI Imports + +## Purpose + +The `raiden imports` CLI command synchronises a local Raiden project with a remote Supabase database by fetching resources (tables, roles, RPC functions, storage buckets, custom types, and policies), comparing them against the local state, and generating Go source files that represent those resources as code. It uses a two-stage build approach: a temporary import binary is code-generated with the user's registered bootstrap code, compiled, and executed to perform the actual import. 
+ +**Key files:** `cmd/raiden/commands/import.go`, `pkg/cli/imports/command.go`, `pkg/resource/import.go`, `pkg/resource/load.go`, `pkg/resource/common.go`, `pkg/generator/import.go`, `pkg/state/state.go` +## Requirements +### Requirement: CLI Command Registration + +The system SHALL register an `imports` cobra subcommand on the root CLI that accepts resource selection flags, behaviour flags, and debug flags. The command SHALL run a `PreRun` check to verify that `configs/app.yaml` exists before proceeding. + +#### Scenario: Command invocation with valid config +- **WHEN** the user runs `raiden imports` +- **THEN** the system SHALL load the project configuration from `configs/app.yaml` and proceed with the import workflow + +#### Scenario: Command invocation without config file +- **WHEN** the user runs `raiden imports` and `configs/app.yaml` does not exist +- **THEN** the system SHALL return an error: "missing config file (./configs/app.yaml), run `raiden configure` first for generate configuration file" + +#### Scenario: Version check on invocation +- **WHEN** the user runs `raiden imports` +- **THEN** the system SHALL check for CLI updates before proceeding, and if an update is available and applied, SHALL exit with code 0 + +### Requirement: Resource Selection Flags + +The system SHALL support flags to selectively import specific resource types. When no resource-specific flag is provided, all resource types SHALL be imported. 
+ +#### Scenario: Import all resources (default) +- **WHEN** the user runs `raiden imports` without `--models-only`, `--rpc-only`, `--roles-only`, `--storages-only`, or `--policy-only` +- **THEN** the system SHALL import tables, roles, RPC functions, storage buckets, types, and policies + +#### Scenario: Import models only +- **WHEN** the user runs `raiden imports --models-only` +- **THEN** the system SHALL import only tables/models and their associated types + +#### Scenario: Import RPC only +- **WHEN** the user runs `raiden imports --rpc-only` +- **THEN** the system SHALL import only RPC functions (and load types and tables for parameter resolution) + +#### Scenario: Import roles only +- **WHEN** the user runs `raiden imports --roles-only` +- **THEN** the system SHALL import only user-defined roles + +#### Scenario: Import storages only +- **WHEN** the user runs `raiden imports --storages-only` +- **THEN** the system SHALL import only storage buckets + +#### Scenario: Import policies only +- **WHEN** the user runs `raiden imports --policy-only` +- **THEN** the system SHALL import only policies + +### Requirement: Schema Filtering + +The system SHALL filter tables and functions by the schemas specified via `--schema`. When no schema is specified, the system SHALL default to `public`. 
+ +#### Scenario: Default schema filter +- **WHEN** the user runs `raiden imports` without `--schema` +- **THEN** only tables and functions in the `public` schema SHALL be included + +#### Scenario: Multiple schema filter +- **WHEN** the user runs `raiden imports --schema auth,public,storage` +- **THEN** tables and functions in the `auth`, `public`, and `storage` schemas SHALL be included, and all others SHALL be excluded + +#### Scenario: Relation validation during filtering +- **WHEN** a table has relationships referencing tables outside the imported schema set +- **THEN** the system SHALL log a debug warning identifying the missing relation target but SHALL NOT fail the import + +### Requirement: Allowed Tables Filter (BFF Mode) + +When running in BFF mode with `config.AllowedTables` set to a value other than `"*"`, the system SHALL further restrict imported tables to only those listed in `AllowedTables`. Relationships referencing tables not in the allowed list SHALL be removed. + +#### Scenario: BFF mode with restricted tables +- **WHEN** the config mode is `bff` and `AllowedTables` is `"users,orders"` +- **THEN** only the `users` and `orders` tables SHALL be imported, and relationships to other tables SHALL be stripped + +#### Scenario: BFF mode with wildcard +- **WHEN** the config mode is `bff` and `AllowedTables` is `"*"` +- **THEN** all tables matching the schema filter SHALL be imported without further restriction + +### Requirement: Two-Stage Build Execution + +The system SHALL use a two-stage approach: first code-generate a `cmd/import/main.go` binary that embeds the user's registered models, then compile and execute that binary as a subprocess to perform the actual import. 
+ +#### Scenario: Binary compilation +- **WHEN** `imports.Run()` is called +- **THEN** the system SHALL execute `go build -o build/import cmd/import/main.go`, deleting any previously built binary first + +#### Scenario: Binary execution with flags +- **WHEN** the import binary is compiled successfully +- **THEN** the system SHALL execute `build/import` with the appropriate flags forwarded (e.g., `--models-only`, `--schema`, `--force`, `--dry-run`, `--debug`) + +#### Scenario: Windows platform support +- **WHEN** the target OS is `windows` +- **THEN** the output binary path SHALL have a `.exe` extension + +#### Scenario: Build failure +- **WHEN** `go build` fails (e.g., syntax error in generated code) +- **THEN** the system SHALL return an error: "error building binary: {details}" + +### Requirement: Generated Import Binary Bootstrap + +The code-generated `cmd/import/main.go` SHALL register the user's application resources (models, types, and optionally RPC, roles, storages in BFF mode) via `bootstrap.Register*()` calls, then run a pre-generate pass, call `resource.Import()`, and run a post-generate pass to refresh bootstrap files. 
+ +#### Scenario: BFF mode bootstrap +- **WHEN** the config mode is `bff` +- **THEN** the generated binary SHALL call `bootstrap.RegisterModels()`, `bootstrap.RegisterTypes()`, `bootstrap.RegisterRpc()`, `bootstrap.RegisterRoles()`, and `bootstrap.RegisterStorages()` + +#### Scenario: Service mode bootstrap +- **WHEN** the config mode is not `bff` +- **THEN** the generated binary SHALL call only `bootstrap.RegisterModels()` and `bootstrap.RegisterTypes()` + +#### Scenario: Post-import regeneration +- **WHEN** the import completes successfully and `--dry-run` is not set +- **THEN** the generated binary SHALL run `generate.Run()` a second time to regenerate bootstrap files reflecting newly imported resources + +### Requirement: Concurrent Remote Resource Loading + +The `Load()` function SHALL fetch all required resources from the remote Supabase database concurrently using goroutines. Results SHALL be collected through a typed channel. + +#### Scenario: Concurrent fetch in BFF mode +- **WHEN** `Load()` is called in BFF mode with all resources +- **THEN** tables, roles, role memberships, functions, storages, indexes, relation actions, policies, and types SHALL be fetched concurrently via `pkg/supabase` API calls + +#### Scenario: Concurrent fetch in Service mode +- **WHEN** `Load()` is called in Service mode +- **THEN** tables, functions, indexes, relation actions, and types SHALL be fetched from PgMeta (`pkg/connector/pgmeta`), and roles from the Supabase API + +#### Scenario: Fetch error propagation +- **WHEN** any resource fetch goroutine encounters an error +- **THEN** the error SHALL be sent through the channel and `Load()` SHALL return that error immediately + +### Requirement: Post-Load Resource Enrichment + +After loading remote resources, the system SHALL attach additional metadata to the raw resource data. 
+ +#### Scenario: Table enrichment +- **WHEN** tables are loaded +- **THEN** `tables.AttachIndexAndAction()` SHALL attach indexes and relationship actions to their respective tables + +#### Scenario: Role enrichment +- **WHEN** roles are loaded +- **THEN** `roles.AttachInherithRole()` SHALL attach inherited role memberships to each role, using the native role map to resolve references + +### Requirement: Native Role Handling + +The system SHALL maintain a map of built-in PostgreSQL/Supabase roles (e.g., `postgres`, `supabase_admin`, `pg_*`) and use it to separate native roles from user-defined roles. + +#### Scenario: Native role exclusion from import +- **WHEN** remote roles are loaded +- **THEN** native roles SHALL be excluded from the importable roles list (only user-defined roles are imported as code) + +#### Scenario: Native role state tracking +- **WHEN** remote roles include native roles +- **THEN** native roles SHALL be recorded in the import state for reference but SHALL NOT be code-generated + +### Requirement: Local State Management + +The system SHALL persist import state in the `.raiden/` directory using Go's GOB binary encoding. The state tracks previously imported tables, roles, RPC functions, storage buckets, and types. 
+ +#### Scenario: First import (no existing state) +- **WHEN** no `.raiden/` state exists +- **THEN** all remote resources SHALL be treated as new and code-generated + +#### Scenario: Subsequent import +- **WHEN** `.raiden/` state exists from a prior import +- **THEN** the system SHALL load the state, extract registered resources, and classify each as New or Existing + +#### Scenario: State update during generation +- **WHEN** a resource file is generated +- **THEN** the resource metadata and output path SHALL be sent through `stateChan` to `UpdateLocalStateFromImport()`, which updates the `LocalState` in real-time + +#### Scenario: State persistence +- **WHEN** all code generation is complete (the `stateChan` channel closes) +- **THEN** `LocalState.Persist()` SHALL write the updated state to `.raiden/` + +### Requirement: Resource Extraction and Classification + +The system SHALL compare the local state against currently registered Go resources (from `bootstrap.Register*()`) and classify each resource as either New (in state but not registered) or Existing (in both state and code). 
+ +#### Scenario: Extract tables +- **WHEN** `--models-only` or all resources are being imported +- **THEN** `state.ExtractTable()` SHALL classify tables into New and Existing sets + +#### Scenario: Extract roles +- **WHEN** `--roles-only` or all resources are being imported +- **THEN** `state.ExtractRole()` SHALL classify roles into New and Existing sets + +#### Scenario: Extract RPC functions +- **WHEN** `--rpc-only` or all resources are being imported +- **THEN** `state.ExtractRpc()` SHALL classify RPC functions into New and Existing sets + +#### Scenario: Extract storages +- **WHEN** `--storages-only` or all resources are being imported +- **THEN** `state.ExtractStorage()` SHALL classify storage buckets into New and Existing sets + +### Requirement: Validation Tag Preservation + +When importing models, the system SHALL preserve existing model validation tags (e.g., `validate:"required"`) from both New and Existing table extractions so that regeneration does not lose user-defined validation constraints. + +#### Scenario: Preserve validation tags during reimport +- **WHEN** an existing model has validation tags defined +- **THEN** the regenerated model file SHALL include those same validation tags + +### Requirement: Comparison and Diff Checks + +The system SHALL compare remote (Supabase) resources against existing local resources to detect drift, unless `--force` is set. Pointer-typed fields (e.g., `*string`) SHALL be compared by dereferenced value, not by pointer address. Slice fields SHALL be compared by element values, not by slice indices. Relation action comparisons SHALL only flag a conflict when action data is available from both sides, or when running in apply mode. Relation matching SHALL fall back to `schema.table.column` lookup when constraint name lookup fails. Cross-schema FK references SHALL be filtered out before comparison when the target table is not in the local model set. 
RPC `CompleteStatement` comparison during import SHALL use the stored state value, not the rebuilt value from `BuildRpc()`. Model relation `TargetTableName` SHALL be derived from the referenced type name, not from the struct field name, to ensure it matches the actual database table. + +#### Scenario: Normal comparison +- **WHEN** `--force` is not set and existing resources are present +- **THEN** the system SHALL run comparison checks for types, tables, roles, RPC functions, and storages + +#### Scenario: Comparison error in normal mode +- **WHEN** a comparison detects conflicting changes +- **THEN** the system SHALL return an error and abort the import + +#### Scenario: Comparison error in dry-run mode +- **WHEN** `--dry-run` is set and a comparison detects conflicts +- **THEN** the system SHALL collect the error message without aborting, and report it at the end + +#### Scenario: Skip comparisons with force flag +- **WHEN** `--force` is set +- **THEN** all comparison checks SHALL be skipped and remote state SHALL overwrite local unconditionally + +#### Scenario: Skip comparison when no existing resources +- **WHEN** the Existing set for a resource type is empty (first import or new resource type) +- **THEN** the comparison for that resource type SHALL be skipped + +#### Scenario: No false conflict on identical pointer-typed fields +- **WHEN** a type's `Comment` field has the same string value on both remote and local but different pointer addresses +- **THEN** the comparison SHALL report no conflict + +#### Scenario: No false conflict on identical enum values +- **WHEN** a type's `Enums` slice has the same string values on both remote and local +- **THEN** the comparison SHALL report no conflict regardless of slice allocation + +#### Scenario: No false conflict on identical attribute values +- **WHEN** a type's `Attributes` slice has the same `Name` and `TypeID` values on both remote and local +- **THEN** the comparison SHALL report no conflict regardless of 
slice allocation + +#### Scenario: No false conflict on missing remote relation action during import +- **WHEN** a relation's remote `Action` is nil (not attached) but local `Action` is populated, and the comparison mode is import +- **THEN** the comparison SHALL NOT flag this as a conflict + +#### Scenario: Flag missing action as conflict in apply mode +- **WHEN** a relation's local `Action` is populated but remote `Action` is nil, and the comparison mode is apply +- **THEN** the comparison SHALL flag this as a conflict for `OnUpdate` and `OnDelete` actions + +#### Scenario: No false conflict on constraint name mismatch +- **WHEN** a relation exists in both remote and local with different constraint names but identical `schema.table.column` reference +- **THEN** the comparison SHALL match them via fallback lookup and report no conflict + +#### Scenario: No false conflict on cross-schema FK references +- **WHEN** a remote table has a FK referencing a table in a different schema (e.g., `auth.users`) that is not in the local model set +- **THEN** the comparison SHALL exclude that relationship before comparison + +#### Scenario: No false conflict on RPC CompleteStatement formatting +- **WHEN** an RPC function's `CompleteStatement` from `pg_get_functiondef()` differs from the `BuildRpc()` rebuilt version only in formatting (param prefix, default quoting, search_path) +- **THEN** the import comparison SHALL use the stored state `CompleteStatement` and report no conflict + +#### Scenario: Correct TargetTableName for model relations +- **WHEN** a model struct field references a type with a different name than the field (e.g., field `MasterCreatorBrand` of type `*MasterCreators`) +- **THEN** the relation `TargetTableName` SHALL be derived from the type name (`master_creators`), not the field name (`master_creator_brand`) + +### Requirement: Import Report + +The system SHALL compute and display a report showing the count of new resources added per type (Table, Role, Rpc, 
Storage, Types, Policies). + +#### Scenario: Report after successful import +- **WHEN** the import generates new resources +- **THEN** the system SHALL log: "import process is complete, adding several new resources to the codebase" with counts for each resource type + +#### Scenario: Report when no changes +- **WHEN** the import finds no new resources +- **THEN** the system SHALL log: "import process is complete, your code is up to date" + +#### Scenario: Dry-run report with no errors +- **WHEN** `--dry-run` completes without comparison errors +- **THEN** the system SHALL log: "finish running import in dry run mode" with resource counts + +#### Scenario: Dry-run report with errors +- **WHEN** `--dry-run` completes with comparison errors +- **THEN** the system SHALL log the collected errors and skip the resource count report + +#### Scenario: Report printed exactly once +- **WHEN** the import workflow completes +- **THEN** the report SHALL be printed exactly once (guarded by `reportPrinted` flag) + +### Requirement: Output Handling Modes + +The system SHALL support three output modes based on flags: normal (generate code), dry-run (report only), and update-state-only (update `.raiden/` without generating code). 
+ +#### Scenario: Normal output (code generation) +- **WHEN** neither `--dry-run` nor `--update-state-only` is set +- **THEN** the system SHALL run `generateImportResource()` to create Go source files and update state + +#### Scenario: Dry-run output +- **WHEN** `--dry-run` is set +- **THEN** the system SHALL NOT write any files; it SHALL only print the report or collected errors + +#### Scenario: Update state only +- **WHEN** `--update-state-only` is set +- **THEN** the system SHALL update `.raiden/` state with the remote resource data without generating Go source files + +### Requirement: Concurrent Code Generation + +The `generateImportResource()` function SHALL generate all resource types concurrently within a single goroutine group, sending state updates through a channel as each file is generated. + +#### Scenario: Generate types +- **WHEN** custom PostgreSQL types exist in the remote resource +- **THEN** `generator.GenerateTypes()` SHALL produce type files under `internal/types/` + +#### Scenario: Generate models +- **WHEN** tables exist in the remote resource +- **THEN** `generator.GenerateModels()` SHALL produce model structs under `internal/models/` with column tags, join tags, and preserved validation tags + +#### Scenario: Generate REST controllers (optional) +- **WHEN** `--generate-controller` is set and config mode is `bff` +- **THEN** `generator.GenerateRestControllers()` SHALL produce controller stubs for each imported table + +#### Scenario: Generate roles +- **WHEN** user-defined roles exist in the remote resource +- **THEN** `generator.GenerateRoles()` SHALL produce role definitions under `internal/roles/` + +#### Scenario: Generate RPC functions +- **WHEN** functions exist in the remote resource +- **THEN** `generator.GenerateRpc()` SHALL produce RPC wrappers under `internal/rpc/` + +#### Scenario: Generate storages +- **WHEN** storage buckets exist in the remote resource +- **THEN** `generator.GenerateStorages()` SHALL produce storage 
definitions under `internal/storages/` + +#### Scenario: Generation error handling +- **WHEN** any generator function returns an error +- **THEN** the error SHALL be sent through `errChan` and the import SHALL return that error + +### Requirement: Import Binary Code Generation + +The system SHALL generate a `cmd/import/main.go` file from a Go template that embeds the user's project module name and bootstrap imports. + +#### Scenario: Template rendering +- **WHEN** `GenerateImportMainFunction()` is called +- **THEN** it SHALL create `cmd/import/main.go` with the correct package imports, bootstrap registration calls, and cobra command setup + +#### Scenario: Directory creation +- **WHEN** the `cmd/` or `cmd/import/` directories do not exist +- **THEN** the system SHALL create them before writing the file + +#### Scenario: Import paths +- **WHEN** the template is rendered +- **THEN** it SHALL include imports for `raiden`, `generate`, `imports`, `resource`, `utils`, `cobra`, and the user's `internal/bootstrap` package + +### Requirement: Dependency Injection for Testability + +The `importJob` struct SHALL use an `importDeps` struct that holds function references for all external dependencies (loading, comparing, generating, reporting). This allows tests to stub individual phases. + +#### Scenario: Default dependencies +- **WHEN** `Import()` is called in production +- **THEN** `defaultImportDeps` SHALL be used, wiring real implementations for all phases + +#### Scenario: Test dependencies +- **WHEN** `runImport()` is called in tests +- **THEN** custom `importDeps` MAY be provided to stub any phase (e.g., `loadRemote`, `compareTables`) + +### Requirement: Panic Recovery + +The `runImport()` function SHALL include a deferred panic recovery that converts any panic into a returned error. 
+ +#### Scenario: Panic during import +- **WHEN** any phase panics +- **THEN** the system SHALL recover and return an error: "import panic: {details}" + +#### Scenario: Deferred report on success +- **WHEN** the import completes without error and the report has been computed but not yet printed +- **THEN** the deferred function SHALL print the report before returning + +### Requirement: Pre-Import Generation Pass + +Before the core import logic runs, the system SHALL execute `generate.Run()` to refresh internal bootstrap artifacts. This ensures the import binary has up-to-date type registrations. + +#### Scenario: Bootstrap refresh before import +- **WHEN** the CLI command runs the import workflow +- **THEN** `generate.Run()` SHALL be called before `imports.Run()` to regenerate route files and bootstrap registrations + +#### Scenario: Generation failure before import +- **WHEN** `generate.Run()` fails before the import +- **THEN** the system SHALL log the error and abort without proceeding to the import + + +### Requirement: Apply Relation Comparison Accuracy + +The relation comparison in apply mode SHALL correctly handle duplicate FK constraints and cross-schema FK references without producing false-change detections. 
+
+#### Scenario: Duplicate FK constraints on same column
+- **WHEN** the remote database has two FK constraints for the same source column (e.g., custom-named and default-named)
+- **AND** the local code has one FK for that column
+- **THEN** the duplicate constraint SHALL NOT be flagged as a delete
+
+#### Scenario: Cross-schema FK reference
+- **WHEN** the remote database has a FK referencing a table in a different schema (e.g., `auth.users`)
+- **AND** the local code does not represent this FK
+- **THEN** the cross-schema FK SHALL NOT be flagged as a delete
+
+#### Scenario: Index creation when both sides lack indexes
+- **WHEN** both local and remote sides have no index for a relation
+- **THEN** no index creation SHALL be proposed
+
+### Requirement: Apply Policy Comparison Accuracy
+
+The policy comparison SHALL match policies by their full identity (schema, table, and name) rather than by name alone.
+
+#### Scenario: Same-named policies on different tables
+- **WHEN** multiple tables have policies with the same name
+- **THEN** each policy SHALL be compared only with its corresponding policy on the same table
+- **AND** no false table-change diffs SHALL be reported
+
+### Requirement: Apply RPC Comparison Accuracy
+
+The RPC state extraction SHALL preserve the stored CompleteStatement from the previous import so that format-only differences are not flagged as changes.
+ +#### Scenario: No code changes after import +- **WHEN** a user runs import and then apply without modifying any RPC code +- **THEN** no RPC updates SHALL be detected diff --git a/pkg/resource/import.go b/pkg/resource/import.go index 24c95835..f610fd3c 100644 --- a/pkg/resource/import.go +++ b/pkg/resource/import.go @@ -332,7 +332,32 @@ func (j *importJob) compareTables() error { compareTables = append(compareTables, j.appTables.Existing[i].Table) } - if err := j.deps.compareTables(j.resource.Tables, compareTables); err != nil { + // Build set of locally-known tables so we can filter out remote + // relationships that reference tables not in the local model set + // (e.g., cross-schema FKs to auth.users). The generator already + // skips these when creating join tags, so the comparison should too. + localTableSet := make(map[string]bool, len(compareTables)) + for _, t := range compareTables { + localTableSet[fmt.Sprintf("%s.%s", t.Schema, t.Name)] = true + } + remoteTables := make([]objects.Table, len(j.resource.Tables)) + copy(remoteTables, j.resource.Tables) + for i := range remoteTables { + if len(remoteTables[i].Relationships) == 0 { + continue + } + filtered := make([]objects.TablesRelationship, 0, len(remoteTables[i].Relationships)) + for _, r := range remoteTables[i].Relationships { + targetKey := fmt.Sprintf("%s.%s", r.TargetTableSchema, r.TargetTableName) + sourceKey := fmt.Sprintf("%s.%s", r.SourceSchema, r.SourceTableName) + if localTableSet[targetKey] && localTableSet[sourceKey] { + filtered = append(filtered, r) + } + } + remoteTables[i].Relationships = filtered + } + + if err := j.deps.compareTables(remoteTables, compareTables); err != nil { if j.flags.DryRun { j.dryRunErrors = append(j.dryRunErrors, err.Error()) return nil @@ -374,6 +399,24 @@ func (j *importJob) compareRpc() error { if !j.flags.DryRun { ImportLogger.Debug("start compare rpc") } + + // Restore state CompleteStatement for import comparison. 
+ // BindRpcFunction rebuilds CompleteStatement from the Go struct template which + // may differ in formatting from pg_get_functiondef() (param prefix, default + // quoting, search_path inclusion). Using the stored state value (captured from + // the last import) ensures we only flag real remote changes as conflicts. + mapStateCS := make(map[string]string) + for _, rs := range j.localState.Rpc { + if rs.Function.CompleteStatement != "" { + mapStateCS[rs.Function.Name] = rs.Function.CompleteStatement + } + } + for i := range j.appRpcFunctions.Existing { + if cs, ok := mapStateCS[j.appRpcFunctions.Existing[i].Name]; ok { + j.appRpcFunctions.Existing[i].CompleteStatement = cs + } + } + if err := j.deps.compareRpc(j.resource.Functions, j.appRpcFunctions.Existing); err != nil { if j.flags.DryRun { j.dryRunErrors = append(j.dryRunErrors, err.Error()) diff --git a/pkg/resource/import_internal_test.go b/pkg/resource/import_internal_test.go index ecbc8e42..4099dfd5 100644 --- a/pkg/resource/import_internal_test.go +++ b/pkg/resource/import_internal_test.go @@ -303,3 +303,104 @@ func TestRunImport_CompareTablesError(t *testing.T) { err := runImport(flags, &raiden.Config{}, deps) require.Equal(t, expectedErr, err) } + +func TestRunImport_CrossSchemaRelationsFiltered(t *testing.T) { + flags := &Flags{AllowedSchema: "public"} + + var capturedRemote []objects.Table + deps := importDeps{ + loadNativeRoles: func() (map[string]raiden.Role, error) { return map[string]raiden.Role{}, nil }, + loadRemote: func(*Flags, *raiden.Config) (*Resource, error) { + return &Resource{ + Tables: []objects.Table{ + { + Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + {ConstraintName: "fk1", SourceSchema: "public", SourceTableName: "t1", TargetTableSchema: "public", TargetTableName: "t2"}, + {ConstraintName: "fk_cross", SourceSchema: "public", SourceTableName: "t1", TargetTableSchema: "auth", TargetTableName: "users"}, + }, + }, + {Name: "t2", Schema: "public"}, + }, + 
}, nil + }, + loadState: func() (*state.State, error) { return &state.State{}, nil }, + extractApp: func(*Flags, *state.State) (state.ExtractTableResult, state.ExtractRoleResult, state.ExtractRpcResult, state.ExtractStorageResult, state.ExtractTypeResult, error) { + return state.ExtractTableResult{ + Existing: state.ExtractTableItems{ + {Table: objects.Table{Name: "t1", Schema: "public"}}, + {Table: objects.Table{Name: "t2", Schema: "public"}}, + }, + }, state.ExtractRoleResult{}, state.ExtractRpcResult{}, state.ExtractStorageResult{}, state.ExtractTypeResult{}, nil + }, + compareTypes: func([]objects.Type, []objects.Type) error { return nil }, + compareTables: func(remote []objects.Table, local []objects.Table) error { + capturedRemote = remote + return nil + }, + compareRoles: func([]objects.Role, []objects.Role) error { return nil }, + compareRpc: func([]objects.Function, []objects.Function) error { return nil }, + compareStorages: func([]objects.Bucket, []objects.Bucket) error { return nil }, + updateStateOnly: func(*state.LocalState, *Resource, map[string]state.ModelValidationTag) error { return nil }, + generate: func(*raiden.Config, *state.LocalState, string, *Resource, map[string]state.ModelValidationTag, bool) error { + return nil + }, + printReport: func(ImportReport, bool) {}, + } + + err := runImport(flags, &raiden.Config{Mode: raiden.BffMode, AllowedTables: "*", ProjectName: "proj"}, deps) + require.NoError(t, err) + + // The cross-schema FK (auth.users) should have been filtered out. 
+ for _, tbl := range capturedRemote { + if tbl.Name == "t1" { + assert.Len(t, tbl.Relationships, 1, "should only have the intra-schema FK") + assert.Equal(t, "fk1", tbl.Relationships[0].ConstraintName) + } + } +} + +func TestRunImport_RpcStateCompleteStatementRestored(t *testing.T) { + flags := &Flags{AllowedSchema: "public"} + + var capturedLocal []objects.Function + deps := importDeps{ + loadNativeRoles: func() (map[string]raiden.Role, error) { return map[string]raiden.Role{}, nil }, + loadRemote: func(*Flags, *raiden.Config) (*Resource, error) { + return &Resource{ + Functions: []objects.Function{{Name: "my_rpc", Schema: "public", CompleteStatement: "remote_cs"}}, + }, nil + }, + loadState: func() (*state.State, error) { + return &state.State{ + Rpc: []state.RpcState{{Function: objects.Function{Name: "my_rpc", CompleteStatement: "state_cs"}}}, + }, nil + }, + extractApp: func(*Flags, *state.State) (state.ExtractTableResult, state.ExtractRoleResult, state.ExtractRpcResult, state.ExtractStorageResult, state.ExtractTypeResult, error) { + return state.ExtractTableResult{}, + state.ExtractRoleResult{}, + state.ExtractRpcResult{Existing: []objects.Function{{Name: "my_rpc", CompleteStatement: "rebuilt_cs"}}}, + state.ExtractStorageResult{}, state.ExtractTypeResult{}, nil + }, + compareTypes: func([]objects.Type, []objects.Type) error { return nil }, + compareTables: func([]objects.Table, []objects.Table) error { return nil }, + compareRoles: func([]objects.Role, []objects.Role) error { return nil }, + compareRpc: func(remote []objects.Function, local []objects.Function) error { + capturedLocal = local + return nil + }, + compareStorages: func([]objects.Bucket, []objects.Bucket) error { return nil }, + updateStateOnly: func(*state.LocalState, *Resource, map[string]state.ModelValidationTag) error { return nil }, + generate: func(*raiden.Config, *state.LocalState, string, *Resource, map[string]state.ModelValidationTag, bool) error { + return nil + }, + printReport: 
func(ImportReport, bool) {}, + } + + err := runImport(flags, &raiden.Config{Mode: raiden.BffMode, AllowedTables: "*", ProjectName: "proj"}, deps) + require.NoError(t, err) + + // CompleteStatement should have been restored from state, not the rebuilt value. + require.Len(t, capturedLocal, 1) + assert.Equal(t, "state_cs", capturedLocal[0].CompleteStatement) +} diff --git a/pkg/resource/policies/compare.go b/pkg/resource/policies/compare.go index 85480ea2..f320ba9f 100644 --- a/pkg/resource/policies/compare.go +++ b/pkg/resource/policies/compare.go @@ -27,12 +27,12 @@ func CompareList(sourcePolicies, targetPolicies []objects.Policy) (diffResult [] mapTargetPolicies := make(map[string]objects.Policy) for i := range targetPolicies { r := targetPolicies[i] - mapTargetPolicies[strings.ToLower(r.Name)] = r + mapTargetPolicies[comparePolicyKey(r)] = r } for i := range sourcePolicies { p := sourcePolicies[i] - tp, isExist := mapTargetPolicies[strings.ToLower(p.Name)] + tp, isExist := mapTargetPolicies[comparePolicyKey(p)] if !isExist { continue } @@ -42,6 +42,19 @@ func CompareList(sourcePolicies, targetPolicies []objects.Policy) (diffResult [] return } +// comparePolicyKey builds a unique key for matching policies. Policies with the +// same name on different tables are distinct; keying by name alone causes +// cross-table mismatches. +func comparePolicyKey(p objects.Policy) string { + sch := strings.ToLower(p.Schema) + table := strings.ToLower(p.Table) + name := strings.ToLower(p.Name) + if sch == "" && table == "" { + return name + } + return sch + "." + table + "." 
+ name +} + func CompareItem(source, target objects.Policy) (diffResult CompareDiffResult) { updateItem := objects.UpdatePolicyParam{ Name: target.Name, diff --git a/pkg/resource/policies/compare_test.go b/pkg/resource/policies/compare_test.go index 878e5ee2..2a9176cb 100644 --- a/pkg/resource/policies/compare_test.go +++ b/pkg/resource/policies/compare_test.go @@ -329,3 +329,104 @@ func TestCompareList_DuplicateNameDifferentSchema(t *testing.T) { assert.Equal(t, "table_a", diffResult[0].TargetResource.Table) assert.False(t, diffResult[0].IsConflict) } + +func TestCompareList_SameNameDifferentTable(t *testing.T) { + // Two policies with the same name but different tables in the same schema. + // They should NOT collide — each should match only its own table's policy. + check := "check" + sourcePolicies := []objects.Policy{ + { + Name: "admin full access", + Schema: "public", + Table: "products", + Action: "PERMISSIVE", + Command: objects.PolicyCommandSelect, + Definition: "def1", + Check: &check, + Roles: []string{"admin"}, + }, + { + Name: "admin full access", + Schema: "public", + Table: "orders", + Action: "PERMISSIVE", + Command: objects.PolicyCommandSelect, + Definition: "def2", + Check: &check, + Roles: []string{"admin"}, + }, + } + + targetPolicies := []objects.Policy{ + { + Name: "admin full access", + Schema: "public", + Table: "products", + Action: "PERMISSIVE", + Command: objects.PolicyCommandSelect, + Definition: "def1", + Check: &check, + Roles: []string{"admin"}, + }, + { + Name: "admin full access", + Schema: "public", + Table: "orders", + Action: "PERMISSIVE", + Command: objects.PolicyCommandSelect, + Definition: "def2", + Check: &check, + Roles: []string{"admin"}, + }, + } + + diffResult := policies.CompareList(sourcePolicies, targetPolicies) + assert.Equal(t, 2, len(diffResult)) + // Neither should have a table mismatch conflict + for _, r := range diffResult { + assert.Equal(t, r.SourceResource.Table, r.TargetResource.Table, + "policy should 
match its own table, not a different one") + assert.False(t, r.IsConflict) + } +} + +func TestCompareList_SameNameDifferentTable_WithChange(t *testing.T) { + // Verify that a real change is detected on the correct table, not cross-matched. + check := "check" + sourcePolicies := []objects.Policy{ + { + Name: "shared policy", Schema: "public", Table: "table_x", + Action: "PERMISSIVE", Command: objects.PolicyCommandSelect, + Definition: "def", Check: &check, Roles: []string{"role1"}, + }, + { + Name: "shared policy", Schema: "public", Table: "table_y", + Action: "PERMISSIVE", Command: objects.PolicyCommandSelect, + Definition: "def", Check: &check, Roles: []string{"role1"}, + }, + } + + targetPolicies := []objects.Policy{ + { + Name: "shared policy", Schema: "public", Table: "table_x", + Action: "PERMISSIVE", Command: objects.PolicyCommandSelect, + Definition: "def", Check: &check, Roles: []string{"role1"}, + }, + { + Name: "shared policy", Schema: "public", Table: "table_y", + Action: "PERMISSIVE", Command: objects.PolicyCommandSelect, + Definition: "different_def", Check: &check, Roles: []string{"role1"}, + }, + } + + diffResult := policies.CompareList(sourcePolicies, targetPolicies) + assert.Equal(t, 2, len(diffResult)) + + // table_x should have no conflict + assert.Equal(t, "table_x", diffResult[0].SourceResource.Table) + assert.False(t, diffResult[0].IsConflict) + + // table_y should have a conflict (definition differs) + assert.Equal(t, "table_y", diffResult[1].SourceResource.Table) + assert.True(t, diffResult[1].IsConflict) +} diff --git a/pkg/resource/tables/compare.go b/pkg/resource/tables/compare.go index 25f1b358..58c6dc0e 100644 --- a/pkg/resource/tables/compare.go +++ b/pkg/resource/tables/compare.go @@ -217,14 +217,23 @@ func compareColumns(source, target []objects.Column) (updateItems []objects.Upda func compareRelations(mode CompareMode, table *objects.Table, source, target []objects.TablesRelationship) (updateItems []objects.UpdateRelationItem) { 
mapTargetRelation := make(map[string]objects.TablesRelationship) + // Secondary index keyed by schema.table.column for fallback matching + // when constraint names differ (e.g., custom FK names vs generated defaults). + mapTargetByCol := make(map[string]objects.TablesRelationship) for i := range target { c := target[i] if !strings.HasPrefix(c.ConstraintName, fmt.Sprintf("%s_", c.SourceSchema)) { c.ConstraintName = fmt.Sprintf("%s_%s", c.SourceSchema, c.ConstraintName) } mapTargetRelation[c.ConstraintName] = c + colKey := fmt.Sprintf("%s.%s.%s", c.SourceSchema, c.SourceTableName, c.SourceColumnName) + mapTargetByCol[colKey] = c } + // Track which source columns have been matched, so duplicate target FKs + // for the same column are not incorrectly flagged as deletes. + matchedSourceCols := make(map[string]bool) + for i := range source { sc := source[i] @@ -238,6 +247,17 @@ func compareRelations(mode CompareMode, table *objects.Table, source, target []o } t, exist := mapTargetRelation[sc.ConstraintName] + if !exist { + // Fallback: match by source table + column when constraint + // names differ (e.g., custom FK names vs generated defaults). + colKey := fmt.Sprintf("%s.%s.%s", sc.SourceSchema, sc.SourceTableName, sc.SourceColumnName) + if tc, ok := mapTargetByCol[colKey]; ok { + t = tc + exist = true + delete(mapTargetRelation, tc.ConstraintName) + } + } + if !exist { updateItems = append(updateItems, objects.UpdateRelationItem{ Data: sc, @@ -246,7 +266,11 @@ func compareRelations(mode CompareMode, table *objects.Table, source, target []o continue } - if t.Index == nil && sc.Index == nil && mode == CompareModeApply { + // Record that this source column has been matched. 
+ matchedColKey := fmt.Sprintf("%s.%s.%s", sc.SourceSchema, sc.SourceTableName, sc.SourceColumnName) + matchedSourceCols[matchedColKey] = true + + if t.Index != nil && sc.Index == nil && mode == CompareModeApply { updateItems = append(updateItems, objects.UpdateRelationItem{ Data: sc, Type: objects.UpdateRelationCreateIndex, @@ -271,16 +295,21 @@ func compareRelations(mode CompareMode, table *objects.Table, source, target []o Logger.Debug("check on delete", "t-on-delete", t.Action.DeletionAction, "sc-on-delete", sc.Action.DeletionAction, "same", t.Action.DeletionAction == sc.Action.DeletionAction) } } else if t.Action != nil && sc.Action == nil { - updateItems = append(updateItems, objects.UpdateRelationItem{ - Data: sc, - Type: objects.UpdateRelationActionOnUpdate, - }) + if mode == CompareModeApply { + // Only flag missing remote action as a diff in apply mode. + // During import the remote may not have action data attached; + // treating that as a conflict produces false positives. + updateItems = append(updateItems, objects.UpdateRelationItem{ + Data: sc, + Type: objects.UpdateRelationActionOnUpdate, + }) - updateItems = append(updateItems, objects.UpdateRelationItem{ - Data: sc, - Type: objects.UpdateRelationActionOnDelete, - }) - Logger.Debug("create relation new action", "on-update", t.Action.UpdateAction, "on-delete", t.Action.DeletionAction) + updateItems = append(updateItems, objects.UpdateRelationItem{ + Data: sc, + Type: objects.UpdateRelationActionOnDelete, + }) + Logger.Debug("create relation new action", "on-update", t.Action.UpdateAction, "on-delete", t.Action.DeletionAction) + } } delete(mapTargetRelation, sc.ConstraintName) @@ -310,6 +339,22 @@ func compareRelations(mode CompareMode, table *objects.Table, source, target []o continue } + // Skip cross-schema FKs (e.g., public.user_brands → auth.users). + // These are not represented in Go code because the target table + // is not in the imported model set. 
+ if r.TargetTableSchema != r.SourceSchema { + continue + } + + // Skip duplicate FKs whose column was already matched. + // The database may have multiple constraints for the same column + // (e.g., custom-named + default-named). Since Go structs can only + // represent one FK per column, these duplicates are expected. + colKey := fmt.Sprintf("%s.%s.%s", r.SourceSchema, r.SourceTableName, r.SourceColumnName) + if matchedSourceCols[colKey] { + continue + } + updateItems = append(updateItems, objects.UpdateRelationItem{ Data: r, Type: objects.UpdateRelationDelete, diff --git a/pkg/resource/tables/compare_test.go b/pkg/resource/tables/compare_test.go index 6904a33d..ba7b159e 100644 --- a/pkg/resource/tables/compare_test.go +++ b/pkg/resource/tables/compare_test.go @@ -231,3 +231,293 @@ func TestCompareItemWithoutIndex(t *testing.T) { assert.Equal(t, []objects.UpdateColumnType{objects.UpdateColumnNullable}, diffResult.DiffItems.ChangeColumnItems[0].UpdateItems) assert.Equal(t, []objects.UpdateColumnType{objects.UpdateColumnNullable}, diffResult.DiffItems.ChangeColumnItems[1].UpdateItems) } + +func TestCompareItem_FallbackMatchByColumn(t *testing.T) { + // Source has a custom-named FK, target has a default-named FK for the same column. + // They should match via the column-based fallback. 
+ action := objects.TablesRelationshipAction{UpdateAction: "c", DeletionAction: "c"} + + source := objects.Table{ + ID: 1, Name: "orders", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "fk_custom_name", + SourceSchema: "public", + SourceTableName: "orders", + SourceColumnName: "user_id", + TargetTableSchema: "public", + TargetTableName: "users", + TargetColumnName: "id", + Action: &action, + }, + }, + } + + target := objects.Table{ + ID: 1, Name: "orders", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_orders_user_id_fkey", + SourceSchema: "public", + SourceTableName: "orders", + SourceColumnName: "user_id", + TargetTableSchema: "public", + TargetTableName: "users", + TargetColumnName: "id", + Action: &action, + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeApply, source, target) + // Should NOT be a conflict — same FK, different constraint names + hasRelationCreate := false + hasRelationDelete := false + for _, item := range diffResult.DiffItems.ChangeRelationItems { + if item.Type == objects.UpdateRelationCreate { + hasRelationCreate = true + } + if item.Type == objects.UpdateRelationDelete { + hasRelationDelete = true + } + } + assert.False(t, hasRelationCreate, "should not create relation — matched via column fallback") + assert.False(t, hasRelationDelete, "should not delete relation — matched via column fallback") +} + +func TestCompareItem_CrossSchemaFKSkip(t *testing.T) { + // Target has a cross-schema FK (public → auth). It should NOT be flagged as delete. 
+ source := objects.Table{ + ID: 1, Name: "user_brands", Schema: "public", + Relationships: []objects.TablesRelationship{}, + } + + target := objects.Table{ + ID: 1, Name: "user_brands", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_user_brands_user_id_fkey", + SourceSchema: "public", + SourceTableName: "user_brands", + SourceColumnName: "user_id", + TargetTableSchema: "auth", + TargetTableName: "users", + TargetColumnName: "id", + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeApply, source, target) + for _, item := range diffResult.DiffItems.ChangeRelationItems { + assert.NotEqual(t, objects.UpdateRelationDelete, item.Type, + "cross-schema FK should NOT be flagged as delete") + } +} + +func TestCompareItem_DuplicateFKSkip(t *testing.T) { + // Source has one FK, target has two FKs for the same column (custom + default name). + // The duplicate should NOT be flagged as delete. + action := objects.TablesRelationshipAction{UpdateAction: "c", DeletionAction: "c"} + + source := objects.Table{ + ID: 1, Name: "creators", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_creators_div_id_fkey", + SourceSchema: "public", + SourceTableName: "creators", + SourceColumnName: "div_id", + TargetTableSchema: "public", + TargetTableName: "divisions", + TargetColumnName: "id", + Action: &action, + }, + }, + } + + target := objects.Table{ + ID: 1, Name: "creators", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_creators_div_id_fkey", + SourceSchema: "public", + SourceTableName: "creators", + SourceColumnName: "div_id", + TargetTableSchema: "public", + TargetTableName: "divisions", + TargetColumnName: "id", + Action: &action, + }, + { + ConstraintName: "fk_custom_div", + SourceSchema: "public", + SourceTableName: "creators", + SourceColumnName: "div_id", + TargetTableSchema: "public", + TargetTableName: "divisions", + 
TargetColumnName: "id", + Action: &action, + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeApply, source, target) + for _, item := range diffResult.DiffItems.ChangeRelationItems { + assert.NotEqual(t, objects.UpdateRelationDelete, item.Type, + "duplicate FK for matched column should NOT be flagged as delete") + } +} + +func TestCompareItem_NoIndexCreationWhenBothNil(t *testing.T) { + // Both source and target have nil Index — should NOT propose index creation. + action := objects.TablesRelationshipAction{UpdateAction: "c", DeletionAction: "c"} + + source := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Index: nil, Action: &action, + }, + }, + } + + target := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Index: nil, Action: &action, + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeApply, source, target) + for _, item := range diffResult.DiffItems.ChangeRelationItems { + assert.NotEqual(t, objects.UpdateRelationCreateIndex, item.Type, + "should NOT create index when both sides have nil Index") + } +} + +func TestCompareItem_IndexCreationWhenTargetHasIndex(t *testing.T) { + // In apply mode: source = local, target = remote (supabase). + // When source (local) has no index but target (remote) has one, + // compareRelations checks t.Index (target) != nil && sc.Index (source) == nil. 
+ action := objects.TablesRelationshipAction{UpdateAction: "c", DeletionAction: "c"} + idx := &objects.Index{Schema: "public", Table: "t1", Name: "idx1", Definition: "idx1"} + + source := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Index: nil, Action: &action, + }, + }, + } + + target := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Index: idx, Action: &action, + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeApply, source, target) + hasIndexCreate := false + for _, item := range diffResult.DiffItems.ChangeRelationItems { + if item.Type == objects.UpdateRelationCreateIndex { + hasIndexCreate = true + } + } + assert.True(t, hasIndexCreate, "should create index when target (remote) has it but source (local) doesn't") +} + +func TestCompareItem_NilActionApplyMode(t *testing.T) { + // In apply mode, when target (remote) has Action but source (local) doesn't, + // compareRelations should create action update items. 
+ action := objects.TablesRelationshipAction{UpdateAction: "c", DeletionAction: "c"} + + source := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Action: nil, + }, + }, + } + target := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Action: &action, + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeApply, source, target) + hasOnUpdate := false + hasOnDelete := false + for _, item := range diffResult.DiffItems.ChangeRelationItems { + if item.Type == objects.UpdateRelationActionOnUpdate { + hasOnUpdate = true + } + if item.Type == objects.UpdateRelationActionOnDelete { + hasOnDelete = true + } + } + assert.True(t, hasOnUpdate, "should flag action on update diff") + assert.True(t, hasOnDelete, "should flag action on delete diff") +} + +func TestCompareItem_NilActionImportMode(t *testing.T) { + // In import mode, when target has Action but source doesn't, + // compareRelations should NOT create action update items (no false positives). 
+ action := objects.TablesRelationshipAction{UpdateAction: "c", DeletionAction: "c"} + + source := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Action: nil, + }, + }, + } + target := objects.Table{ + ID: 1, Name: "t1", Schema: "public", + Relationships: []objects.TablesRelationship{ + { + ConstraintName: "public_t1_col_fkey", SourceSchema: "public", + SourceTableName: "t1", SourceColumnName: "col", + TargetTableSchema: "public", TargetTableName: "t2", TargetColumnName: "id", + Action: &action, + }, + }, + } + + diffResult := tables.CompareItem(tables.CompareModeImport, source, target) + for _, item := range diffResult.DiffItems.ChangeRelationItems { + assert.NotEqual(t, objects.UpdateRelationActionOnUpdate, item.Type, "should not flag action on update in import mode") + assert.NotEqual(t, objects.UpdateRelationActionOnDelete, item.Type, "should not flag action on delete in import mode") + } +} diff --git a/pkg/resource/types/compare.go b/pkg/resource/types/compare.go index bbb2d48c..c04469d7 100644 --- a/pkg/resource/types/compare.go +++ b/pkg/resource/types/compare.go @@ -53,8 +53,12 @@ func CompareItem(source, target objects.Type) (diffResult CompareDiffResult) { updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeName) } - if source.Comment != target.Comment { + if (source.Comment != nil && target.Comment == nil) || (source.Comment == nil && target.Comment != nil) { updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeComment) + } else if source.Comment != nil && target.Comment != nil { + if *source.Comment != *target.Comment { + updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeComment) + } } if source.Format != target.Format { @@ -68,9 +72,9 @@ 
func CompareItem(source, target objects.Type) (diffResult CompareDiffResult) { if len(source.Enums) != len(target.Enums) { updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeEnums) } else { - for se := range source.Enums { + for _, se := range source.Enums { isFound := false - for te := range target.Enums { + for _, te := range target.Enums { if se == te { isFound = true break @@ -87,10 +91,10 @@ func CompareItem(source, target objects.Type) (diffResult CompareDiffResult) { if len(source.Attributes) != len(target.Attributes) { updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeAttributes) } else { - for sa := range source.Attributes { + for _, sa := range source.Attributes { isFound := false - for ta := range target.Attributes { - if sa == ta { + for _, ta := range target.Attributes { + if sa.Name == ta.Name && sa.TypeID == ta.TypeID { isFound = true break } @@ -103,17 +107,6 @@ func CompareItem(source, target objects.Type) (diffResult CompareDiffResult) { } } - if (source.Comment != nil && target.Comment == nil) || (source.Comment == nil && target.Comment != nil) { - updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeComment) - } else if source.Comment != nil && target.Comment != nil { - sv := source.Comment - tv := target.Comment - - if sv != tv { - updateItem.ChangeItems = append(updateItem.ChangeItems, objects.UpdateTypeComment) - } - } - diffResult.IsConflict = len(updateItem.ChangeItems) > 0 diffResult.DiffItems = updateItem diff --git a/pkg/state/rpc.go b/pkg/state/rpc.go index eb9dc25a..f1b30902 100644 --- a/pkg/state/rpc.go +++ b/pkg/state/rpc.go @@ -35,9 +35,17 @@ func ExtractRpc(rpcState []RpcState, appRpc []raiden.Rpc) (result ExtractRpcResu } fn := state.Function + // Preserve the state's CompleteStatement (captured from pg_get_functiondef + // during the last import). 
BindRpcFunction rebuilds it via BuildRpc() which + // differs in formatting (param prefix, default quoting, search_path), + // causing false update detections even when no code was changed. + stateCompleteStatement := fn.CompleteStatement if err := BindRpcFunction(r, &fn); err != nil { return result, err } + if stateCompleteStatement != "" { + fn.CompleteStatement = stateCompleteStatement + } if fn.CompleteStatement != "" { result.Existing = append(result.Existing, fn) diff --git a/pkg/state/rpc_test.go b/pkg/state/rpc_test.go index 77171e56..8d5eec5c 100644 --- a/pkg/state/rpc_test.go +++ b/pkg/state/rpc_test.go @@ -108,6 +108,30 @@ func TestExtractRpcResult_ToDeleteFlatMap(t *testing.T) { assert.Equal(t, "rpc2", mapData["rpc2"].Name) } +func TestExtractRpc_PreservesStateCompleteStatement(t *testing.T) { + // When a function exists in state with a CompleteStatement from pg_get_functiondef, + // ExtractRpc should preserve that value instead of using the rebuilt one from BuildRpc. + stateCS := "create or replace function public.get_submissions(in_scouter_name varchar, in_candidate_name text) returns table(id integer) language plpgsql as $function$ begin end; $function$" + rpcStates := []state.RpcState{ + { + Function: objects.Function{ + Name: "get_submissions", + Schema: "public", + CompleteStatement: stateCS, + }, + }, + } + + rpc1 := &GetSubmissions{} + _ = raiden.BuildRpc(rpc1) + + result, err := state.ExtractRpc(rpcStates, []raiden.Rpc{rpc1}) + assert.NoError(t, err) + assert.Len(t, result.Existing, 1) + assert.Equal(t, stateCS, result.Existing[0].CompleteStatement, + "should preserve state CompleteStatement, not the rebuilt one") +} + // Test declaration query with return trigger type CreateProfileParams struct{} diff --git a/pkg/state/table.go b/pkg/state/table.go index 3bc0679e..c67ba5d9 100644 --- a/pkg/state/table.go +++ b/pkg/state/table.go @@ -51,8 +51,18 @@ func ExtractTable(tableStates []TableState, appTable []any, mapDataType map[stri 
mapTableState[t.Table.Name] = t } + processedTables := make(map[string]bool) for _, t := range appTable { tableName := raiden.GetTableName(t) + + // Skip duplicate model registrations for the same table name. + // Two Go structs pointing to the same tableName would cause the + // second one to be incorrectly classified as "New". + if processedTables[tableName] { + continue + } + processedTables[tableName] = true + ts, isExist := mapTableState[tableName] if !isExist { @@ -301,10 +311,22 @@ func (b *tableBuilder) addModelRelation(field reflect.StructField, join string) return } + // Derive target table name from the field's type, not the field name. + // The field name is a relationship alias (e.g., MasterCreatorBrand) + // while the type resolves to the actual table via GetTableName. + fieldType := field.Type + if fieldType.Kind() == reflect.Ptr { + fieldType = fieldType.Elem() + } + if fieldType.Kind() != reflect.Struct { + return + } + targetTableName := raiden.GetTableName(reflect.New(fieldType).Interface()) + relation := objects.TablesRelationship{ SourceTableName: b.item.Table.Name, SourceSchema: b.item.Table.Schema, - TargetTableName: utils.ToSnakeCase(field.Name), + TargetTableName: targetTableName, TargetTableSchema: b.item.Table.Schema, } @@ -512,9 +534,16 @@ func buildTableRelation(tableName, fieldName, schema string, mapRelations map[st relation.ConstraintName = getRelationConstrainName(schema, sourceTableName, foreignKey) if r, ok := mapRelations[relation.ConstraintName]; ok { relation = r + } else if r, ok := mapRelations[getRelationConstrainNameWithoutSchema(sourceTableName, foreignKey)]; ok { + relation = r } else { - if r, ok := mapRelations[getRelationConstrainNameWithoutSchema(sourceTableName, foreignKey)]; ok { - relation = r + // Fallback: match by source table + column when constraint name + // doesn't follow the default naming pattern (e.g., custom FK names). 
+ for _, r := range mapRelations { + if r.SourceTableName == sourceTableName && r.SourceColumnName == foreignKey { + relation = r + break + } } } diff --git a/pkg/state/table_test.go b/pkg/state/table_test.go index f2e48019..cd8d2b02 100644 --- a/pkg/state/table_test.go +++ b/pkg/state/table_test.go @@ -9,6 +9,19 @@ import ( "github.com/stretchr/testify/assert" ) +// CandidateAlias is a second struct pointing to the same table as Candidate. +// Used to test duplicate table name registration handling. +type CandidateAlias struct { + Id int64 `json:"id,omitempty" column:"name:id;type:bigint;primaryKey;autoIncrement;nullable:false"` + Name *string `json:"name,omitempty" column:"name:name;type:varchar;nullable;unique"` + + // Table information — intentionally same tableName as Candidate + Metadata string `json:"-" schema:"public" tableName:"candidate"` + + // Access control + Acl string `json:"-" read:"anon" write:"anon"` +} + type Submission struct { Id int64 `json:"id,omitempty" column:"name:id;type:bigint;primaryKey;autoIncrement;nullable:false"` ScouterId *int64 `json:"scouter_id,omitempty" column:"name:scouter_id;type:bigint;nullable"` @@ -197,3 +210,39 @@ func TestToDeleteFlatMap(t *testing.T) { assert.Equal(t, "table1", mapData["table1"].Name) assert.Equal(t, "table2", mapData["table2"].Name) } + +func TestExtractTable_DuplicateTableName(t *testing.T) { + // Two models (Candidate + CandidateAlias) point to the same tableName "candidate". + // The second registration should be skipped, not treated as "New". 
+ tableState := []state.TableState{ + { + Table: objects.Table{ + ID: 100, + Name: "candidate", + PrimaryKeys: []objects.PrimaryKey{ + {Name: "id", Schema: "public", TableName: "candidate"}, + }, + Columns: []objects.Column{{Name: "id"}, {Name: "name"}}, + }, + }, + } + appTable := []any{&Candidate{}, &CandidateAlias{}} + rs, err := state.ExtractTable(tableState, appTable, nil) + + assert.NoError(t, err) + assert.Equal(t, 1, len(rs.Existing), "should have exactly one Existing entry") + assert.Equal(t, 0, len(rs.New), "duplicate should NOT create a New entry") + assert.Equal(t, 0, len(rs.Delete)) + assert.Equal(t, 100, rs.Existing[0].Table.ID) +} + +func TestExtractTable_DuplicateTableName_NoState(t *testing.T) { + // Two models with same tableName, neither in state — only one New entry expected. + appTable := []any{&Candidate{}, &CandidateAlias{}} + rs, err := state.ExtractTable(nil, appTable, nil) + + assert.NoError(t, err) + assert.Equal(t, 0, len(rs.Existing)) + assert.Equal(t, 1, len(rs.New), "duplicate should NOT create a second New entry") + assert.Equal(t, "candidate", rs.New[0].Table.Name) +}