From f046b9cf346264363df8e77291058fd3aa3e3a2a Mon Sep 17 00:00:00 2001 From: Sasha Varlamov Date: Sat, 14 Feb 2026 14:56:59 -0500 Subject: [PATCH 1/5] Add comprehensive worktree support coverage across modes --- src/error.rs | 1 + src/git/repository.rs | 101 ++- src/git/status.rs | 4 + tests/ai_tab.rs | 10 + tests/amend.rs | 13 + tests/blame_flags.rs | 31 + tests/checkout_switch.rs | 15 + tests/checkpoint_size.rs | 4 + tests/cherry_pick.rs | 11 + tests/chinese_text_edits.rs | 7 + tests/ci_squash_rebase.rs | 8 + tests/claude_code.rs | 4 + tests/continue_cli.rs | 7 + tests/cursor.rs | 5 + tests/diff.rs | 20 + tests/gemini.rs | 8 + tests/github_copilot_integration.rs | 9 + tests/gix_config_tests.rs | 15 + tests/ignore_prompts.rs | 8 + tests/initial_attributions.rs | 9 + tests/internal_db_integration.rs | 13 + tests/merge_rebase.rs | 5 + tests/prompt_across_commit.rs | 4 + tests/prompt_hash_migration.rs | 6 + tests/pull_rebase_ff.rs | 9 + tests/realistic_complex_edits.rs | 14 + tests/rebase.rs | 24 + tests/repos/mod.rs | 120 +++ tests/repos/test_repo.rs | 500 +++++++++- tests/reset.rs | 17 + tests/show_prompt.rs | 15 + tests/simple_additions.rs | 27 + ...ons__initial_and_blame_merge@worktree.snap | 6 + ...ons_in_subsequent_checkpoint@worktree.snap | 6 + ...__initial_only_no_blame_data@worktree.snap | 6 + ...tions__initial_wins_overlaps@worktree.snap | 6 + ...ons__partial_file_coverage@worktree-2.snap | 6 + ...tions__partial_file_coverage@worktree.snap | 6 + ...stats__markdown_stats_all_ai@worktree.snap | 6 + ...ts__markdown_stats_all_human@worktree.snap | 6 + ...markdown_stats_deletion_only@worktree.snap | 6 + ...s__markdown_stats_formatting@worktree.snap | 6 + ...markdown_stats_minimal_human@worktree.snap | 6 + .../stats__markdown_stats_mixed@worktree.snap | 6 + ...ats__markdown_stats_no_mixed@worktree.snap | 6 + tests/squash_merge.rs | 7 + tests/stash_attribution.rs | 21 + tests/stats.rs | 23 +- tests/worktrees.rs | 853 ++++++++++++++++++ 49 files changed, 1977 insertions(+), 49 deletions(-) create mode 100644 tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap create mode 100644 tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap create mode 100644 tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap create mode 100644 tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap create mode 100644 tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap create mode 100644 tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_all_ai@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_all_human@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_formatting@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_mixed@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap create mode 100644 tests/worktrees.rs diff --git a/src/error.rs b/src/error.rs index fa621e5d8..8b06a10dd 100644 --- a/src/error.rs +++ b/src/error.rs @@ -12,6 +12,7 @@ pub enum GitAiError { args: Vec, }, /// Errors from Gix + #[allow(dead_code)] GixError(String), JsonError(serde_json::Error), Utf8Error(std::str::Utf8Error), diff --git a/src/git/repository.rs 
b/src/git/repository.rs index 58988234c..72484513b 100644 --- a/src/git/repository.rs +++ b/src/git/repository.rs @@ -1,5 +1,3 @@ -use regex::Regex; - use crate::authorship::authorship_log_serialization::AuthorshipLog; use crate::authorship::rebase_authorship::rewrite_authorship_if_needed; use crate::config; @@ -854,6 +852,8 @@ impl<'a> Iterator for References<'a> { pub struct Repository { global_args: Vec, git_dir: PathBuf, + #[allow(dead_code)] + common_git_dir: PathBuf, pub storage: RepoStorage, pub pre_command_base_commit: Option, pub pre_command_refname: Option, @@ -964,6 +964,12 @@ impl Repository { self.git_dir.as_path() } + /// Returns the common .git directory shared by all worktrees. + #[allow(dead_code)] + pub fn common_git_dir(&self) -> &Path { + self.common_git_dir.as_path() + } + // Get the path of the working directory for this repository. // If this repository is bare, then None is returned. pub fn workdir(&self) -> Result { @@ -1049,66 +1055,53 @@ impl Repository { Ok(remotes) } - /// Get the git config file for this repository and fallback to global config if not found. - fn get_git_config_file(&self) -> Result, GitAiError> { - match gix_config::File::from_git_dir(self.path().to_path_buf()) { - Ok(git_config_file) => Ok(git_config_file), - Err(e) => match gix_config::File::from_globals() { - Ok(system_config) => Ok(system_config), - Err(_) => Err(GitAiError::GixError(e.to_string())), - }, - } - } /// Get config value for a given key as a String. + /// + /// Uses the git CLI so worktree config, includeIf directives, and precedence + /// match native git behavior exactly. pub fn config_get_str(&self, key: &str) -> Result, GitAiError> { - match self.get_git_config_file() { - Ok(git_config_file) => Ok(git_config_file.string(key).map(|cow| cow.to_string())), + let mut args = self.global_args_for_exec(); + args.push("config".to_string()); + args.push("--get".to_string()); + args.push(key.to_string()); + + match exec_git(&args) { + Ok(output) => { + let value = String::from_utf8(output.stdout)?; + Ok(Some(value.trim_end_matches(['\r', '\n']).to_string())) + } + Err(GitAiError::GitCliError { code: Some(1), .. }) => Ok(None), Err(e) => Err(e), } } /// Get all config values matching a regex pattern. - /// - /// Regular expression matching is currently case-sensitive - /// and done against a canonicalized version of the key - /// in which section and variable names are lowercased, but subsection names are not. - /// /// Returns a HashMap of key -> value for all matching config entries. 
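// A hedged sketch of the output shape this parsing relies on (standard
// `git config --get-regexp` behavior, values below are hypothetical and only
// for illustration): git prints one `<key> <value>` pair per line, e.g.
//
//     remote.origin.url https://example.invalid/repo.git
//     remote.origin.fetch +refs/heads/*:refs/remotes/origin/*
//
// Splitting each line at the first whitespace recovers the canonical key and
// leaves the rest as the value; a line with no whitespace (a key set with no
// value) maps to an empty string. Exit code 1 means "no matches" and is
// treated as an empty map rather than an error, mirroring config_get_str above.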
pub fn config_get_regexp( &self, pattern: &str, ) -> Result, GitAiError> { - match self.get_git_config_file() { - Ok(git_config_file) => { - let mut matches: HashMap = HashMap::new(); - - let re = Regex::new(pattern) - .map_err(|e| GitAiError::Generic(format!("Invalid regex pattern: {}", e)))?; - - // iterate over all sections - for section in git_config_file.sections() { - // Support subsections in the key - let section_name = section.header().name().to_string().to_lowercase(); - let subsection = section.header().subsection_name(); + let mut args = self.global_args_for_exec(); + args.push("config".to_string()); + args.push("--get-regexp".to_string()); + args.push(pattern.to_string()); - for value_name in section.body().value_names() { - let value_name_str = value_name.to_string().to_lowercase(); - let full_key = if let Some(sub) = subsection { - format!("{}.{}.{}", section_name, sub, value_name_str) - } else { - format!("{}.{}", section_name, value_name_str) - }; - - if re.is_match(&full_key) - && let Some(value) = - section.body().value(value_name).map(|c| c.to_string()) - { - matches.insert(full_key, value); - } + match exec_git(&args) { + Ok(output) => { + let stdout = String::from_utf8(output.stdout)?; + let mut matches: HashMap = HashMap::new(); + for line in stdout.lines().filter(|line| !line.is_empty()) { + if let Some(split_at) = line.find(char::is_whitespace) { + let key = line[..split_at].to_string(); + let value = line[split_at..].trim_start().to_string(); + matches.insert(key, value); + } else { + matches.insert(line.to_string(), String::new()); } } Ok(matches) } + Err(GitAiError::GitCliError { code: Some(1), .. }) => Ok(HashMap::new()), Err(e) => Err(e), } } @@ -2029,10 +2022,13 @@ pub fn find_repository(global_args: &[String]) -> Result )) })?; + let common_git_dir = resolve_common_git_dir(&git_dir); + Ok(Repository { global_args: normalized_global_args, storage: RepoStorage::for_repo_path(&git_dir, &workdir), git_dir, + common_git_dir, pre_command_base_commit: None, pre_command_refname: None, pre_reset_target_commit: None, @@ -2076,10 +2072,13 @@ pub fn from_bare_repository(git_dir: &Path) -> Result { let canonical_workdir = workdir.canonicalize().unwrap_or_else(|_| workdir.clone()); + let common_git_dir = resolve_common_git_dir(git_dir); + Ok(Repository { global_args, storage: RepoStorage::for_repo_path(git_dir, &workdir), git_dir: git_dir.to_path_buf(), + common_git_dir, pre_command_base_commit: None, pre_command_refname: None, pre_reset_target_commit: None, @@ -2088,6 +2087,18 @@ pub fn from_bare_repository(git_dir: &Path) -> Result { }) } +fn resolve_common_git_dir(git_dir: &Path) -> PathBuf { + let commondir_path = git_dir.join("commondir"); + if let Ok(contents) = std::fs::read_to_string(&commondir_path) { + let relative = contents.trim(); + if !relative.is_empty() { + let resolved = git_dir.join(relative); + return resolved.canonicalize().unwrap_or(resolved); + } + } + git_dir.to_path_buf() +} + pub fn find_repository_in_path(path: &str) -> Result { let global_args = vec!["-C".to_string(), path.to_string()]; find_repository(&global_args) diff --git a/src/git/status.rs b/src/git/status.rs index 3ee2c0160..98fdb669a 100644 --- a/src/git/status.rs +++ b/src/git/status.rs @@ -141,6 +141,10 @@ impl Repository { if skip_untracked { args.push("--untracked-files=no".to_string()); + } else { + // Avoid directory-collapsed untracked entries like `nested/` so downstream + // text-file detection can reason about concrete files. 
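// For illustration (hypothetical file names): with the default untracked-file
// reporting, `git status --porcelain` collapses an untracked directory into a
// single `?? nested/` entry, whereas `--untracked-files=all` expands it into
// per-file entries such as `?? nested/a.txt` and `?? nested/b.rs`, so the
// downstream text-file detection sees concrete paths instead of a directory
// placeholder.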
+ args.push("--untracked-files=all".to_string()); } // Add combined pathspecs as CLI args only if under the threshold; diff --git a/tests/ai_tab.rs b/tests/ai_tab.rs index c4b23ee9f..f248c168f 100644 --- a/tests/ai_tab.rs +++ b/tests/ai_tab.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -366,3 +367,12 @@ fn test_ai_tab_e2e_handles_dirty_files_map() { "}".ai(), ]); } + +worktree_test_wrappers! { + test_ai_tab_before_edit_checkpoint_includes_dirty_files, + test_ai_tab_after_edit_checkpoint_includes_dirty_files_and_paths, + test_ai_tab_rejects_invalid_hook_event, + test_ai_tab_requires_non_empty_tool_and_model, + test_ai_tab_e2e_marks_ai_lines, + test_ai_tab_e2e_handles_dirty_files_map, +} diff --git a/tests/amend.rs b/tests/amend.rs index 01d911a58..c0d1ebdb0 100644 --- a/tests/amend.rs +++ b/tests/amend.rs @@ -489,3 +489,16 @@ fn test_amend_repeated_round_trips_preserve_exact_line_authorship() { "// AI trailing note".ai() ]); } + +worktree_test_wrappers! { + test_amend_add_lines_at_top, + test_amend_add_lines_in_middle, + test_amend_add_lines_at_bottom, + test_amend_multiple_changes, + test_amend_with_unstaged_ai_code_in_other_file, + test_amend_preserves_unstaged_ai_attribution, + test_amend_with_multiple_files_mixed_staging, + test_amend_with_partially_staged_ai_file, + test_amend_with_partially_staged_mixed_content, + test_amend_with_unstaged_middle_section, +} diff --git a/tests/blame_flags.rs b/tests/blame_flags.rs index 10599af52..2d54e9ebf 100644 --- a/tests/blame_flags.rs +++ b/tests/blame_flags.rs @@ -1185,3 +1185,34 @@ fn test_blame_ai_human_author() { ] ); } + +worktree_test_wrappers! { + test_blame_basic_format, + test_blame_line_range, + test_blame_porcelain_format, + test_blame_show_email, + test_blame_show_name, + test_blame_show_number, + test_blame_suppress_author, + test_blame_long_rev, + test_blame_raw_timestamp, + test_blame_abbrev, + test_blame_blank_boundary, + test_blame_show_root, + test_blame_date_format, + test_blame_multiple_flags, + test_blame_incremental_format, + test_blame_line_porcelain, + test_blame_with_ai_authorship, + test_blame_contents_from_stdin, + test_blame_mark_unknown_without_authorship_log, + test_blame_mark_unknown_mixed_commits, + test_blame_mark_unknown_backward_compatible, + test_blame_auto_detects_git_blame_ignore_revs_file, + test_blame_no_ignore_revs_file_flag_disables_auto_detection, + test_blame_explicit_ignore_revs_file_takes_precedence, + test_blame_respects_git_config_blame_ignore_revs_file, + test_blame_without_ignore_revs_file_works_normally, + test_blame_ignore_revs_with_multiple_commits, + test_blame_ai_human_author, +} diff --git a/tests/checkout_switch.rs b/tests/checkout_switch.rs index 97c90c011..09f85e1de 100644 --- a/tests/checkout_switch.rs +++ b/tests/checkout_switch.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -418,3 +419,17 @@ fn test_checkout_pathspec_multiple_files() { file_b.assert_lines_and_blame(vec!["Original B".human()]); file_c.assert_lines_and_blame(vec!["Modified C by AI".ai()]); } + +worktree_test_wrappers! 
{ + test_checkout_branch_migrates_working_log, + test_checkout_force_deletes_working_log, + test_checkout_pathspec_removes_file_attributions, + test_switch_branch_migrates_working_log, + test_switch_discard_changes_deletes_working_log, + test_switch_force_flag_deletes_working_log, + test_checkout_merge_migrates_working_log, + test_switch_merge_migrates_working_log, + test_checkout_same_branch_no_op, + test_checkout_with_mixed_attribution, + test_checkout_pathspec_multiple_files, +} diff --git a/tests/checkpoint_size.rs b/tests/checkpoint_size.rs index 606747299..b050719ac 100644 --- a/tests/checkpoint_size.rs +++ b/tests/checkpoint_size.rs @@ -88,3 +88,7 @@ fn test_checkpoint_size_logging_large_ai_rewrites() { ); } } + +worktree_test_wrappers! { + test_checkpoint_size_logging_large_ai_rewrites, +} diff --git a/tests/cherry_pick.rs b/tests/cherry_pick.rs index 778c7b5f8..7a4ef104d 100644 --- a/tests/cherry_pick.rs +++ b/tests/cherry_pick.rs @@ -571,3 +571,14 @@ fn test_cherry_pick_empty_commits() { "File content should be preserved after cherry-pick/abort" ); } + +worktree_test_wrappers! { + test_single_commit_cherry_pick, + test_multiple_commits_cherry_pick, + test_cherry_pick_with_conflict_and_continue, + test_cherry_pick_abort, + test_cherry_pick_no_ai_authorship, + test_cherry_pick_multiple_ai_sessions, + test_cherry_pick_identical_trees, + test_cherry_pick_empty_commits, +} diff --git a/tests/chinese_text_edits.rs b/tests/chinese_text_edits.rs index 40dfa2b29..03667b82f 100644 --- a/tests/chinese_text_edits.rs +++ b/tests/chinese_text_edits.rs @@ -153,3 +153,10 @@ fn test_chinese_reflow_preserves_ai() { ")".ai(), ]); } + +worktree_test_wrappers! { + test_chinese_simple_additions, + test_chinese_ai_then_human_edits, + test_chinese_deletions_and_insertions, + test_chinese_partial_staging, +} diff --git a/tests/ci_squash_rebase.rs b/tests/ci_squash_rebase.rs index 59042aa52..e386b23ee 100644 --- a/tests/ci_squash_rebase.rs +++ b/tests/ci_squash_rebase.rs @@ -481,3 +481,11 @@ fn test_ci_rebase_merge_multiple_commits() { "function human() { }".human() ]); } + +worktree_test_wrappers! { + test_ci_squash_merge_basic, + test_ci_squash_merge_multiple_files, + test_ci_squash_merge_mixed_content, + test_ci_squash_merge_with_manual_changes, + test_ci_rebase_merge_multiple_commits, +} diff --git a/tests/claude_code.rs b/tests/claude_code.rs index cd2beb97a..f47086a3d 100644 --- a/tests/claude_code.rs +++ b/tests/claude_code.rs @@ -366,3 +366,7 @@ fn test_user_text_content_blocks_are_parsed_correctly() { "Second message should be Assistant" ); } + +worktree_test_wrappers! { + test_claude_e2e_prefers_latest_checkpoint_for_prompts, +} diff --git a/tests/continue_cli.rs b/tests/continue_cli.rs index b772ee7a6..da619126c 100644 --- a/tests/continue_cli.rs +++ b/tests/continue_cli.rs @@ -765,3 +765,10 @@ fn test_continue_cli_e2e_preserves_model_on_commit() { ); assert_eq!(prompt_record.agent_id.tool, "continue-cli"); } + +worktree_test_wrappers! { + test_continue_cli_e2e_with_attribution, + test_continue_cli_e2e_human_checkpoint, + test_continue_cli_e2e_multiple_tool_calls, + test_continue_cli_e2e_preserves_model_on_commit, +} diff --git a/tests/cursor.rs b/tests/cursor.rs index 9b4ae1002..3cbeac0b2 100644 --- a/tests/cursor.rs +++ b/tests/cursor.rs @@ -572,3 +572,8 @@ fn test_cursor_e2e_with_resync() { // The temp directory and database will be automatically cleaned up when temp_dir goes out of scope } + +worktree_test_wrappers! 
{ + test_cursor_e2e_with_attribution, + test_cursor_e2e_with_resync, +} diff --git a/tests/diff.rs b/tests/diff.rs index 8526be676..95d2291e3 100644 --- a/tests/diff.rs +++ b/tests/diff.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; use repos::test_repo::TestRepo; @@ -794,3 +795,22 @@ fn test_diff_range_multiple_commits() { "Should have attribution markers" ); } + +worktree_test_wrappers! { + test_diff_single_commit, + test_diff_commit_range, + test_diff_shows_ai_attribution, + test_diff_shows_human_attribution, + test_diff_multiple_files, + test_diff_initial_commit, + test_diff_pure_additions, + test_diff_pure_deletions, + test_diff_mixed_ai_and_human, + test_diff_with_head_ref, + test_diff_output_format, + test_diff_error_on_no_args, + test_diff_json_output_with_escaped_newlines, + test_diff_preserves_context_lines, + test_diff_exact_sequence_verification, + test_diff_range_multiple_commits, +} diff --git a/tests/gemini.rs b/tests/gemini.rs index 3ce45405e..872b5a156 100644 --- a/tests/gemini.rs +++ b/tests/gemini.rs @@ -899,3 +899,11 @@ fn test_gemini_e2e_partial_staging() { // ai_line5 is not committed because it's unstaged ]); } + +worktree_test_wrappers! { + test_gemini_e2e_with_attribution, + test_gemini_e2e_human_checkpoint, + test_gemini_e2e_multiple_tool_calls, + test_gemini_e2e_with_resync, + test_gemini_e2e_partial_staging, +} diff --git a/tests/github_copilot_integration.rs b/tests/github_copilot_integration.rs index f2f43988d..52ab337ba 100644 --- a/tests/github_copilot_integration.rs +++ b/tests/github_copilot_integration.rs @@ -271,3 +271,12 @@ fn test_github_copilot_human_checkpoint_with_clean_file() { // The new line should be human file.assert_lines_and_blame(lines!["const x = 1;".human(), "const y = 2;".human(),]); } + +worktree_test_wrappers! { + test_github_copilot_human_checkpoint_before_edit, + test_github_copilot_human_checkpoint_scoped_to_files, + test_github_copilot_human_then_ai_checkpoint, + test_github_copilot_multiple_files_with_dirty_files, + test_github_copilot_empty_will_edit_filepaths_fails, + test_github_copilot_human_checkpoint_with_clean_file, +} diff --git a/tests/gix_config_tests.rs b/tests/gix_config_tests.rs index d11711efe..f418476a5 100644 --- a/tests/gix_config_tests.rs +++ b/tests/gix_config_tests.rs @@ -303,3 +303,18 @@ fn test_config_get_regexp_bare_repo() { assert_eq!(result.get("baretest.key1"), Some(&"value1".to_string())); assert_eq!(result.get("baretest.key2"), Some(&"value2".to_string())); } + +worktree_test_wrappers! { + test_config_get_str_simple_value, + test_config_get_str_subsection, + test_config_get_str_missing_key_returns_none, + test_config_get_str_special_chars, + test_config_get_regexp_subsection, + test_config_get_regexp_no_matches, + test_config_get_regexp_with_subsections, + test_config_get_regexp_case_insensitive_keys, + test_config_falls_back_to_global, + test_config_local_overrides_global, + test_config_get_str_bare_repo, + test_config_get_regexp_bare_repo, +} diff --git a/tests/ignore_prompts.rs b/tests/ignore_prompts.rs index 93767ae80..31718a844 100644 --- a/tests/ignore_prompts.rs +++ b/tests/ignore_prompts.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use git_ai::authorship::transcript::{AiTranscript, Message}; @@ -307,3 +308,10 @@ fn test_prompt_sharing_disabled_with_empty_transcript() { // The key thing is the checkpoint should succeed assert!(!commit.commit_sha.is_empty()); } + +worktree_test_wrappers! 
{ + test_checkpoint_with_prompt_sharing_enabled, + test_checkpoint_with_prompt_sharing_disabled_strips_messages, + test_multiple_checkpoints_with_messages, + test_prompt_sharing_disabled_with_empty_transcript, +} diff --git a/tests/initial_attributions.rs b/tests/initial_attributions.rs index c1e1daf7d..dd9c0bee6 100644 --- a/tests/initial_attributions.rs +++ b/tests/initial_attributions.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use git_ai::authorship::attribution_tracker::LineAttribution; @@ -442,3 +443,11 @@ fn test_initial_attributions_in_subsequent_checkpoint() { assert_debug_snapshot!(normalized_b); } + +worktree_test_wrappers! { + test_initial_only_no_blame_data, + test_initial_wins_overlaps, + test_initial_and_blame_merge, + test_partial_file_coverage, + test_initial_attributions_in_subsequent_checkpoint, +} diff --git a/tests/internal_db_integration.rs b/tests/internal_db_integration.rs index 754bdb897..d78467cee 100644 --- a/tests/internal_db_integration.rs +++ b/tests/internal_db_integration.rs @@ -613,3 +613,16 @@ fn test_thinking_transcript_saves_to_internal_db_after_commit() { "Should have all messages including thinking" ); } + +worktree_test_wrappers! { + test_checkpoint_saves_prompt_to_internal_db, + test_commit_updates_prompt_with_commit_sha_and_model, + test_post_commit_uses_latest_transcript_messages, + test_multiple_checkpoints_same_session_deduplicated, + test_different_sessions_create_separate_prompts, + test_line_stats_saved_to_db_after_commit, + test_human_author_saved_to_db_after_commit, + test_workdir_saved_to_db, + test_mock_ai_checkpoint_saves_to_internal_db, + test_thinking_transcript_saves_to_internal_db_after_commit, +} diff --git a/tests/merge_rebase.rs b/tests/merge_rebase.rs index b2214b8a1..1de4552f2 100644 --- a/tests/merge_rebase.rs +++ b/tests/merge_rebase.rs @@ -265,3 +265,8 @@ fn test_blame_after_merge_conflict_resolution() { "Line 10".human(), ]); } + +worktree_test_wrappers! { + test_blame_after_merge_with_ai_contributions, + test_blame_after_merge_conflict_resolution, +} diff --git a/tests/prompt_across_commit.rs b/tests/prompt_across_commit.rs index 5e1443446..4f0576952 100644 --- a/tests/prompt_across_commit.rs +++ b/tests/prompt_across_commit.rs @@ -60,3 +60,7 @@ fn test_change_across_commits() { assert_eq!(second_ai_entry.line_ranges, vec![LineRange::Single(6)]); assert_ne!(second_ai_entry.hash, initial_ai_entry.hash); } + +worktree_test_wrappers! { + test_change_across_commits, +} diff --git a/tests/prompt_hash_migration.rs b/tests/prompt_hash_migration.rs index d4293daae..606e57640 100644 --- a/tests/prompt_hash_migration.rs +++ b/tests/prompt_hash_migration.rs @@ -335,3 +335,9 @@ fn test_prompt_hash_migration_unstaged_ai_lines_saved_to_working_log() { "ai_line7".ai(), ]); } + +worktree_test_wrappers! { + test_prompt_hash_migration_ai_adds_lines_multiple_commits, + test_prompt_hash_migration_ai_adds_then_commits_in_batches, + test_prompt_hash_migration_unstaged_ai_lines_saved_to_working_log, +} diff --git a/tests/pull_rebase_ff.rs b/tests/pull_rebase_ff.rs index 1328fcc39..934aa9a5c 100644 --- a/tests/pull_rebase_ff.rs +++ b/tests/pull_rebase_ff.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -557,3 +558,11 @@ fn test_failed_pull_rebase_without_autostash_does_not_leak_stale_ai_metadata() { "stale pull-autostash attribution leaked into later human-only commit" ); } + +worktree_test_wrappers! 
{ + test_fast_forward_pull_preserves_ai_attribution, + test_pull_rebase_autostash_preserves_uncommitted_ai_attribution, + test_pull_rebase_autostash_with_mixed_attribution, + test_pull_rebase_autostash_via_git_config, + test_fast_forward_pull_without_local_changes, +} diff --git a/tests/realistic_complex_edits.rs b/tests/realistic_complex_edits.rs index 7dae483e6..8a145b744 100644 --- a/tests/realistic_complex_edits.rs +++ b/tests/realistic_complex_edits.rs @@ -1636,3 +1636,17 @@ pub fn get_user_by_email(email: &str) -> Option { "CREATE INDEX idx_users_email ON users(email);".ai(), ]); } + +worktree_test_wrappers! { + test_realistic_refactoring_sequence, + test_realistic_api_endpoint_expansion, + test_realistic_test_file_evolution, + test_realistic_config_file_with_comments, + test_realistic_jsx_component_development, + test_realistic_class_with_multiple_methods, + test_realistic_middleware_chain_development, + test_realistic_sql_migration_sequence, + test_realistic_refactoring_with_deletions, + test_realistic_formatting_and_whitespace_changes, + test_realistic_multi_file_commit, +} diff --git a/tests/rebase.rs b/tests/rebase.rs index 3c378dfd5..5087eabfc 100644 --- a/tests/rebase.rs +++ b/tests/rebase.rs @@ -1492,3 +1492,27 @@ cat {} > "$1" "function feature3() {}".ai() ]); } + +worktree_test_wrappers! { + test_rebase_no_conflicts_identical_trees, + test_rebase_with_different_trees, + test_rebase_multiple_commits, + test_rebase_mixed_authorship, + test_rebase_fast_forward, + test_rebase_interactive_reorder, + test_rebase_skip, + test_rebase_keep_empty, + test_rebase_rerere, + test_rebase_patch_stack, + test_rebase_already_up_to_date, + test_rebase_with_conflicts, + test_rebase_abort, + test_rebase_branch_switch_during, + test_rebase_autosquash, + test_rebase_autostash, + test_rebase_exec, + test_rebase_preserve_merges, + test_rebase_commit_splitting, + test_rebase_squash_preserves_all_authorship, + test_rebase_reword_commit_with_children, +} diff --git a/tests/repos/mod.rs b/tests/repos/mod.rs index 697f267e1..f021505e0 100644 --- a/tests/repos/mod.rs +++ b/tests/repos/mod.rs @@ -11,6 +11,12 @@ macro_rules! subdir_test_variants { #[test] fn []() $body + // Variant 1b: Run from subdirectory on a worktree + #[test] + fn []() { + $crate::repos::test_repo::with_worktree_mode(|| $body); + } + // Variant 2: Run with -C flag from arbitrary directory #[test] fn []() { @@ -66,6 +72,120 @@ macro_rules! 
subdir_test_variants { type TestRepo = TestRepoWithCFlag; $body } + + // Variant 2b: Run with -C flag from arbitrary directory on a worktree + #[test] + fn []() { + $crate::repos::test_repo::with_worktree_mode(|| { + // Wrapper struct that intercepts git calls to use -C flag + struct TestRepoWithCFlag { + inner: $crate::repos::test_repo::TestRepo, + } + + #[allow(dead_code)] + impl TestRepoWithCFlag { + fn new() -> Self { + Self { inner: $crate::repos::test_repo::TestRepo::new() } + } + + fn git_from_working_dir( + &self, + _working_dir: &std::path::Path, + args: &[&str], + ) -> Result { + // Prepend -C to args and run from arbitrary directory + let arbitrary_dir = std::env::temp_dir(); + self.inner + .git_with_env_using_c_flag(args, &[], &arbitrary_dir) + } + + fn git_with_env( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&std::path::Path>, + ) -> Result { + if working_dir.is_some() { + // If working_dir is specified, prepend -C and run from arbitrary dir + let arbitrary_dir = std::env::temp_dir(); + self.inner + .git_with_env_using_c_flag(args, envs, &arbitrary_dir) + } else { + // No working_dir, use normal behavior + self.inner.git_with_env(args, envs, None) + } + } + } + + // Forward all other methods via Deref + impl std::ops::Deref for TestRepoWithCFlag { + type Target = $crate::repos::test_repo::TestRepo; + fn deref(&self) -> &Self::Target { + &self.inner + } + } + + // Type alias to shadow TestRepo + type TestRepo = TestRepoWithCFlag; + $body + }); + } + } + }; +} + +#[macro_export] +macro_rules! worktree_test_variants { + ( + fn $test_name:ident() $body:block + ) => { + paste::paste! { + // Variant 1: Run against a normal repo (baseline behavior) + #[test] + fn []() $body + + // Variant 2: Run against a linked worktree + #[test] + fn []() { + // Wrapper struct that keeps the base repo alive while exposing worktree APIs. + struct TestRepoWithWorktree { + _base: $crate::repos::test_repo::TestRepo, + worktree: $crate::repos::test_repo::WorktreeRepo, + } + + impl TestRepoWithWorktree { + fn new() -> Self { + let base = $crate::repos::test_repo::TestRepo::new(); + let worktree = base.add_worktree(stringify!($test_name)); + Self { _base: base, worktree } + } + } + + impl std::ops::Deref for TestRepoWithWorktree { + type Target = $crate::repos::test_repo::WorktreeRepo; + fn deref(&self) -> &Self::Target { + &self.worktree + } + } + + // Type alias to shadow TestRepo + type TestRepo = TestRepoWithWorktree; + $body + } + } + }; +} + +#[macro_export] +macro_rules! worktree_test_wrappers { + ( $( $test_name:ident ),+ $(,)? ) => { + paste::paste! 
{ + $( + #[test] + fn [<$test_name _on_worktree>]() { + $crate::repos::test_repo::with_worktree_mode(|| $test_name()); + } + )+ } }; } diff --git a/tests/repos/test_repo.rs b/tests/repos/test_repo.rs index b157cd870..80e49a35d 100644 --- a/tests/repos/test_repo.rs +++ b/tests/repos/test_repo.rs @@ -9,8 +9,9 @@ use git_ai::git::repo_storage::PersistedWorkingLog; use git_ai::git::repository as GitAiRepository; use git_ai::observability::wrapper_performance_targets::BenchmarkResult; use git2::Repository; -use insta::assert_debug_snapshot; +use insta::{Settings, assert_debug_snapshot}; use rand::Rng; +use std::cell::Cell; use std::fs; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; @@ -61,6 +62,8 @@ pub struct TestRepo { test_db_path: PathBuf, git_mode: TestGitMode, core_hooks_dir: Option, + base_path: Option, + base_test_db_path: Option, } #[allow(dead_code)] @@ -94,7 +97,7 @@ impl TestRepo { }); } - pub fn new() -> Self { + fn new_base_repo() -> Self { let mut rng = rand::thread_rng(); let n: u64 = rng.gen_range(0..10000000000); let base = std::env::temp_dir(); @@ -118,6 +121,8 @@ impl TestRepo { test_db_path, git_mode: test_git_mode(), core_hooks_dir, + base_path: None, + base_test_db_path: None, }; repo.apply_default_config_patch(); @@ -126,6 +131,63 @@ impl TestRepo { repo } + pub fn new() -> Self { + if WORKTREE_MODE.with(|flag| flag.get()) { + return Self::new_worktree_variant(); + } + + Self::new_base_repo() + } + + fn new_worktree_variant() -> Self { + let base_repo = Self::new_base_repo(); + base_repo.ensure_head_commit(); + + // Keep the base worktree off the default branch so tests can freely mutate it. + let base_branch = base_repo.current_branch(); + if !base_branch.is_empty() { + let mut rng = rand::thread_rng(); + let n: u64 = rng.gen_range(0..10000000000); + let temp_branch = format!("base-worktree-{}", n); + base_repo + .git_og(&["checkout", "-b", &temp_branch]) + .expect("failed to create base worktree branch"); + } + + let worktree = base_repo.add_worktree("auto"); + + let base_path = base_repo.path.clone(); + let base_test_db_path = base_repo.test_db_path.clone(); + let feature_flags = base_repo.feature_flags.clone(); + let config_patch = base_repo.config_patch.clone(); + let git_mode = base_repo.git_mode; + let core_hooks_dir = base_repo.core_hooks_dir.clone(); + + let worktree_path = worktree.path.clone(); + let worktree_test_db_path = worktree.test_db_path.clone(); + + std::mem::forget(base_repo); + std::mem::forget(worktree); + + Self { + path: worktree_path, + feature_flags, + config_patch, + test_db_path: worktree_test_db_path, + git_mode, + core_hooks_dir, + base_path: Some(base_path), + base_test_db_path: Some(base_test_db_path), + } + } + + fn ensure_head_commit(&self) { + if self.git_og(&["rev-parse", "--verify", "HEAD"]).is_err() { + self.git_og(&["commit", "--allow-empty", "-m", "initial"]) + .expect("failed to create initial commit for worktree"); + } + } + /// Create a standalone bare repository for testing pub fn new_bare() -> Self { let mut rng = rand::thread_rng(); @@ -144,6 +206,8 @@ impl TestRepo { test_db_path, git_mode: test_git_mode(), core_hooks_dir, + base_path: None, + base_test_db_path: None, }; repo.initialize_core_hooks_if_needed(); repo @@ -183,6 +247,8 @@ impl TestRepo { test_db_path: upstream_test_db_path, git_mode: test_git_mode(), core_hooks_dir: upstream_core_hooks_dir, + base_path: None, + base_test_db_path: None, }; // Clone upstream to create mirror with origin configured @@ -227,6 +293,8 @@ impl TestRepo { 
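// A usage sketch of the wrapper macro defined above (the test names are taken
// from the wrapper lists elsewhere in this patch, not new API): each listed
// test gains an `_on_worktree` twin that re-runs the same body inside
// with_worktree_mode, roughly:
//
// worktree_test_wrappers! {
//     test_simple_additions_empty_repo,
//     test_amend_add_lines_at_top,
// }
//
// expands via `paste::paste!` to, for each name:
//
// #[test]
// fn test_simple_additions_empty_repo_on_worktree() {
//     crate::repos::test_repo::with_worktree_mode(|| test_simple_additions_empty_repo());
// }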
test_db_path: mirror_test_db_path, git_mode: test_git_mode(), core_hooks_dir: mirror_core_hooks_dir, + base_path: None, + base_test_db_path: None, }; upstream.apply_default_config_patch(); @@ -258,6 +326,8 @@ impl TestRepo { test_db_path, git_mode: test_git_mode(), core_hooks_dir, + base_path: None, + base_test_db_path: None, }; repo.apply_default_config_patch(); repo.initialize_core_hooks_if_needed(); @@ -268,6 +338,51 @@ impl TestRepo { self.feature_flags = feature_flags; } + pub fn add_worktree(&self, name: &str) -> WorktreeRepo { + self.add_worktree_with_branch(name, None) + } + + pub fn add_worktree_with_branch(&self, name: &str, branch: Option<&str>) -> WorktreeRepo { + self.ensure_head_commit(); + + let mut rng = rand::thread_rng(); + let n: u64 = rng.gen_range(0..10000000000); + let worktree_path = std::env::temp_dir().join(format!("{}-worktree-{}", n, name)); + + let branch_name = branch + .map(|b| b.to_string()) + .unwrap_or_else(|| format!("worktree-{}-{}", name, n)); + + let branch_ref = format!("refs/heads/{}", branch_name); + let branch_exists = self.git_og(&["show-ref", "--verify", &branch_ref]).is_ok(); + + let mut args = vec!["worktree", "add"]; + if branch_exists { + args.push(worktree_path.to_str().expect("valid path")); + args.push(branch_name.as_str()); + } else { + args.push("-b"); + args.push(branch_name.as_str()); + args.push(worktree_path.to_str().expect("valid path")); + } + + self.git_og(&args).expect("failed to add worktree"); + + let db_n: u64 = rng.gen_range(0..10000000000); + let test_db_path = std::env::temp_dir().join(format!("{}-db", db_n)); + + WorktreeRepo { + base_path: self.path.clone(), + path: worktree_path, + worktree_name: branch_name, + feature_flags: self.feature_flags.clone(), + config_patch: self.config_patch.clone(), + test_db_path, + git_mode: self.git_mode, + core_hooks_dir: self.core_hooks_dir.clone(), + } + } + /// Patch the git-ai config for this test repo /// Allows overriding specific config properties like ignore_prompts, telemetry settings, etc. 
/// The patch is applied via environment variable when running git-ai commands @@ -752,8 +867,363 @@ impl TestRepo { } } +#[derive(Clone, Debug)] +pub struct WorktreeRepo { + base_path: PathBuf, + path: PathBuf, + worktree_name: String, + pub feature_flags: FeatureFlags, + pub(crate) config_patch: Option, + test_db_path: PathBuf, + git_mode: TestGitMode, + core_hooks_dir: Option, +} + +impl WorktreeRepo { + pub fn path(&self) -> &PathBuf { + &self.path + } + + pub fn base_path(&self) -> &PathBuf { + &self.base_path + } + + pub fn worktree_name(&self) -> &str { + &self.worktree_name + } + + pub fn canonical_path(&self) -> PathBuf { + self.path + .canonicalize() + .expect("failed to canonicalize worktree path") + } + + pub fn test_db_path(&self) -> &PathBuf { + &self.test_db_path + } + + pub fn current_branch(&self) -> String { + self.git(&["branch", "--show-current"]) + .unwrap() + .trim() + .to_string() + } + + pub fn git_ai(&self, args: &[&str]) -> Result { + self.git_ai_with_env(args, &[]) + } + + pub fn git(&self, args: &[&str]) -> Result { + self.git_with_env(args, &[], None) + } + + pub fn git_from_working_dir( + &self, + working_dir: &std::path::Path, + args: &[&str], + ) -> Result { + self.git_with_env(args, &[], Some(working_dir)) + } + + pub fn git_ai_from_working_dir( + &self, + working_dir: &std::path::Path, + args: &[&str], + ) -> Result { + let binary_path = get_binary_path(); + + let mut command = Command::new(binary_path); + + let absolute_working_dir = working_dir.canonicalize().map_err(|e| { + format!( + "Failed to canonicalize working directory {}: {}", + working_dir.display(), + e + ) + })?; + command.args(args).current_dir(&absolute_working_dir); + + if let Some(patch) = &self.config_patch + && let Ok(patch_json) = serde_json::to_string(patch) + { + command.env("GIT_AI_TEST_CONFIG_PATCH", patch_json); + } + + command.env("GIT_AI_TEST_DB_PATH", self.test_db_path.to_str().unwrap()); + + let output = command + .output() + .unwrap_or_else(|_| panic!("Failed to execute git-ai command: {:?}", args)); + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() { + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + }; + Ok(combined) + } else { + Err(stderr) + } + } + + pub fn git_og(&self, args: &[&str]) -> Result { + let mut full_args: Vec = + vec!["-C".to_string(), self.path.to_str().unwrap().to_string()]; + full_args.extend(args.iter().map(|s| s.to_string())); + + GitAiRepository::exec_git(&full_args) + .map(|output| { + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + } + }) + .map_err(|e| e.to_string()) + } + + fn run_git_command( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&Path>, + force_c_flag: bool, + ) -> Result { + let mut command = self.build_git_command(args, envs, working_dir, force_c_flag)?; + let output = command + .output() + .unwrap_or_else(|_| panic!("Failed to execute git command: {:?}", args)); + Self::command_output_to_result(output) + } + + fn build_git_command( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&Path>, + force_c_flag: bool, + ) -> Result { + let mut command = if self.git_mode.uses_wrapper() { + 
Command::new(get_binary_path()) + } else { + Command::new(git_ai::config::Config::get().git_cmd()) + }; + + let mut full_args: Vec = Vec::new(); + + if self.git_mode.uses_core_hooks() { + let hooks_dir = self.core_hooks_dir.as_ref().ok_or_else(|| { + "core hooks mode is enabled but no hooks dir is configured".to_string() + })?; + full_args.push("-c".to_string()); + full_args.push(format!("core.hooksPath={}", hooks_dir.display())); + } + + if force_c_flag || working_dir.is_none() { + full_args.push("-C".to_string()); + full_args.push(self.path.to_str().unwrap().to_string()); + } + + full_args.extend(args.iter().map(|arg| arg.to_string())); + command.args(&full_args); + + if let Some(working_dir_path) = working_dir { + let absolute_working_dir = working_dir_path.canonicalize().map_err(|e| { + format!( + "Failed to canonicalize working directory {}: {}", + working_dir_path.display(), + e + ) + })?; + command.current_dir(absolute_working_dir); + } + + if self.git_mode.uses_wrapper() { + command.env("GIT_AI", "git"); + } + + if let Some(patch) = &self.config_patch + && let Ok(patch_json) = serde_json::to_string(patch) + { + command.env("GIT_AI_TEST_CONFIG_PATCH", patch_json); + } + + command.env("GIT_AI_TEST_DB_PATH", self.test_db_path.to_str().unwrap()); + for (key, value) in envs { + command.env(key, value); + } + + Ok(command) + } + + fn command_output_to_result(output: Output) -> Result { + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() { + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + }; + Ok(combined) + } else if stderr.is_empty() { + Err(stdout) + } else { + Err(stderr) + } + } + + pub fn git_with_env( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&std::path::Path>, + ) -> Result { + self.run_git_command(args, envs, working_dir, false) + } + + pub fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result { + let binary_path = get_binary_path(); + + let mut command = Command::new(binary_path); + command.args(args).current_dir(&self.path); + + if let Some(patch) = &self.config_patch + && let Ok(patch_json) = serde_json::to_string(patch) + { + command.env("GIT_AI_TEST_CONFIG_PATCH", patch_json); + } + + command.env("GIT_AI_TEST_DB_PATH", self.test_db_path.to_str().unwrap()); + + for (key, value) in envs { + command.env(key, value); + } + + let output = command + .output() + .unwrap_or_else(|_| panic!("Failed to execute git-ai command: {:?}", args)); + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() { + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + }; + Ok(combined) + } else { + Err(stderr) + } + } + + pub fn commit(&self, message: &str) -> Result { + self.commit_with_env(message, &[], None) + } + + pub fn stage_all_and_commit(&self, message: &str) -> Result { + self.git(&["add", "-A"]).expect("add --all should succeed"); + self.commit(message) + } + + pub fn commit_with_env( + &self, + message: &str, + envs: &[(&str, &str)], + working_dir: Option<&std::path::Path>, + ) -> Result { + let output = self.git_with_env(&["commit", "-m", message], envs, working_dir); + + match output { + Ok(combined) => { + let repo = 
GitAiRepository::find_repository_in_path(self.path.to_str().unwrap()) + .map_err(|e| format!("Failed to find repository: {}", e))?; + + let head_commit = repo + .head() + .map_err(|e| format!("Failed to get HEAD: {}", e))? + .target() + .map_err(|e| format!("Failed to get HEAD target: {}", e))?; + + let authorship_log = + match git_ai::git::refs::show_authorship_note(&repo, &head_commit) { + Some(content) => AuthorshipLog::deserialize_from_string(&content) + .map_err(|e| format!("Failed to parse authorship log: {}", e))?, + None => { + return Err("No authorship log found for the new commit".to_string()); + } + }; + + Ok(NewCommit { + commit_sha: head_commit, + authorship_log, + stdout: combined, + }) + } + Err(e) => Err(e), + } + } +} + +impl Drop for WorktreeRepo { + fn drop(&mut self) { + let _ = Command::new(git_ai::config::Config::get().git_cmd()) + .args([ + "-C", + self.base_path.to_str().unwrap(), + "worktree", + "remove", + "--force", + self.path.to_str().unwrap(), + ]) + .output(); + let _ = fs::remove_dir_all(self.path.clone()); + let _ = fs::remove_dir_all(self.test_db_path.clone()); + } +} + impl Drop for TestRepo { fn drop(&mut self) { + if let Some(base_path) = &self.base_path { + let _ = Command::new(git_ai::config::Config::get().git_cmd()) + .args([ + "-C", + base_path.to_str().unwrap(), + "worktree", + "remove", + "--force", + self.path.to_str().unwrap(), + ]) + .output(); + let _ = fs::remove_dir_all(self.path.clone()); + let _ = fs::remove_dir_all(self.test_db_path.clone()); + let _ = fs::remove_dir_all(base_path.clone()); + if let Some(base_test_db_path) = &self.base_test_db_path { + let _ = fs::remove_dir_all(base_test_db_path.clone()); + } + if let Some(core_hooks_dir) = &self.core_hooks_dir { + let _ = fs::remove_dir_all(core_hooks_dir); + } + return; + } + fs::remove_dir_all(self.path.clone()).expect("failed to remove test repo"); // Also clean up the test database directory (may not exist if no DB operations were done) let _ = fs::remove_dir_all(self.test_db_path.clone()); @@ -782,6 +1252,32 @@ impl NewCommit { static COMPILED_BINARY: OnceLock = OnceLock::new(); static DEFAULT_BRANCH_NAME: OnceLock = OnceLock::new(); +thread_local! { + static WORKTREE_MODE: Cell = const { Cell::new(false) }; +} + +pub fn with_worktree_mode(f: F) -> R +where + F: FnOnce() -> R, +{ + WORKTREE_MODE.with(|flag| { + let previous = flag.replace(true); + struct Reset<'a> { + flag: &'a Cell, + previous: bool, + } + impl<'a> Drop for Reset<'a> { + fn drop(&mut self) { + self.flag.set(self.previous); + } + } + let _reset = Reset { flag, previous }; + + let mut settings = Settings::clone_current(); + settings.set_snapshot_suffix("worktree"); + settings.bind(f) + }) +} fn get_default_branch_name() -> String { let output = Command::new("git") diff --git a/tests/reset.rs b/tests/reset.rs index e41f3ed10..b8931239f 100644 --- a/tests/reset.rs +++ b/tests/reset.rs @@ -565,3 +565,20 @@ fn test_reset_soft_detached_head_preserves_ai_authorship() { .unwrap(); file.assert_lines_and_blame(vec!["base line".human(), "ai line".ai()]); } + +worktree_test_wrappers! 
{ + test_reset_hard_deletes_working_log, + test_reset_soft_reconstructs_working_log, + test_reset_mixed_reconstructs_working_log, + test_reset_to_same_commit_is_noop, + test_reset_multiple_commits, + test_reset_preserves_uncommitted_changes, + test_reset_with_pathspec, + test_reset_forward_is_noop, + test_reset_mixed_ai_human_changes, + test_reset_merge, + test_reset_with_new_files, + test_reset_with_deleted_files, + test_reset_mixed_pathspec_preserves_ai_authorship, + test_reset_mixed_pathspec_multiple_commits, +} diff --git a/tests/show_prompt.rs b/tests/show_prompt.rs index 214940b21..1bb18e609 100644 --- a/tests/show_prompt.rs +++ b/tests/show_prompt.rs @@ -167,3 +167,18 @@ fn show_prompt_with_offset_skips_occurrences() { err ); } + +worktree_test_wrappers! { + parse_args_requires_prompt_id, + parse_args_parses_basic_id, + parse_args_parses_commit_flag, + parse_args_parses_offset_flag, + parse_args_rejects_commit_and_offset_together, + parse_args_rejects_multiple_prompt_ids, + parse_args_requires_commit_value, + parse_args_requires_offset_value, + parse_args_rejects_invalid_offset, + parse_args_rejects_unknown_flag, + show_prompt_returns_latest_prompt_by_default, + show_prompt_with_offset_skips_occurrences, +} diff --git a/tests/simple_additions.rs b/tests/simple_additions.rs index 9d37d2be3..2ef05c597 100644 --- a/tests/simple_additions.rs +++ b/tests/simple_additions.rs @@ -1271,3 +1271,30 @@ fn test_ai_edits_file_with_spaces_in_filename() { "Line 3".human(), ]); } + +worktree_test_wrappers! { + test_simple_additions_empty_repo, + test_simple_additions_with_base_commit, + test_simple_additions_on_top_of_ai_contributions, + test_simple_additions_new_file_not_git_added, + test_ai_human_interleaved_line_attribution, + test_simple_ai_then_human_deletion, + test_multiple_ai_checkpoints_with_human_deletions, + test_complex_mixed_additions_and_deletions, + test_ai_adds_lines_multiple_commits, + test_partial_staging_filters_unstaged_lines, + test_human_stages_some_ai_lines, + test_multiple_ai_sessions_with_partial_staging, + test_ai_adds_then_commits_in_batches, + test_ai_edits_with_partial_staging, + test_unstaged_changes_not_committed, + test_unstaged_ai_lines_saved_to_working_log, + test_new_file_partial_staging_two_commits, + test_mock_ai_with_pathspecs, + test_with_duplicate_lines, + test_ai_deletion_with_human_checkpoint_in_same_commit, + test_large_ai_readme_rewrite_with_no_data_bug, + test_deletion_within_a_single_line_attribution, + test_deletion_of_multiple_lines_by_ai, + test_ai_edits_file_with_spaces_in_filename, +} diff --git a/tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap b/tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap new file mode 100644 index 000000000..2d5832dc8 --- /dev/null +++ b/tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 284 +expression: normalized +--- +"COMMIT_SHA (tool1 TIMESTAMP 1) line 1\nCOMMIT_SHA (tool1 TIMESTAMP 2) line 2\nCOMMIT_SHA (tool1 TIMESTAMP 3) line 3\nCOMMIT_SHA (mock_ai TIMESTAMP 4) line 4\nCOMMIT_SHA (tool2 TIMESTAMP 5) line 5\nCOMMIT_SHA (mock_ai TIMESTAMP 6) line 6\nCOMMIT_SHA (mock_ai TIMESTAMP 7) line 7\n" diff --git a/tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap b/tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap new file mode 100644 index 000000000..41e4453a2 --- /dev/null +++ 
b/tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 455 +expression: normalized_b +--- +"COMMIT_SHA (subsequent-tool TIMESTAMP 1) line 1 from INITIAL\nCOMMIT_SHA (subsequent-tool TIMESTAMP 2) line 2 from INITIAL\n" diff --git a/tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap b/tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap new file mode 100644 index 000000000..539f359b3 --- /dev/null +++ b/tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 121 +expression: normalized +--- +"COMMIT_SHA (test-tool TIMESTAMP 1) line 1 from INITIAL\nCOMMIT_SHA (test-tool TIMESTAMP 2) line 2 from INITIAL\nCOMMIT_SHA (test-tool TIMESTAMP 3) line 3 from INITIAL\n" diff --git a/tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap b/tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap new file mode 100644 index 000000000..3bf171c54 --- /dev/null +++ b/tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 190 +expression: normalized +--- +"COMMIT_SHA (override-tool TIMESTAMP 1) line 1\nCOMMIT_SHA (override-tool TIMESTAMP 2) line 2\nCOMMIT_SHA (Test User TIMESTAMP 3) line 3 modified\n" diff --git a/tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap b/tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap new file mode 100644 index 000000000..83eda3c99 --- /dev/null +++ b/tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 363 +expression: normalized_b +--- +"COMMIT_SHA (mock_ai TIMESTAMP 1) line 1 in B\nCOMMIT_SHA (mock_ai TIMESTAMP 2) line 2 in B\n" diff --git a/tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap b/tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap new file mode 100644 index 000000000..cd3db89a5 --- /dev/null +++ b/tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 356 +expression: normalized_a +--- +"COMMIT_SHA (toolA TIMESTAMP 1) line 1 in A\nCOMMIT_SHA (toolA TIMESTAMP 2) line 2 in A\n" diff --git a/tests/snapshots/stats__markdown_stats_all_ai@worktree.snap b/tests/snapshots/stats__markdown_stats_all_ai@worktree.snap new file mode 100644 index 000000000..f2ae57eb5 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_all_ai@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 280 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 0%\nšŸ¤– ai ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 100%\n```\n\n
\nMore stats\n\n- 1.0 lines generated for every 1 accepted\n- 30 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_all_human@worktree.snap b/tests/snapshots/stats__markdown_stats_all_human@worktree.snap new file mode 100644 index 000000000..220fcb02b --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_all_human@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 257 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 100%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 0%\n```\n\n
\nMore stats\n\n- 0.0 lines generated for every 1 accepted\n- 0 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap b/tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap new file mode 100644 index 000000000..7bf4943f8 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 234 +expression: markdown +--- +"(no additions)\n" diff --git a/tests/snapshots/stats__markdown_stats_formatting@worktree.snap b/tests/snapshots/stats__markdown_stats_formatting@worktree.snap new file mode 100644 index 000000000..6dd771318 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_formatting@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 386 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 38%\nšŸ¤ mixed ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 15%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 46%\n```\n\n
\nMore stats\n\n- 1.7 lines generated for every 1 accepted\n- 25 seconds waiting for AI \n- Top model: cursor::claude-3.5-sonnet (6 accepted lines, 10 generated lines)\n\n
" diff --git a/tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap b/tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap new file mode 100644 index 000000000..2dd7fb618 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 350 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 2%\nšŸ¤– ai ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 98%\n```\n\n
\nMore stats\n\n- 1.0 lines generated for every 1 accepted\n- 10 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_mixed@worktree.snap b/tests/snapshots/stats__markdown_stats_mixed@worktree.snap new file mode 100644 index 000000000..67c1cead6 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_mixed@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 303 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 33%\nšŸ¤ mixed ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 17%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 50%\n```\n\n
\nMore stats\n\n- 1.7 lines generated for every 1 accepted\n- 45 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap b/tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap new file mode 100644 index 000000000..eee58d61a --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 326 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 40%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 60%\n```\n\n
\nMore stats\n\n- 1.0 lines generated for every 1 accepted\n- 15 seconds waiting for AI \n\n
" diff --git a/tests/squash_merge.rs b/tests/squash_merge.rs index 7d6c31c73..e013e1808 100644 --- a/tests/squash_merge.rs +++ b/tests/squash_merge.rs @@ -293,3 +293,10 @@ fn test_prepare_working_log_squash_with_mixed_additions() { "Sum of accepted_lines across prompts should match ai_accepted stat" ); } + +worktree_test_wrappers! { + test_prepare_working_log_simple_squash, + test_prepare_working_log_squash_with_main_changes, + test_prepare_working_log_squash_multiple_sessions, + test_prepare_working_log_squash_with_mixed_additions, +} diff --git a/tests/stash_attribution.rs b/tests/stash_attribution.rs index 4f17f6223..27a500f3a 100644 --- a/tests/stash_attribution.rs +++ b/tests/stash_attribution.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -994,3 +995,23 @@ fn test_stash_apply_reset_apply_again() { "Expected AI prompts in authorship log after multiple apply/reset cycles" ); } + +worktree_test_wrappers! { + test_stash_pop_with_ai_attribution, + test_stash_apply_with_ai_attribution, + test_stash_apply_named_reference, + test_stash_multiple_files, + test_stash_with_existing_initial_attributions, + test_stash_pop_default_reference, + test_stash_pop_empty_repo, + test_stash_mixed_human_and_ai, + test_stash_push_with_pathspec_single_file, + test_stash_push_with_pathspec_directory, + test_stash_push_multiple_pathspecs, + test_stash_pop_with_conflict, + test_stash_mixed_staged_and_unstaged, + test_stash_pop_onto_head_with_ai_changes, + test_stash_pop_across_branches, + test_stash_pop_across_branches_with_conflict, + test_stash_apply_reset_apply_again, +} diff --git a/tests/stats.rs b/tests/stats.rs index 6dc8c24c7..52049bb22 100644 --- a/tests/stats.rs +++ b/tests/stats.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use git_ai::authorship::stats::CommitStats; use insta::assert_debug_snapshot; @@ -190,6 +191,11 @@ fn test_stats_cli_range() { #[test] fn test_stats_cli_empty_tree_range() { let repo = TestRepo::new(); + let base_commit_count = repo + .git(&["rev-list", "--count", "HEAD"]) + .ok() + .and_then(|count| count.trim().parse::().ok()) + .unwrap_or(0); // First commit: AI line let mut file = repo.filename("history.txt"); @@ -218,9 +224,9 @@ fn test_stats_cli_empty_tree_range() { serde_json::from_str(&output).unwrap(); // Entire history from empty tree to HEAD: - // - 2 commits in range + // - base commits (e.g., worktree bootstrap) + 2 new commits in range // - 1 AI-added line, 1 human-added line in final diff - assert_eq!(stats.authorship_stats.total_commits, 2); + assert_eq!(stats.authorship_stats.total_commits, base_commit_count + 2); assert_eq!(stats.range_stats.git_diff_added_lines, 2); assert_eq!(stats.range_stats.ai_additions, 1); // human_additions is computed as git_diff_added_lines - ai_accepted @@ -620,3 +626,16 @@ fn test_post_commit_large_ignored_files_do_not_trigger_skip_warning() { assert_eq!(stats.ai_additions, 0); assert_eq!(stats.human_additions, 0); } + +worktree_test_wrappers! 
{ + test_authorship_log_stats, + test_stats_cli_range, + test_stats_cli_empty_tree_range, + test_markdown_stats_deletion_only, + test_markdown_stats_all_human, + test_markdown_stats_all_ai, + test_markdown_stats_mixed, + test_markdown_stats_no_mixed, + test_markdown_stats_minimal_human, + test_markdown_stats_formatting, +} diff --git a/tests/worktrees.rs b/tests/worktrees.rs new file mode 100644 index 000000000..a61c35f8c --- /dev/null +++ b/tests/worktrees.rs @@ -0,0 +1,853 @@ +#[macro_use] +mod repos; + +use std::fs; +use std::path::PathBuf; +use std::process::Command; + +use git_ai::authorship::stats::CommitStats; +use git_ai::git::group_files_by_repository; +use rand::Rng; +use serde::Deserialize; +use serde_json::Value; + +use repos::test_repo::{NewCommit, TestRepo, WorktreeRepo, default_branchname}; + +trait RepoOps { + fn path(&self) -> &PathBuf; + fn git(&self, args: &[&str]) -> Result; + fn git_ai(&self, args: &[&str]) -> Result; + fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result; + fn commit(&self, message: &str) -> Result; +} + +impl RepoOps for TestRepo { + fn path(&self) -> &PathBuf { + self.path() + } + fn git(&self, args: &[&str]) -> Result { + self.git(args) + } + fn git_ai(&self, args: &[&str]) -> Result { + self.git_ai(args) + } + fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result { + self.git_ai_with_env(args, envs) + } + fn commit(&self, message: &str) -> Result { + self.commit(message) + } +} + +impl RepoOps for WorktreeRepo { + fn path(&self) -> &PathBuf { + self.path() + } + fn git(&self, args: &[&str]) -> Result { + self.git(args) + } + fn git_ai(&self, args: &[&str]) -> Result { + self.git_ai(args) + } + fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result { + self.git_ai_with_env(args, envs) + } + fn commit(&self, message: &str) -> Result { + self.commit(message) + } +} + +#[derive(Debug, Deserialize)] +struct StatusJson { + stats: CommitStats, + checkpoints: Vec, +} + +#[derive(Debug, Deserialize)] +struct StatusCheckpoint { + additions: u32, + deletions: u32, + tool_model: String, + is_human: bool, +} + +fn write_file(repo: &impl RepoOps, relative: &str, contents: &str) -> PathBuf { + let path = repo.path().join(relative); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).expect("failed to create parent directories"); + } + fs::write(&path, contents).expect("failed to write file"); + path +} + +fn parse_status_json(output: &str) -> StatusJson { + let json = extract_json_object(output); + serde_json::from_str(&json).expect("status output should be valid JSON") +} + +fn status_summary(repo: &impl RepoOps) -> (CommitStats, Vec<(u32, u32, bool, String)>) { + let output = repo + .git_ai(&["status", "--json"]) + .expect("git-ai status should succeed"); + let parsed = parse_status_json(&output); + let checkpoints = parsed + .checkpoints + .iter() + .map(|cp| { + ( + cp.additions, + cp.deletions, + cp.is_human, + cp.tool_model.clone(), + ) + }) + .collect::>(); + (parsed.stats, checkpoints) +} + +fn status_summary_with_env( + repo: &impl RepoOps, + envs: &[(&str, &str)], +) -> (CommitStats, Vec<(u32, u32, bool, String)>) { + let output = repo + .git_ai_with_env(&["status", "--json"], envs) + .expect("git-ai status should succeed"); + let parsed = parse_status_json(&output); + let checkpoints = parsed + .checkpoints + .iter() + .map(|cp| { + ( + cp.additions, + cp.deletions, + cp.is_human, + cp.tool_model.clone(), + ) + }) + .collect::>(); + (parsed.stats, checkpoints) +} + +fn 
stats_key_fields(stats: &CommitStats) -> (u32, u32, u32, u32, u32, u32) { + ( + stats.human_additions, + stats.mixed_additions, + stats.ai_additions, + stats.ai_accepted, + stats.git_diff_added_lines, + stats.git_diff_deleted_lines, + ) +} + +fn worktree_git_dir(worktree: &WorktreeRepo) -> PathBuf { + let output = worktree + .git(&["rev-parse", "--git-dir"]) + .expect("rev-parse --git-dir should succeed"); + let git_dir = PathBuf::from(output.trim()); + if git_dir.is_relative() { + worktree.path().join(git_dir) + } else { + git_dir + } +} + +fn worktree_commondir(worktree: &WorktreeRepo) -> PathBuf { + let git_dir = worktree_git_dir(worktree); + let commondir_path = git_dir.join("commondir"); + let commondir_contents = fs::read_to_string(&commondir_path).expect("commondir should exist"); + let commondir = PathBuf::from(commondir_contents.trim()); + let resolved = if commondir.is_absolute() { + commondir + } else { + git_dir.join(commondir) + }; + resolved.canonicalize().unwrap_or(resolved) +} + +fn extract_json_object(output: &str) -> String { + let start = output.find('{').unwrap_or(0); + let end = output.rfind('}').unwrap_or(output.len().saturating_sub(1)); + output[start..=end].to_string() +} + +fn normalize_diff(output: &str) -> String { + output + .lines() + .filter(|line| !line.starts_with("index ")) + .collect::>() + .join("\n") +} + +fn parse_blame(output: &str) -> Vec<(String, String)> { + output + .lines() + .filter(|line| !line.trim().is_empty()) + .map(|line| { + if let Some(start_paren) = line.find('(') { + if let Some(end_paren) = line.find(')') { + let author_section = &line[start_paren + 1..end_paren]; + let content = line[end_paren + 1..].trim().to_string(); + + let parts: Vec<&str> = author_section.trim().split_whitespace().collect(); + let mut author_parts = Vec::new(); + for part in parts { + if part.chars().next().unwrap_or('a').is_ascii_digit() { + break; + } + author_parts.push(part); + } + let author = author_parts.join(" "); + return (author, content); + } + } + ("unknown".to_string(), line.to_string()) + }) + .collect() +} + +fn temp_dir_with_prefix(prefix: &str) -> PathBuf { + let mut rng = rand::thread_rng(); + let n: u64 = rng.gen_range(0..10000000000); + let path = std::env::temp_dir().join(format!("{}-{}", prefix, n)); + fs::create_dir_all(&path).expect("failed to create temp dir"); + path +} + +fn checkpoint_and_commit( + repo: &impl RepoOps, + relative: &str, + contents: &str, + message: &str, + ai: bool, +) -> NewCommit { + write_file(repo, relative, contents); + let checkpoint_args = if ai { + vec!["checkpoint", "mock_ai"] + } else { + vec!["checkpoint"] + }; + repo.git_ai(&checkpoint_args) + .expect("checkpoint should succeed"); + repo.git(&["add", "-A"]).expect("add should succeed"); + repo.commit(message).expect("commit should succeed") +} + +#[test] +fn test_worktree_checkpoint_status_parity() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + write_file(&base_repo, "file.txt", "one\n"); + base_repo.git_ai(&["checkpoint"]).unwrap(); + let (base_stats, base_checkpoints) = status_summary(&base_repo); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("status"); + write_file(&worktree, "file.txt", "one\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (wt_stats, wt_checkpoints) = status_summary(&worktree); + + assert_eq!(stats_key_fields(&base_stats), stats_key_fields(&wt_stats)); + assert_eq!(base_checkpoints, wt_checkpoints); +} + +#[test] +fn 
test_worktree_diff_parity() { + let base_repo = TestRepo::new(); + let base_commit = + checkpoint_and_commit(&base_repo, "file.txt", "line1\nline2\n", "base", false); + let base_diff = base_repo + .git_ai(&["diff", &base_commit.commit_sha]) + .unwrap(); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("diff"); + let wt_commit = + checkpoint_and_commit(&worktree, "file.txt", "line1\nline2\n", "worktree", false); + let wt_diff = worktree.git_ai(&["diff", &wt_commit.commit_sha]).unwrap(); + + assert_eq!(normalize_diff(&base_diff), normalize_diff(&wt_diff)); +} + +#[test] +fn test_worktree_commit_authorship_parity() { + let base_repo = TestRepo::new(); + let base_commit = checkpoint_and_commit(&base_repo, "file.txt", "line1\n", "base", true); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("authorship"); + let wt_commit = checkpoint_and_commit(&worktree, "file.txt", "line1\n", "worktree", true); + + let base_attestations = base_commit.authorship_log.attestations.len(); + let wt_attestations = wt_commit.authorship_log.attestations.len(); + assert_eq!(base_attestations, wt_attestations); + + let base_entries: usize = base_commit + .authorship_log + .attestations + .iter() + .map(|a| a.entries.len()) + .sum(); + let wt_entries: usize = wt_commit + .authorship_log + .attestations + .iter() + .map(|a| a.entries.len()) + .sum(); + assert_eq!(base_entries, wt_entries); +} + +#[test] +fn test_worktree_blame_parity() { + let base_repo = TestRepo::new(); + checkpoint_and_commit(&base_repo, "file.txt", "human\n", "base", false); + checkpoint_and_commit(&base_repo, "file.txt", "human\nai\n", "base-ai", true); + let base_blame = base_repo.git_ai(&["blame", "file.txt"]).unwrap(); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("blame"); + checkpoint_and_commit(&worktree, "file.txt", "human\n", "wt", false); + checkpoint_and_commit(&worktree, "file.txt", "human\nai\n", "wt-ai", true); + let wt_blame = worktree.git_ai(&["blame", "file.txt"]).unwrap(); + + let base_parsed = parse_blame(&base_blame); + let wt_parsed = parse_blame(&wt_blame); + assert_eq!(base_parsed, wt_parsed); +} + +#[test] +fn test_worktree_subdir_repository_discovery() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("subdir"); + write_file(&worktree, "nested/file.txt", "content\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + + let subdir = worktree.path().join("nested"); + let output = worktree + .git_ai_from_working_dir(&subdir, &["status", "--json"]) + .expect("status from subdir should succeed"); + let parsed = parse_status_json(&output); + assert!(!parsed.checkpoints.is_empty()); +} + +#[test] +fn test_group_files_by_repository_with_worktree() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("group"); + let file_path = write_file(&worktree, "file.txt", "content\n"); + + let (repos, orphans) = + group_files_by_repository(&[file_path.to_string_lossy().to_string()], None); + + assert!(orphans.is_empty()); + assert_eq!(repos.len(), 1); + let (found_repo, files) = repos.values().next().unwrap(); + assert_eq!(files.len(), 1); + let workdir = found_repo.workdir().expect("workdir should exist"); + assert_eq!(workdir, worktree.canonical_path()); +} + +#[test] +fn test_worktree_branch_switch_and_merge() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("merge"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + let base_branch = worktree.current_branch(); + + worktree + .git(&["switch", "-c", 
"feature-merge"]) + .expect("switch to feature should succeed"); + checkpoint_and_commit(&worktree, "file.txt", "base\nfeature\n", "feature", false); + + worktree + .git(&["switch", &base_branch]) + .expect("switch back should succeed"); + worktree + .git(&["merge", "feature-merge"]) + .expect("merge should succeed"); + + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert!(contents.contains("feature")); +} + +#[test] +fn test_worktree_rebase_and_cherry_pick() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("rebase"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + let base_branch = worktree.current_branch(); + + worktree + .git(&["switch", "-c", "feature-rebase"]) + .expect("switch to feature should succeed"); + checkpoint_and_commit(&worktree, "feature.txt", "feature\n", "feature", false); + + worktree + .git(&["switch", &base_branch]) + .expect("switch back should succeed"); + checkpoint_and_commit(&worktree, "main.txt", "main\n", "main", false); + + worktree + .git(&["switch", "feature-rebase"]) + .expect("switch to feature should succeed"); + worktree + .git(&["rebase", &base_branch]) + .expect("rebase should succeed"); + + worktree + .git(&["switch", &base_branch]) + .expect("switch back should succeed"); + let cherry_sha = worktree + .git(&["rev-parse", "feature-rebase"]) + .unwrap() + .trim() + .to_string(); + worktree + .git(&["cherry-pick", &cherry_sha]) + .expect("cherry-pick should succeed"); + + let feature_contents = fs::read_to_string(worktree.path().join("feature.txt")).unwrap(); + let main_contents = fs::read_to_string(worktree.path().join("main.txt")).unwrap(); + assert!(feature_contents.contains("feature")); + assert!(main_contents.contains("main")); +} + +#[test] +fn test_worktree_stash_and_reset() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("stash"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + write_file(&worktree, "file.txt", "base\nchange\n"); + + worktree.git(&["stash"]).expect("stash should succeed"); + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert_eq!(contents, "base\n"); + + worktree.git(&["stash", "pop"]).expect("stash pop"); + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert!(contents.contains("change")); + + worktree + .git(&["reset", "--hard", "HEAD"]) + .expect("reset should succeed"); + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert_eq!(contents, "base\n"); +} + +#[test] +fn test_worktree_amend() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("amend"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + write_file(&worktree, "file.txt", "base\namended\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + worktree.git(&["add", "-A"]).unwrap(); + worktree + .git(&["commit", "--amend", "--no-edit"]) + .expect("amend should succeed"); + + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert!(contents.contains("amended")); +} + +#[test] +fn test_worktree_stats_json() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("stats"); + checkpoint_and_commit(&worktree, "file.txt", "line1\nline2\n", "stats", true); + + let output = worktree + .git_ai(&["stats", "--json"]) + .expect("stats should succeed"); + let json = extract_json_object(&output); + let parsed: CommitStats = serde_json::from_str(&json).expect("stats JSON"); + 
assert!(parsed.git_diff_added_lines > 0); +} + +#[test] +fn test_worktree_notes_visible_from_base_repo() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("notes"); + let commit = checkpoint_and_commit(&worktree, "file.txt", "line1\n", "note", true); + + let base_repo = git_ai::git::find_repository_in_path(repo.path().to_str().unwrap()) + .expect("find repository"); + let note = git_ai::git::refs::show_authorship_note(&base_repo, &commit.commit_sha); + assert!(note.is_some()); +} + +#[test] +fn test_worktree_multiple_worktrees_diverge() { + let repo = TestRepo::new(); + let wt_one = repo.add_worktree("one"); + let wt_two = repo.add_worktree("two"); + + checkpoint_and_commit(&wt_one, "file.txt", "one\n", "one", false); + checkpoint_and_commit(&wt_two, "file.txt", "two\n", "two", false); + + let log_one = wt_one.git(&["log", "-1", "--pretty=%s"]).unwrap(); + let log_two = wt_two.git(&["log", "-1", "--pretty=%s"]).unwrap(); + + assert!(log_one.trim().contains("one")); + assert!(log_two.trim().contains("two")); +} + +#[test] +fn test_worktree_default_branch_name_is_respected() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("branchname"); + + let default_branch = default_branchname(); + let current_branch = worktree.current_branch(); + + assert!( + current_branch.starts_with("worktree-") + || current_branch == default_branch + || current_branch == "HEAD", + "unexpected worktree branch: {} (default: {})", + current_branch, + default_branch + ); +} + +#[test] +fn test_worktree_config_resolves_path_with_temp_home() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("config"); + + let remote_path = temp_dir_with_prefix("git-ai-remote"); + let init_output = Command::new("git") + .args(["init", "--bare", remote_path.to_str().unwrap()]) + .output() + .expect("git init --bare"); + assert!(init_output.status.success()); + + worktree + .git(&["remote", "add", "origin", remote_path.to_str().unwrap()]) + .expect("remote add should succeed"); + + let temp_home = temp_dir_with_prefix("git-ai-home"); + let output = worktree.git_ai_with_env( + &["config", "set", "exclude_repositories", "."], + &[("HOME", temp_home.to_str().unwrap())], + ); + assert!(output.is_ok(), "config set should succeed: {:?}", output); + + let config_path = temp_home.join(".git-ai").join("config.json"); + let config_contents = fs::read_to_string(&config_path).expect("config.json should exist"); + let json: Value = serde_json::from_str(&config_contents).expect("valid json"); + let excludes = json + .get("exclude_repositories") + .and_then(|v| v.as_array()) + .cloned() + .unwrap_or_default(); + assert!( + excludes.iter().any(|v| { + v.as_str() + .map(|s| s.contains(remote_path.to_str().unwrap())) + .unwrap_or(false) + }), + "exclude_repositories should include remote url/path" + ); + + let _ = fs::remove_dir_all(temp_home); + let _ = fs::remove_dir_all(remote_path); +} + +#[test] +fn test_worktree_config_overrides_common_config() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + base_repo + .git(&["config", "extensions.worktreeConfig", "true"]) + .expect("enable worktree config"); + + let worktree = base_repo.add_worktree("config-override"); + worktree + .git(&["config", "--worktree", "user.name", "Worktree"]) + .expect("set worktree user.name"); + + write_file(&base_repo, "file.txt", "base\n"); + 
base_repo.git_ai(&["checkpoint"]).unwrap(); + let (_, base_checkpoints) = status_summary(&base_repo); + assert_eq!( + base_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Base") + ); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!( + wt_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Worktree") + ); +} + +#[test] +fn test_worktree_config_falls_back_to_common_config() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + base_repo + .git(&["config", "extensions.worktreeConfig", "true"]) + .expect("enable worktree config"); + + let worktree = base_repo.add_worktree("config-fallback"); + let _ = worktree.git(&["config", "--worktree", "--unset-all", "user.name"]); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!(wt_checkpoints.first().map(|cp| cp.3.as_str()), Some("Base")); +} + +#[test] +fn test_worktree_config_overrides_global_config() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + base_repo + .git(&["config", "extensions.worktreeConfig", "true"]) + .expect("enable worktree config"); + + let worktree = base_repo.add_worktree("config-global"); + worktree + .git(&["config", "--worktree", "user.name", "Worktree"]) + .expect("set worktree user.name"); + + let temp_home = temp_dir_with_prefix("git-ai-home"); + let home_str = temp_home.to_str().expect("valid home path"); + base_repo + .git_with_env( + &["config", "--global", "user.name", "Global"], + &[("HOME", home_str)], + None, + ) + .expect("set global user.name"); + + let envs = [("HOME", home_str)]; + + write_file(&base_repo, "file.txt", "base\n"); + base_repo.git_ai_with_env(&["checkpoint"], &envs).unwrap(); + let (_, base_checkpoints) = status_summary_with_env(&base_repo, &envs); + assert_eq!( + base_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Base") + ); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai_with_env(&["checkpoint"], &envs).unwrap(); + let (_, wt_checkpoints) = status_summary_with_env(&worktree, &envs); + assert_eq!( + wt_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Worktree") + ); + + let _ = fs::remove_dir_all(temp_home); +} + +#[test] +fn test_worktree_config_worktree_ignored_without_extension() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + + let worktree = base_repo.add_worktree("config-worktree-off"); + let wt_config_path = worktree_git_dir(&worktree).join("config.worktree"); + let config_contents = "[user]\n\tname = WorktreeFile\n"; + fs::write(&wt_config_path, config_contents).expect("write config.worktree"); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!(wt_checkpoints.first().map(|cp| cp.3.as_str()), Some("Base")); +} + +#[test] +fn test_worktree_include_if_onbranch_applies() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", 
"initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + + let include_dir = temp_dir_with_prefix("git-ai-onbranch"); + let include_path = include_dir.join("onbranch.config"); + fs::write(&include_path, "[user]\n\tname = OnBranch\n").expect("write onbranch include"); + + let include_key = "includeIf.onbranch:worktree-onbranch-*.path"; + base_repo + .git(&[ + "config", + "--add", + include_key, + include_path.to_str().expect("valid include path"), + ]) + .expect("set includeIf.onbranch"); + + let worktree = base_repo.add_worktree("onbranch"); + + write_file(&base_repo, "file.txt", "base\n"); + base_repo.git_ai(&["checkpoint"]).unwrap(); + let (_, base_checkpoints) = status_summary(&base_repo); + assert_eq!( + base_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Base") + ); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!( + wt_checkpoints.first().map(|cp| cp.3.as_str()), + Some("OnBranch") + ); + + let _ = fs::remove_dir_all(include_dir); +} + +#[test] +fn test_worktree_locked_allows_status() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = base_repo.add_worktree("locked"); + let worktree_path = worktree.path().to_str().expect("valid worktree path"); + + base_repo + .git_og(&["worktree", "lock", worktree_path]) + .expect("worktree lock should succeed"); + + let output = worktree.git_ai(&["status", "--json"]); + assert!(output.is_ok(), "status should work on locked worktree"); + + let _ = base_repo.git_og(&["worktree", "unlock", worktree_path]); +} + +#[test] +fn test_worktree_removed_does_not_break_base_status() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = base_repo.add_worktree("removed"); + let worktree_path = worktree.path().to_str().expect("valid worktree path"); + + base_repo + .git_og(&["worktree", "remove", "-f", worktree_path]) + .expect("worktree remove should succeed"); + + let output = base_repo.git_ai(&["status", "--json"]); + assert!(output.is_ok(), "base status should succeed after removal"); +} + +#[test] +fn test_worktree_detached_head_checkpoint() { + let repo = TestRepo::new(); + repo.git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = repo.add_worktree("detached"); + worktree + .git(&["checkout", "--detach"]) + .expect("detach HEAD"); + + write_file(&worktree, "file.txt", "detached\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + + let output = worktree + .git_ai(&["status", "--json"]) + .expect("status should succeed"); + let parsed = parse_status_json(&output); + assert!(!parsed.checkpoints.is_empty()); +} + +#[test] +fn test_worktree_commondir_resolution_matches_git() { + let repo = TestRepo::new(); + repo.git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = repo.add_worktree("commondir"); + + let expected_common = worktree_commondir(&worktree); + let found_repo = git_ai::git::find_repository_in_path(worktree.path().to_str().unwrap()) + .expect("find repository"); + let actual_common = found_repo + .common_git_dir() + .canonicalize() + .unwrap_or_else(|_| found_repo.common_git_dir().to_path_buf()); + + assert_eq!(expected_common, actual_common); +} + +#[test] +fn test_worktree_storage_lives_in_worktree_git_dir() { + let repo = TestRepo::new(); + repo.git(&["commit", 
"--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = repo.add_worktree("storage"); + write_file(&worktree, "file.txt", "content\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + + let git_dir = worktree_git_dir(&worktree); + let found_repo = git_ai::git::find_repository_in_path(worktree.path().to_str().unwrap()) + .expect("find repository"); + let expected_prefix = git_dir.join("ai").join("working_logs"); + let actual = found_repo.storage.working_logs.clone(); + assert!( + actual.starts_with(&expected_prefix), + "working logs should live under worktree git dir (expected prefix {:?}, got {:?})", + expected_prefix, + actual + ); + + let head_sha = found_repo.head().expect("head").target().expect("head sha"); + let checkpoints_file = actual.join(head_sha).join("checkpoints.jsonl"); + assert!(checkpoints_file.exists(), "checkpoint log should exist"); +} + +#[test] +fn test_worktree_working_logs_are_isolated() { + let repo = TestRepo::new(); + repo.git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let wt_one = repo.add_worktree("isolation-one"); + let wt_two = repo.add_worktree("isolation-two"); + + write_file(&wt_one, "file.txt", "one\n"); + wt_one.git_ai(&["checkpoint"]).unwrap(); + + let output = wt_two + .git_ai(&["status", "--json"]) + .expect("status should succeed"); + let parsed = parse_status_json(&output); + assert!( + parsed.checkpoints.is_empty(), + "worktree checkpoints should not leak across worktrees" + ); +} From fbe69d7d2f53b7e88d49e43c39d828e79f967ab9 Mon Sep 17 00:00:00 2001 From: Sasha Varlamov Date: Sat, 14 Feb 2026 15:45:16 -0500 Subject: [PATCH 2/5] Fix Windows core-hooks home resolution and hook chaining --- src/commands/core_hooks.rs | 77 +++++++++++++++++++++++++++++++------- 1 file changed, 64 insertions(+), 13 deletions(-) diff --git a/src/commands/core_hooks.rs b/src/commands/core_hooks.rs index 74a201a22..322ecc33c 100644 --- a/src/commands/core_hooks.rs +++ b/src/commands/core_hooks.rs @@ -1339,9 +1339,35 @@ fn core_hook_state_path(repository: &Repository) -> PathBuf { repository.path().join("ai").join(CORE_HOOK_STATE_FILE) } +fn home_dir_from_env() -> Option { + for key in ["GIT_AI_HOME", "HOME", "USERPROFILE"] { + if let Some(value) = std::env::var_os(key) + && !value.is_empty() + { + return Some(PathBuf::from(value)); + } + } + + #[cfg(windows)] + { + if let (Some(home_drive), Some(home_path)) = + (std::env::var_os("HOMEDRIVE"), std::env::var_os("HOMEPATH")) + && !home_drive.is_empty() + && !home_path.is_empty() + { + let mut combined = PathBuf::from(home_drive); + combined.push(home_path); + return Some(combined); + } + } + + None +} + /// Returns the managed global core-hooks directory. pub fn managed_core_hooks_dir() -> Result { - let home = dirs::home_dir() + let home = home_dir_from_env() + .or_else(dirs::home_dir) .ok_or_else(|| GitAiError::Generic("Unable to determine home directory".to_string()))?; Ok(home.join(".git-ai").join("core-hooks")) } @@ -1380,24 +1406,49 @@ if [ -n "$previous_hooks_dir" ]; then fi fi +is_windows_shell=0 +case "$(uname -s 2>/dev/null)" in + MINGW*|MSYS*|CYGWIN*) is_windows_shell=1 ;; +esac + +run_chained_hook() {{ + hook_path="$1" + shift + + if [ "$hook_path" = "$0" ]; then + return 0 + fi + + if [ "$is_windows_shell" = "1" ]; then + if [ -f "$hook_path" ]; then + sh "$hook_path" "$@" + return $? + fi + return 0 + fi + + if [ -x "$hook_path" ]; then + "$hook_path" "$@" + return $? 
+ fi + + return 0 +}} + if [ -n "$previous_hooks_dir" ]; then previous_hook="$previous_hooks_dir/{hook}" - if [ -x "$previous_hook" ] && [ "$previous_hook" != "$0" ]; then - "$previous_hook" "$@" - previous_status=$? - if [ $previous_status -ne 0 ]; then - exit $previous_status - fi + run_chained_hook "$previous_hook" "$@" + previous_status=$? + if [ $previous_status -ne 0 ]; then + exit $previous_status fi else repo_git_dir="${{GIT_DIR:-.git}}" repo_hook="$repo_git_dir/hooks/{hook}" - if [ -x "$repo_hook" ] && [ "$repo_hook" != "$0" ]; then - "$repo_hook" "$@" - repo_status=$? - if [ $repo_status -ne 0 ]; then - exit $repo_status - fi + run_chained_hook "$repo_hook" "$@" + repo_status=$? + if [ $repo_status -ne 0 ]; then + exit $repo_status fi fi From 07b97dfac12a5a2a606f8fe38d8604a5bf620d06 Mon Sep 17 00:00:00 2001 From: Sasha Varlamov Date: Sat, 14 Feb 2026 16:17:46 -0500 Subject: [PATCH 3/5] Harden core-hooks stash/reset handling and expand hook e2e coverage --- src/commands/core_hooks.rs | 241 ++++++++++++++++++++++---- tests/core_hooks_install_e2e.rs | 106 +++++++++++ tests/corehooks_wrapper_regression.rs | 56 +++++- 3 files changed, 371 insertions(+), 32 deletions(-) diff --git a/src/commands/core_hooks.rs b/src/commands/core_hooks.rs index 322ecc33c..8ff1710d9 100644 --- a/src/commands/core_hooks.rs +++ b/src/commands/core_hooks.rs @@ -48,6 +48,7 @@ struct CoreHookState { pending_pull_autostash: Option, pending_cherry_pick: Option, pending_stash_apply: Option, + pending_stash_ref_update: Option, pending_prepared_orig_head_ms: Option, pending_commit_base_head: Option, } @@ -57,6 +58,12 @@ struct PendingStashApplyState { created_at_ms: u128, } +#[derive(Debug, Clone, Serialize, Deserialize)] +struct PendingStashRefUpdateState { + created_at_ms: u128, + stash_count_before: usize, +} + #[derive(Debug, Clone, Serialize, Deserialize)] struct PendingAutostashState { authorship_log_json: String, @@ -503,8 +510,7 @@ fn handle_reference_transaction( let mut saw_orig_head_update = false; let mut moved_branch_ref: Option<(String, String)> = None; let mut moved_head_ref: Option<(String, String)> = None; - let mut created_stash_sha: Option = None; - let mut deleted_stash_sha: Option = None; + let mut stash_ref_update: Option<(String, String)> = None; let mut created_cherry_pick_head: Option = None; let mut deleted_cherry_pick_head: Option = None; let mut created_auto_merge_sha: Option = None; @@ -540,14 +546,8 @@ fn handle_reference_transaction( moved_head_ref = Some((old.to_string(), new.to_string())); } - if reference == "refs/stash" { - let (created, deleted) = stash_ref_transition(old, new); - if let Some(created) = created { - created_stash_sha = Some(created); - } - if let Some(deleted) = deleted { - deleted_stash_sha = Some(deleted); - } + if reference == "refs/stash" && old != new { + stash_ref_update = Some((old.to_string(), new.to_string())); } if reference == "CHERRY_PICK_HEAD" { @@ -579,6 +579,15 @@ fn handle_reference_transaction( } } + if stash_ref_update.is_some() + && let Some(stash_count_before) = stash_entry_count(repository) + { + state.pending_stash_ref_update = Some(PendingStashRefUpdateState { + created_at_ms: now_ms(), + stash_count_before, + }); + } + let has_recent_orig_head = state .pending_prepared_orig_head_ms .map(|ts| now_ms().saturating_sub(ts) <= STATE_EVENT_MAX_AGE_MS) @@ -598,6 +607,12 @@ fn handle_reference_transaction( state.pending_prepared_orig_head_ms = None; } + if let Some(pending) = state.pending_stash_ref_update.as_ref() + && 
now_ms().saturating_sub(pending.created_at_ms) > STATE_EVENT_MAX_AGE_MS + { + state.pending_stash_ref_update = None; + } + // Drop stale pull-autostash snapshots that never got restored. if let Some(pending) = state.pending_pull_autostash.as_ref() && now_ms().saturating_sub(pending.created_at_ms) > PENDING_PULL_AUTOSTASH_MAX_AGE_MS @@ -608,21 +623,55 @@ fn handle_reference_transaction( return Ok(()); } + let mut state = load_core_hook_state(repository)?; + let stash_count_before = state.pending_stash_ref_update.take().and_then(|pending| { + if now_ms().saturating_sub(pending.created_at_ms) <= STATE_EVENT_MAX_AGE_MS { + Some(pending.stash_count_before) + } else { + None + } + }); + save_core_hook_state(repository, &state)?; + + let stash_count_after = stash_entry_count(repository); + let reflog_action = reflog_action(); + let (created_stash_sha, deleted_stash_sha) = stash_ref_update + .as_ref() + .map(|(old, new)| { + resolve_stash_ref_transition( + old, + new, + stash_count_before, + stash_count_after, + reflog_action.as_deref(), + ) + }) + .unwrap_or((None, None)); + + let auto_merge_created = created_auto_merge_sha.is_some(); + for remote in remotes_to_sync { let _ = fetch_authorship_notes(repository, &remote); } + if auto_merge_created { + mark_pending_stash_apply(repository)?; + } + if let Some(stash_sha) = created_stash_sha { let _ = handle_stash_created(repository, &stash_sha); } if let Some(stash_sha) = deleted_stash_sha { - let _ = restore_stash_attributions_from_sha(repository, &stash_sha); - clear_pending_stash_apply(repository)?; - } - - if created_auto_merge_sha.is_some() { - mark_pending_stash_apply(repository)?; + if should_restore_deleted_stash(auto_merge_created, reflog_action.as_deref()) { + let _ = restore_stash_attributions_from_sha(repository, &stash_sha); + clear_pending_stash_apply(repository)?; + } else { + debug_log(&format!( + "Skipping stash attribution restore for deleted stash {} (likely stash drop)", + stash_sha + )); + } } if let Some(source_commit) = created_cherry_pick_head { @@ -776,18 +825,96 @@ fn is_zero_oid(oid: &str) -> bool { !oid.is_empty() && oid.chars().all(|c| c == '0') } -fn stash_ref_transition(old: &str, new: &str) -> (Option, Option) { +#[derive(Debug, Clone, PartialEq, Eq)] +enum StashRefTransition { + Created { + stash_sha: String, + }, + Deleted { + stash_sha: String, + }, + AmbiguousReplace { + old_stash_sha: String, + new_stash_sha: String, + }, + Unchanged, +} + +fn classify_stash_ref_transition(old: &str, new: &str) -> StashRefTransition { + if old == new { + return StashRefTransition::Unchanged; + } + if is_zero_oid(old) && !is_zero_oid(new) { - return (Some(new.to_string()), None); + return StashRefTransition::Created { + stash_sha: new.to_string(), + }; } - if !is_zero_oid(old) { - // `old -> zero` is stash drop/pop of last entry. - // `old -> non-zero` is stash pop when additional stash entries remain. 
- return (None, Some(old.to_string())); + if !is_zero_oid(old) && is_zero_oid(new) { + return StashRefTransition::Deleted { + stash_sha: old.to_string(), + }; + } + + if !is_zero_oid(old) && !is_zero_oid(new) { + return StashRefTransition::AmbiguousReplace { + old_stash_sha: old.to_string(), + new_stash_sha: new.to_string(), + }; + } + + StashRefTransition::Unchanged +} + +fn resolve_stash_ref_transition( + old: &str, + new: &str, + stash_count_before: Option, + stash_count_after: Option, + reflog_action: Option<&str>, +) -> (Option, Option) { + match classify_stash_ref_transition(old, new) { + StashRefTransition::Created { stash_sha } => (Some(stash_sha), None), + StashRefTransition::Deleted { stash_sha } => (None, Some(stash_sha)), + StashRefTransition::AmbiguousReplace { + old_stash_sha, + new_stash_sha, + } => match (stash_count_before, stash_count_after) { + (Some(before), Some(after)) if after > before => (Some(new_stash_sha), None), + (Some(before), Some(after)) if after < before => (None, Some(old_stash_sha)), + _ if reflog_action + .map(|action| action.starts_with("stash push") || action == "stash") + .unwrap_or(false) => + { + (Some(new_stash_sha), None) + } + _ if reflog_action + .map(|action| action.starts_with("stash pop") || action.starts_with("stash drop")) + .unwrap_or(false) => + { + (None, Some(old_stash_sha)) + } + _ => (None, None), + }, + StashRefTransition::Unchanged => (None, None), } +} - (None, None) +fn should_restore_deleted_stash(auto_merge_created: bool, reflog_action: Option<&str>) -> bool { + if auto_merge_created { + return true; + } + + reflog_action + .map(|action| action.starts_with("stash pop")) + .unwrap_or(false) +} + +fn stash_entry_count(repository: &Repository) -> Option { + list_stash_shas(repository) + .ok() + .map(|entries| entries.len()) } fn build_rebase_complete_event_from_start( @@ -1101,6 +1228,7 @@ fn detect_reset_mode_from_worktree(repository: &Repository) -> ResetKind { let has_unstaged_changes = entries.iter().any(|entry| { entry.unstaged != crate::git::status::StatusCode::Unmodified && entry.unstaged != crate::git::status::StatusCode::Ignored + && entry.unstaged != crate::git::status::StatusCode::Untracked }); if has_staged_changes { @@ -1396,6 +1524,7 @@ if [ -f "$previous_hooks_file" ]; then "~") previous_hooks_dir="$HOME" ;; "~/"*) previous_hooks_dir="$HOME/${{previous_hooks_dir#\~/}}" ;; esac + previous_hooks_dir=$(printf '%s' "$previous_hooks_dir" | tr '\\' '/') fi if [ -n "$previous_hooks_dir" ]; then @@ -1415,7 +1544,9 @@ run_chained_hook() {{ hook_path="$1" shift - if [ "$hook_path" = "$0" ]; then + hook_path_normalized=$(printf '%s' "$hook_path" | tr '\\' '/') + self_path_normalized=$(printf '%s' "$0" | tr '\\' '/') + if [ "$hook_path_normalized" = "$self_path_normalized" ]; then return 0 fi @@ -1485,7 +1616,7 @@ pub(crate) fn normalize_hook_binary_path(git_ai_binary: &Path) -> String { mod tests { use super::{ build_rebase_complete_event_from_start, find_repository_for_hook, is_zero_oid, - stash_ref_transition, + resolve_stash_ref_transition, should_restore_deleted_stash, }; use crate::git::rewrite_log::{RebaseStartEvent, RewriteLogEvent}; use crate::git::test_utils::TmpRepo; @@ -1516,14 +1647,62 @@ mod tests { } #[test] - fn stash_ref_transition_treats_nonzero_to_nonzero_as_popped_stash() { - let popped_sha = "1111111111111111111111111111111111111111"; - let next_sha = "2222222222222222222222222222222222222222"; + fn stash_ref_transition_nonzero_to_nonzero_with_depth_growth_is_creation() { + let old_sha = 
"1111111111111111111111111111111111111111"; + let new_sha = "2222222222222222222222222222222222222222"; + + let (created, deleted) = + resolve_stash_ref_transition(old_sha, new_sha, Some(1), Some(2), None); - let (created, deleted) = stash_ref_transition(popped_sha, next_sha); + assert_eq!(created.as_deref(), Some(new_sha)); + assert!(deleted.is_none()); + } + + #[test] + fn stash_ref_transition_nonzero_to_nonzero_with_depth_shrink_is_deletion() { + let old_sha = "1111111111111111111111111111111111111111"; + let new_sha = "2222222222222222222222222222222222222222"; - assert!(created.is_none(), "stash pop should not mark created stash"); - assert_eq!(deleted.as_deref(), Some(popped_sha)); + let (created, deleted) = + resolve_stash_ref_transition(old_sha, new_sha, Some(2), Some(1), None); + + assert!(created.is_none()); + assert_eq!(deleted.as_deref(), Some(old_sha)); + } + + #[test] + fn stash_ref_transition_nonzero_to_nonzero_without_depth_signal_is_ignored() { + let old_sha = "1111111111111111111111111111111111111111"; + let new_sha = "2222222222222222222222222222222222222222"; + + let (created, deleted) = resolve_stash_ref_transition(old_sha, new_sha, None, None, None); + + assert!(created.is_none()); + assert!(deleted.is_none()); + } + + #[test] + fn restore_deleted_stash_requires_pop_signal() { + assert!(should_restore_deleted_stash(true, None)); + assert!(should_restore_deleted_stash(false, Some("stash pop"))); + assert!(!should_restore_deleted_stash(false, Some("stash drop"))); + assert!(!should_restore_deleted_stash(false, None)); + } + + #[test] + fn stash_ref_transition_nonzero_to_nonzero_uses_reflog_fallback_when_depth_missing() { + let old_sha = "1111111111111111111111111111111111111111"; + let new_sha = "2222222222222222222222222222222222222222"; + + let (created, deleted) = + resolve_stash_ref_transition(old_sha, new_sha, None, None, Some("stash push")); + assert_eq!(created.as_deref(), Some(new_sha)); + assert!(deleted.is_none()); + + let (created, deleted) = + resolve_stash_ref_transition(old_sha, new_sha, None, None, Some("stash pop")); + assert!(created.is_none()); + assert_eq!(deleted.as_deref(), Some(old_sha)); } #[test] diff --git a/tests/core_hooks_install_e2e.rs b/tests/core_hooks_install_e2e.rs index 7750a78a0..3d68872e2 100644 --- a/tests/core_hooks_install_e2e.rs +++ b/tests/core_hooks_install_e2e.rs @@ -438,6 +438,41 @@ fn post_commit_chains_previous_global_hook() { ); } +#[test] +fn post_commit_chains_previous_global_hook_with_helper_script() { + let sandbox = HookConfigSandbox::new(); + let previous_hooks_dir = sandbox.temp.path().join("previous-hooks"); + let helper_dir = previous_hooks_dir.join("_"); + let marker = sandbox.temp.path().join("previous-helper-postcommit-ran"); + let marker_escaped = shell_escape(&marker); + + sandbox.write_hook( + &helper_dir, + "helper.sh", + &format!( + "#!/bin/sh\nprintf '%s\\n' previous-helper-ran >> \"{}\"\n", + marker_escaped + ), + ); + sandbox.write_hook( + &previous_hooks_dir, + "post-commit", + "#!/bin/sh\n. 
\"$(dirname -- \"$0\")/_/helper.sh\"\nexit 0\n", + ); + sandbox.set_global_hooks_path(&previous_hooks_dir); + sandbox.install_hooks(); + + sandbox.commit_file( + "previous-helper.txt", + "helper\n", + "run chained previous post-commit helper hook", + ); + assert!( + marker.exists(), + "previous global post-commit helper hook did not run" + ); +} + #[test] fn falls_back_to_repo_dot_git_hooks_when_no_previous_global_path() { let sandbox = HookConfigSandbox::new(); @@ -450,6 +485,54 @@ fn falls_back_to_repo_dot_git_hooks_when_no_previous_global_path() { assert!(marker.exists(), "repo .git/hooks/post-commit did not run"); } +#[test] +fn pre_push_chains_previous_global_hook_and_forwards_remote_args() { + let sandbox = HookConfigSandbox::new(); + let previous_hooks_dir = sandbox.temp.path().join("previous-hooks"); + let marker = sandbox.temp.path().join("previous-pre-push-args"); + let marker_escaped = shell_escape(&marker); + + sandbox.write_hook( + &previous_hooks_dir, + "pre-push", + &format!( + "#!/bin/sh\nprintf '%s|%s\\n' \"$1\" \"$2\" >> \"{}\"\nexit 0\n", + marker_escaped + ), + ); + sandbox.set_global_hooks_path(&previous_hooks_dir); + sandbox.install_hooks(); + + let remote_path = sandbox.temp.path().join("remote.git"); + sandbox.run_git_ok(&["init", "--bare", remote_path.to_str().expect("remote path")]); + sandbox.run_git_ok(&[ + "remote", + "add", + "origin", + remote_path.to_str().expect("remote path"), + ]); + + sandbox.run_git_ok(&["push", "-u", "origin", "HEAD"]); + + let marker_content = fs::read_to_string(&marker).expect("read pre-push marker"); + let line = marker_content + .lines() + .find(|line| !line.trim().is_empty()) + .expect("pre-push hook did not capture any args"); + let mut parts = line.splitn(2, '|'); + let remote_name = parts.next().unwrap_or_default(); + let remote_url = parts.next().unwrap_or_default(); + + assert_eq!( + remote_name, "origin", + "pre-push remote name was not forwarded" + ); + assert!( + !remote_url.trim().is_empty(), + "pre-push remote URL argument was not forwarded" + ); +} + #[test] fn does_not_run_repo_dot_git_hook_when_previous_global_path_exists() { let sandbox = HookConfigSandbox::new(); @@ -797,6 +880,29 @@ fn local_core_hooks_path_survives_global_install_uninstall_cycles() { ); } +#[test] +fn local_core_hooks_path_remains_functional_after_global_uninstall() { + let sandbox = HookConfigSandbox::new(); + let husky_dir = sandbox.repo.join(".husky"); + let marker = sandbox.temp.path().join("local-husky-after-uninstall-ran"); + + sandbox.write_hook(&husky_dir, "pre-commit", &marker_hook_script(&marker, 0)); + sandbox.set_local_hooks_path_raw(".husky"); + + sandbox.install_hooks(); + sandbox.uninstall_hooks(); + + sandbox.commit_file( + "local-after-uninstall.txt", + "local hook\n", + "local hook should still run after uninstall", + ); + assert!( + marker.exists(), + "repo-local hooks must keep working after global managed hooks are uninstalled" + ); +} + #[test] fn previous_hooks_self_reference_does_not_recurse_or_hang() { let sandbox = HookConfigSandbox::new(); diff --git a/tests/corehooks_wrapper_regression.rs b/tests/corehooks_wrapper_regression.rs index 661dcba83..2c65b3c50 100644 --- a/tests/corehooks_wrapper_regression.rs +++ b/tests/corehooks_wrapper_regression.rs @@ -1,9 +1,10 @@ mod repos; use git_ai::git::repository::find_repository_in_path; -use git_ai::git::rewrite_log::RewriteLogEvent; +use git_ai::git::rewrite_log::{ResetKind, RewriteLogEvent}; use repos::test_file::ExpectedLineExt; use repos::test_repo::TestRepo; +use std::fs; 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] struct RewriteEventCounts { @@ -52,6 +53,20 @@ fn rewrite_event_counts(repo: &TestRepo) -> RewriteEventCounts { } } +fn latest_reset_kind(repo: &TestRepo) -> Option { + let gitai_repo = + find_repository_in_path(repo.path().to_str().unwrap()).expect("failed to open repository"); + let events = gitai_repo + .storage + .read_rewrite_events() + .expect("failed to read rewrite events"); + + events.into_iter().find_map(|event| match event { + RewriteLogEvent::Reset { reset } => Some(reset.kind), + _ => None, + }) +} + #[test] fn test_commit_dry_run_does_not_record_rewrite_event() { let repo = TestRepo::new(); @@ -130,6 +145,45 @@ fn test_reset_rewrite_event_recorded_once() { ); } +#[test] +fn test_reset_hard_with_untracked_files_records_hard_mode() { + let repo = TestRepo::new(); + + let mut file = repo.filename("tracked.txt"); + file.set_contents(vec!["line 1".to_string()]); + let first_commit = repo + .stage_all_and_commit("first commit") + .expect("first commit should succeed"); + + file.set_contents(vec!["line 1".human(), "line 2 ai".ai()]); + repo.git_ai(&["checkpoint", "mock_ai"]) + .expect("checkpoint should succeed"); + repo.stage_all_and_commit("second commit") + .expect("second commit should succeed"); + + fs::write(repo.path().join("scratch.tmp"), "left alone\n").expect("write untracked file"); + let before_counts = rewrite_event_counts(&repo); + + repo.git(&["reset", "--hard", &first_commit.commit_sha]) + .expect("reset --hard should succeed"); + + let after_counts = rewrite_event_counts(&repo); + assert_eq!( + after_counts.reset, + before_counts.reset + 1, + "expected exactly one reset rewrite event for reset --hard", + ); + assert_eq!( + latest_reset_kind(&repo), + Some(ResetKind::Hard), + "untracked files must not cause reset --hard to be recorded as mixed", + ); + assert!( + repo.read_file("scratch.tmp").is_some(), + "untracked file should remain after reset --hard", + ); +} + #[test] fn test_commit_amend_rewrite_event_recorded_once() { let repo = TestRepo::new(); From 1b1d0234c6a0fa4892d003540b52a87583a47ceb Mon Sep 17 00:00:00 2001 From: Sasha Varlamov Date: Sat, 14 Feb 2026 14:56:59 -0500 Subject: [PATCH 4/5] Add comprehensive worktree support coverage across modes --- src/error.rs | 1 + src/git/repository.rs | 101 ++- src/git/status.rs | 4 + tests/ai_tab.rs | 10 + tests/amend.rs | 13 + tests/blame_flags.rs | 31 + tests/checkout_switch.rs | 15 + tests/checkpoint_size.rs | 4 + tests/cherry_pick.rs | 11 + tests/chinese_text_edits.rs | 7 + tests/ci_squash_rebase.rs | 8 + tests/claude_code.rs | 4 + tests/continue_cli.rs | 7 + tests/cursor.rs | 5 + tests/diff.rs | 20 + tests/gemini.rs | 8 + tests/github_copilot_integration.rs | 9 + tests/gix_config_tests.rs | 15 + tests/ignore_prompts.rs | 8 + tests/initial_attributions.rs | 9 + tests/internal_db_integration.rs | 13 + tests/merge_rebase.rs | 5 + tests/prompt_across_commit.rs | 4 + tests/prompt_hash_migration.rs | 6 + tests/pull_rebase_ff.rs | 9 + tests/realistic_complex_edits.rs | 14 + tests/rebase.rs | 24 + tests/repos/mod.rs | 120 +++ tests/repos/test_repo.rs | 500 +++++++++- tests/reset.rs | 17 + tests/show_prompt.rs | 15 + tests/simple_additions.rs | 27 + ...ons__initial_and_blame_merge@worktree.snap | 6 + ...ons_in_subsequent_checkpoint@worktree.snap | 6 + ...__initial_only_no_blame_data@worktree.snap | 6 + ...tions__initial_wins_overlaps@worktree.snap | 6 + ...ons__partial_file_coverage@worktree-2.snap | 6 + ...tions__partial_file_coverage@worktree.snap | 6 + 
...stats__markdown_stats_all_ai@worktree.snap | 6 + ...ts__markdown_stats_all_human@worktree.snap | 6 + ...markdown_stats_deletion_only@worktree.snap | 6 + ...s__markdown_stats_formatting@worktree.snap | 6 + ...markdown_stats_minimal_human@worktree.snap | 6 + .../stats__markdown_stats_mixed@worktree.snap | 6 + ...ats__markdown_stats_no_mixed@worktree.snap | 6 + tests/squash_merge.rs | 7 + tests/stash_attribution.rs | 21 + tests/stats.rs | 23 +- tests/worktrees.rs | 853 ++++++++++++++++++ 49 files changed, 1977 insertions(+), 49 deletions(-) create mode 100644 tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap create mode 100644 tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap create mode 100644 tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap create mode 100644 tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap create mode 100644 tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap create mode 100644 tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_all_ai@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_all_human@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_formatting@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_mixed@worktree.snap create mode 100644 tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap create mode 100644 tests/worktrees.rs diff --git a/src/error.rs b/src/error.rs index fa621e5d8..8b06a10dd 100644 --- a/src/error.rs +++ b/src/error.rs @@ -12,6 +12,7 @@ pub enum GitAiError { args: Vec, }, /// Errors from Gix + #[allow(dead_code)] GixError(String), JsonError(serde_json::Error), Utf8Error(std::str::Utf8Error), diff --git a/src/git/repository.rs b/src/git/repository.rs index 58988234c..72484513b 100644 --- a/src/git/repository.rs +++ b/src/git/repository.rs @@ -1,5 +1,3 @@ -use regex::Regex; - use crate::authorship::authorship_log_serialization::AuthorshipLog; use crate::authorship::rebase_authorship::rewrite_authorship_if_needed; use crate::config; @@ -854,6 +852,8 @@ impl<'a> Iterator for References<'a> { pub struct Repository { global_args: Vec, git_dir: PathBuf, + #[allow(dead_code)] + common_git_dir: PathBuf, pub storage: RepoStorage, pub pre_command_base_commit: Option, pub pre_command_refname: Option, @@ -964,6 +964,12 @@ impl Repository { self.git_dir.as_path() } + /// Returns the common .git directory shared by all worktrees. + #[allow(dead_code)] + pub fn common_git_dir(&self) -> &Path { + self.common_git_dir.as_path() + } + // Get the path of the working directory for this repository. // If this repository is bare, then None is returned. pub fn workdir(&self) -> Result { @@ -1049,66 +1055,53 @@ impl Repository { Ok(remotes) } - /// Get the git config file for this repository and fallback to global config if not found. 
- fn get_git_config_file(&self) -> Result, GitAiError> { - match gix_config::File::from_git_dir(self.path().to_path_buf()) { - Ok(git_config_file) => Ok(git_config_file), - Err(e) => match gix_config::File::from_globals() { - Ok(system_config) => Ok(system_config), - Err(_) => Err(GitAiError::GixError(e.to_string())), - }, - } - } /// Get config value for a given key as a String. + /// + /// Uses the git CLI so worktree config, includeIf directives, and precedence + /// match native git behavior exactly. pub fn config_get_str(&self, key: &str) -> Result, GitAiError> { - match self.get_git_config_file() { - Ok(git_config_file) => Ok(git_config_file.string(key).map(|cow| cow.to_string())), + let mut args = self.global_args_for_exec(); + args.push("config".to_string()); + args.push("--get".to_string()); + args.push(key.to_string()); + + match exec_git(&args) { + Ok(output) => { + let value = String::from_utf8(output.stdout)?; + Ok(Some(value.trim_end_matches(['\r', '\n']).to_string())) + } + Err(GitAiError::GitCliError { code: Some(1), .. }) => Ok(None), Err(e) => Err(e), } } /// Get all config values matching a regex pattern. - /// - /// Regular expression matching is currently case-sensitive - /// and done against a canonicalized version of the key - /// in which section and variable names are lowercased, but subsection names are not. - /// /// Returns a HashMap of key -> value for all matching config entries. pub fn config_get_regexp( &self, pattern: &str, ) -> Result, GitAiError> { - match self.get_git_config_file() { - Ok(git_config_file) => { - let mut matches: HashMap = HashMap::new(); - - let re = Regex::new(pattern) - .map_err(|e| GitAiError::Generic(format!("Invalid regex pattern: {}", e)))?; - - // iterate over all sections - for section in git_config_file.sections() { - // Support subsections in the key - let section_name = section.header().name().to_string().to_lowercase(); - let subsection = section.header().subsection_name(); + let mut args = self.global_args_for_exec(); + args.push("config".to_string()); + args.push("--get-regexp".to_string()); + args.push(pattern.to_string()); - for value_name in section.body().value_names() { - let value_name_str = value_name.to_string().to_lowercase(); - let full_key = if let Some(sub) = subsection { - format!("{}.{}.{}", section_name, sub, value_name_str) - } else { - format!("{}.{}", section_name, value_name_str) - }; - - if re.is_match(&full_key) - && let Some(value) = - section.body().value(value_name).map(|c| c.to_string()) - { - matches.insert(full_key, value); - } + match exec_git(&args) { + Ok(output) => { + let stdout = String::from_utf8(output.stdout)?; + let mut matches: HashMap = HashMap::new(); + for line in stdout.lines().filter(|line| !line.is_empty()) { + if let Some(split_at) = line.find(char::is_whitespace) { + let key = line[..split_at].to_string(); + let value = line[split_at..].trim_start().to_string(); + matches.insert(key, value); + } else { + matches.insert(line.to_string(), String::new()); } } Ok(matches) } + Err(GitAiError::GitCliError { code: Some(1), .. 
}) => Ok(HashMap::new()), Err(e) => Err(e), } } @@ -2029,10 +2022,13 @@ pub fn find_repository(global_args: &[String]) -> Result )) })?; + let common_git_dir = resolve_common_git_dir(&git_dir); + Ok(Repository { global_args: normalized_global_args, storage: RepoStorage::for_repo_path(&git_dir, &workdir), git_dir, + common_git_dir, pre_command_base_commit: None, pre_command_refname: None, pre_reset_target_commit: None, @@ -2076,10 +2072,13 @@ pub fn from_bare_repository(git_dir: &Path) -> Result { let canonical_workdir = workdir.canonicalize().unwrap_or_else(|_| workdir.clone()); + let common_git_dir = resolve_common_git_dir(git_dir); + Ok(Repository { global_args, storage: RepoStorage::for_repo_path(git_dir, &workdir), git_dir: git_dir.to_path_buf(), + common_git_dir, pre_command_base_commit: None, pre_command_refname: None, pre_reset_target_commit: None, @@ -2088,6 +2087,18 @@ pub fn from_bare_repository(git_dir: &Path) -> Result { }) } +fn resolve_common_git_dir(git_dir: &Path) -> PathBuf { + let commondir_path = git_dir.join("commondir"); + if let Ok(contents) = std::fs::read_to_string(&commondir_path) { + let relative = contents.trim(); + if !relative.is_empty() { + let resolved = git_dir.join(relative); + return resolved.canonicalize().unwrap_or(resolved); + } + } + git_dir.to_path_buf() +} + pub fn find_repository_in_path(path: &str) -> Result { let global_args = vec!["-C".to_string(), path.to_string()]; find_repository(&global_args) diff --git a/src/git/status.rs b/src/git/status.rs index 3ee2c0160..98fdb669a 100644 --- a/src/git/status.rs +++ b/src/git/status.rs @@ -141,6 +141,10 @@ impl Repository { if skip_untracked { args.push("--untracked-files=no".to_string()); + } else { + // Avoid directory-collapsed untracked entries like `nested/` so downstream + // text-file detection can reason about concrete files. + args.push("--untracked-files=all".to_string()); } // Add combined pathspecs as CLI args only if under the threshold; diff --git a/tests/ai_tab.rs b/tests/ai_tab.rs index c4b23ee9f..f248c168f 100644 --- a/tests/ai_tab.rs +++ b/tests/ai_tab.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -366,3 +367,12 @@ fn test_ai_tab_e2e_handles_dirty_files_map() { "}".ai(), ]); } + +worktree_test_wrappers! { + test_ai_tab_before_edit_checkpoint_includes_dirty_files, + test_ai_tab_after_edit_checkpoint_includes_dirty_files_and_paths, + test_ai_tab_rejects_invalid_hook_event, + test_ai_tab_requires_non_empty_tool_and_model, + test_ai_tab_e2e_marks_ai_lines, + test_ai_tab_e2e_handles_dirty_files_map, +} diff --git a/tests/amend.rs b/tests/amend.rs index 01d911a58..c0d1ebdb0 100644 --- a/tests/amend.rs +++ b/tests/amend.rs @@ -489,3 +489,16 @@ fn test_amend_repeated_round_trips_preserve_exact_line_authorship() { "// AI trailing note".ai() ]); } + +worktree_test_wrappers! { + test_amend_add_lines_at_top, + test_amend_add_lines_in_middle, + test_amend_add_lines_at_bottom, + test_amend_multiple_changes, + test_amend_with_unstaged_ai_code_in_other_file, + test_amend_preserves_unstaged_ai_attribution, + test_amend_with_multiple_files_mixed_staging, + test_amend_with_partially_staged_ai_file, + test_amend_with_partially_staged_mixed_content, + test_amend_with_unstaged_middle_section, +} diff --git a/tests/blame_flags.rs b/tests/blame_flags.rs index 10599af52..2d54e9ebf 100644 --- a/tests/blame_flags.rs +++ b/tests/blame_flags.rs @@ -1185,3 +1185,34 @@ fn test_blame_ai_human_author() { ] ); } + +worktree_test_wrappers! 
{ + test_blame_basic_format, + test_blame_line_range, + test_blame_porcelain_format, + test_blame_show_email, + test_blame_show_name, + test_blame_show_number, + test_blame_suppress_author, + test_blame_long_rev, + test_blame_raw_timestamp, + test_blame_abbrev, + test_blame_blank_boundary, + test_blame_show_root, + test_blame_date_format, + test_blame_multiple_flags, + test_blame_incremental_format, + test_blame_line_porcelain, + test_blame_with_ai_authorship, + test_blame_contents_from_stdin, + test_blame_mark_unknown_without_authorship_log, + test_blame_mark_unknown_mixed_commits, + test_blame_mark_unknown_backward_compatible, + test_blame_auto_detects_git_blame_ignore_revs_file, + test_blame_no_ignore_revs_file_flag_disables_auto_detection, + test_blame_explicit_ignore_revs_file_takes_precedence, + test_blame_respects_git_config_blame_ignore_revs_file, + test_blame_without_ignore_revs_file_works_normally, + test_blame_ignore_revs_with_multiple_commits, + test_blame_ai_human_author, +} diff --git a/tests/checkout_switch.rs b/tests/checkout_switch.rs index 97c90c011..09f85e1de 100644 --- a/tests/checkout_switch.rs +++ b/tests/checkout_switch.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -418,3 +419,17 @@ fn test_checkout_pathspec_multiple_files() { file_b.assert_lines_and_blame(vec!["Original B".human()]); file_c.assert_lines_and_blame(vec!["Modified C by AI".ai()]); } + +worktree_test_wrappers! { + test_checkout_branch_migrates_working_log, + test_checkout_force_deletes_working_log, + test_checkout_pathspec_removes_file_attributions, + test_switch_branch_migrates_working_log, + test_switch_discard_changes_deletes_working_log, + test_switch_force_flag_deletes_working_log, + test_checkout_merge_migrates_working_log, + test_switch_merge_migrates_working_log, + test_checkout_same_branch_no_op, + test_checkout_with_mixed_attribution, + test_checkout_pathspec_multiple_files, +} diff --git a/tests/checkpoint_size.rs b/tests/checkpoint_size.rs index 606747299..b050719ac 100644 --- a/tests/checkpoint_size.rs +++ b/tests/checkpoint_size.rs @@ -88,3 +88,7 @@ fn test_checkpoint_size_logging_large_ai_rewrites() { ); } } + +worktree_test_wrappers! { + test_checkpoint_size_logging_large_ai_rewrites, +} diff --git a/tests/cherry_pick.rs b/tests/cherry_pick.rs index 778c7b5f8..7a4ef104d 100644 --- a/tests/cherry_pick.rs +++ b/tests/cherry_pick.rs @@ -571,3 +571,14 @@ fn test_cherry_pick_empty_commits() { "File content should be preserved after cherry-pick/abort" ); } + +worktree_test_wrappers! { + test_single_commit_cherry_pick, + test_multiple_commits_cherry_pick, + test_cherry_pick_with_conflict_and_continue, + test_cherry_pick_abort, + test_cherry_pick_no_ai_authorship, + test_cherry_pick_multiple_ai_sessions, + test_cherry_pick_identical_trees, + test_cherry_pick_empty_commits, +} diff --git a/tests/chinese_text_edits.rs b/tests/chinese_text_edits.rs index 40dfa2b29..03667b82f 100644 --- a/tests/chinese_text_edits.rs +++ b/tests/chinese_text_edits.rs @@ -153,3 +153,10 @@ fn test_chinese_reflow_preserves_ai() { ")".ai(), ]); } + +worktree_test_wrappers! 
{ + test_chinese_simple_additions, + test_chinese_ai_then_human_edits, + test_chinese_deletions_and_insertions, + test_chinese_partial_staging, +} diff --git a/tests/ci_squash_rebase.rs b/tests/ci_squash_rebase.rs index 59042aa52..e386b23ee 100644 --- a/tests/ci_squash_rebase.rs +++ b/tests/ci_squash_rebase.rs @@ -481,3 +481,11 @@ fn test_ci_rebase_merge_multiple_commits() { "function human() { }".human() ]); } + +worktree_test_wrappers! { + test_ci_squash_merge_basic, + test_ci_squash_merge_multiple_files, + test_ci_squash_merge_mixed_content, + test_ci_squash_merge_with_manual_changes, + test_ci_rebase_merge_multiple_commits, +} diff --git a/tests/claude_code.rs b/tests/claude_code.rs index cd2beb97a..f47086a3d 100644 --- a/tests/claude_code.rs +++ b/tests/claude_code.rs @@ -366,3 +366,7 @@ fn test_user_text_content_blocks_are_parsed_correctly() { "Second message should be Assistant" ); } + +worktree_test_wrappers! { + test_claude_e2e_prefers_latest_checkpoint_for_prompts, +} diff --git a/tests/continue_cli.rs b/tests/continue_cli.rs index b772ee7a6..da619126c 100644 --- a/tests/continue_cli.rs +++ b/tests/continue_cli.rs @@ -765,3 +765,10 @@ fn test_continue_cli_e2e_preserves_model_on_commit() { ); assert_eq!(prompt_record.agent_id.tool, "continue-cli"); } + +worktree_test_wrappers! { + test_continue_cli_e2e_with_attribution, + test_continue_cli_e2e_human_checkpoint, + test_continue_cli_e2e_multiple_tool_calls, + test_continue_cli_e2e_preserves_model_on_commit, +} diff --git a/tests/cursor.rs b/tests/cursor.rs index 9b4ae1002..3cbeac0b2 100644 --- a/tests/cursor.rs +++ b/tests/cursor.rs @@ -572,3 +572,8 @@ fn test_cursor_e2e_with_resync() { // The temp directory and database will be automatically cleaned up when temp_dir goes out of scope } + +worktree_test_wrappers! { + test_cursor_e2e_with_attribution, + test_cursor_e2e_with_resync, +} diff --git a/tests/diff.rs b/tests/diff.rs index 8526be676..95d2291e3 100644 --- a/tests/diff.rs +++ b/tests/diff.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; use repos::test_repo::TestRepo; @@ -794,3 +795,22 @@ fn test_diff_range_multiple_commits() { "Should have attribution markers" ); } + +worktree_test_wrappers! { + test_diff_single_commit, + test_diff_commit_range, + test_diff_shows_ai_attribution, + test_diff_shows_human_attribution, + test_diff_multiple_files, + test_diff_initial_commit, + test_diff_pure_additions, + test_diff_pure_deletions, + test_diff_mixed_ai_and_human, + test_diff_with_head_ref, + test_diff_output_format, + test_diff_error_on_no_args, + test_diff_json_output_with_escaped_newlines, + test_diff_preserves_context_lines, + test_diff_exact_sequence_verification, + test_diff_range_multiple_commits, +} diff --git a/tests/gemini.rs b/tests/gemini.rs index 3ce45405e..872b5a156 100644 --- a/tests/gemini.rs +++ b/tests/gemini.rs @@ -899,3 +899,11 @@ fn test_gemini_e2e_partial_staging() { // ai_line5 is not committed because it's unstaged ]); } + +worktree_test_wrappers! 
{ + test_gemini_e2e_with_attribution, + test_gemini_e2e_human_checkpoint, + test_gemini_e2e_multiple_tool_calls, + test_gemini_e2e_with_resync, + test_gemini_e2e_partial_staging, +} diff --git a/tests/github_copilot_integration.rs b/tests/github_copilot_integration.rs index f2f43988d..52ab337ba 100644 --- a/tests/github_copilot_integration.rs +++ b/tests/github_copilot_integration.rs @@ -271,3 +271,12 @@ fn test_github_copilot_human_checkpoint_with_clean_file() { // The new line should be human file.assert_lines_and_blame(lines!["const x = 1;".human(), "const y = 2;".human(),]); } + +worktree_test_wrappers! { + test_github_copilot_human_checkpoint_before_edit, + test_github_copilot_human_checkpoint_scoped_to_files, + test_github_copilot_human_then_ai_checkpoint, + test_github_copilot_multiple_files_with_dirty_files, + test_github_copilot_empty_will_edit_filepaths_fails, + test_github_copilot_human_checkpoint_with_clean_file, +} diff --git a/tests/gix_config_tests.rs b/tests/gix_config_tests.rs index d11711efe..f418476a5 100644 --- a/tests/gix_config_tests.rs +++ b/tests/gix_config_tests.rs @@ -303,3 +303,18 @@ fn test_config_get_regexp_bare_repo() { assert_eq!(result.get("baretest.key1"), Some(&"value1".to_string())); assert_eq!(result.get("baretest.key2"), Some(&"value2".to_string())); } + +worktree_test_wrappers! { + test_config_get_str_simple_value, + test_config_get_str_subsection, + test_config_get_str_missing_key_returns_none, + test_config_get_str_special_chars, + test_config_get_regexp_subsection, + test_config_get_regexp_no_matches, + test_config_get_regexp_with_subsections, + test_config_get_regexp_case_insensitive_keys, + test_config_falls_back_to_global, + test_config_local_overrides_global, + test_config_get_str_bare_repo, + test_config_get_regexp_bare_repo, +} diff --git a/tests/ignore_prompts.rs b/tests/ignore_prompts.rs index 93767ae80..31718a844 100644 --- a/tests/ignore_prompts.rs +++ b/tests/ignore_prompts.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use git_ai::authorship::transcript::{AiTranscript, Message}; @@ -307,3 +308,10 @@ fn test_prompt_sharing_disabled_with_empty_transcript() { // The key thing is the checkpoint should succeed assert!(!commit.commit_sha.is_empty()); } + +worktree_test_wrappers! { + test_checkpoint_with_prompt_sharing_enabled, + test_checkpoint_with_prompt_sharing_disabled_strips_messages, + test_multiple_checkpoints_with_messages, + test_prompt_sharing_disabled_with_empty_transcript, +} diff --git a/tests/initial_attributions.rs b/tests/initial_attributions.rs index c1e1daf7d..dd9c0bee6 100644 --- a/tests/initial_attributions.rs +++ b/tests/initial_attributions.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use git_ai::authorship::attribution_tracker::LineAttribution; @@ -442,3 +443,11 @@ fn test_initial_attributions_in_subsequent_checkpoint() { assert_debug_snapshot!(normalized_b); } + +worktree_test_wrappers! { + test_initial_only_no_blame_data, + test_initial_wins_overlaps, + test_initial_and_blame_merge, + test_partial_file_coverage, + test_initial_attributions_in_subsequent_checkpoint, +} diff --git a/tests/internal_db_integration.rs b/tests/internal_db_integration.rs index 754bdb897..d78467cee 100644 --- a/tests/internal_db_integration.rs +++ b/tests/internal_db_integration.rs @@ -613,3 +613,16 @@ fn test_thinking_transcript_saves_to_internal_db_after_commit() { "Should have all messages including thinking" ); } + +worktree_test_wrappers! 
{ + test_checkpoint_saves_prompt_to_internal_db, + test_commit_updates_prompt_with_commit_sha_and_model, + test_post_commit_uses_latest_transcript_messages, + test_multiple_checkpoints_same_session_deduplicated, + test_different_sessions_create_separate_prompts, + test_line_stats_saved_to_db_after_commit, + test_human_author_saved_to_db_after_commit, + test_workdir_saved_to_db, + test_mock_ai_checkpoint_saves_to_internal_db, + test_thinking_transcript_saves_to_internal_db_after_commit, +} diff --git a/tests/merge_rebase.rs b/tests/merge_rebase.rs index b2214b8a1..1de4552f2 100644 --- a/tests/merge_rebase.rs +++ b/tests/merge_rebase.rs @@ -265,3 +265,8 @@ fn test_blame_after_merge_conflict_resolution() { "Line 10".human(), ]); } + +worktree_test_wrappers! { + test_blame_after_merge_with_ai_contributions, + test_blame_after_merge_conflict_resolution, +} diff --git a/tests/prompt_across_commit.rs b/tests/prompt_across_commit.rs index 5e1443446..4f0576952 100644 --- a/tests/prompt_across_commit.rs +++ b/tests/prompt_across_commit.rs @@ -60,3 +60,7 @@ fn test_change_across_commits() { assert_eq!(second_ai_entry.line_ranges, vec![LineRange::Single(6)]); assert_ne!(second_ai_entry.hash, initial_ai_entry.hash); } + +worktree_test_wrappers! { + test_change_across_commits, +} diff --git a/tests/prompt_hash_migration.rs b/tests/prompt_hash_migration.rs index d4293daae..606e57640 100644 --- a/tests/prompt_hash_migration.rs +++ b/tests/prompt_hash_migration.rs @@ -335,3 +335,9 @@ fn test_prompt_hash_migration_unstaged_ai_lines_saved_to_working_log() { "ai_line7".ai(), ]); } + +worktree_test_wrappers! { + test_prompt_hash_migration_ai_adds_lines_multiple_commits, + test_prompt_hash_migration_ai_adds_then_commits_in_batches, + test_prompt_hash_migration_unstaged_ai_lines_saved_to_working_log, +} diff --git a/tests/pull_rebase_ff.rs b/tests/pull_rebase_ff.rs index 1328fcc39..934aa9a5c 100644 --- a/tests/pull_rebase_ff.rs +++ b/tests/pull_rebase_ff.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -557,3 +558,11 @@ fn test_failed_pull_rebase_without_autostash_does_not_leak_stale_ai_metadata() { "stale pull-autostash attribution leaked into later human-only commit" ); } + +worktree_test_wrappers! { + test_fast_forward_pull_preserves_ai_attribution, + test_pull_rebase_autostash_preserves_uncommitted_ai_attribution, + test_pull_rebase_autostash_with_mixed_attribution, + test_pull_rebase_autostash_via_git_config, + test_fast_forward_pull_without_local_changes, +} diff --git a/tests/realistic_complex_edits.rs b/tests/realistic_complex_edits.rs index 7dae483e6..8a145b744 100644 --- a/tests/realistic_complex_edits.rs +++ b/tests/realistic_complex_edits.rs @@ -1636,3 +1636,17 @@ pub fn get_user_by_email(email: &str) -> Option { "CREATE INDEX idx_users_email ON users(email);".ai(), ]); } + +worktree_test_wrappers! 
{ + test_realistic_refactoring_sequence, + test_realistic_api_endpoint_expansion, + test_realistic_test_file_evolution, + test_realistic_config_file_with_comments, + test_realistic_jsx_component_development, + test_realistic_class_with_multiple_methods, + test_realistic_middleware_chain_development, + test_realistic_sql_migration_sequence, + test_realistic_refactoring_with_deletions, + test_realistic_formatting_and_whitespace_changes, + test_realistic_multi_file_commit, +} diff --git a/tests/rebase.rs b/tests/rebase.rs index 3c378dfd5..5087eabfc 100644 --- a/tests/rebase.rs +++ b/tests/rebase.rs @@ -1492,3 +1492,27 @@ cat {} > "$1" "function feature3() {}".ai() ]); } + +worktree_test_wrappers! { + test_rebase_no_conflicts_identical_trees, + test_rebase_with_different_trees, + test_rebase_multiple_commits, + test_rebase_mixed_authorship, + test_rebase_fast_forward, + test_rebase_interactive_reorder, + test_rebase_skip, + test_rebase_keep_empty, + test_rebase_rerere, + test_rebase_patch_stack, + test_rebase_already_up_to_date, + test_rebase_with_conflicts, + test_rebase_abort, + test_rebase_branch_switch_during, + test_rebase_autosquash, + test_rebase_autostash, + test_rebase_exec, + test_rebase_preserve_merges, + test_rebase_commit_splitting, + test_rebase_squash_preserves_all_authorship, + test_rebase_reword_commit_with_children, +} diff --git a/tests/repos/mod.rs b/tests/repos/mod.rs index 697f267e1..f021505e0 100644 --- a/tests/repos/mod.rs +++ b/tests/repos/mod.rs @@ -11,6 +11,12 @@ macro_rules! subdir_test_variants { #[test] fn []() $body + // Variant 1b: Run from subdirectory on a worktree + #[test] + fn []() { + $crate::repos::test_repo::with_worktree_mode(|| $body); + } + // Variant 2: Run with -C flag from arbitrary directory #[test] fn []() { @@ -66,6 +72,120 @@ macro_rules! subdir_test_variants { type TestRepo = TestRepoWithCFlag; $body } + + // Variant 2b: Run with -C flag from arbitrary directory on a worktree + #[test] + fn []() { + $crate::repos::test_repo::with_worktree_mode(|| { + // Wrapper struct that intercepts git calls to use -C flag + struct TestRepoWithCFlag { + inner: $crate::repos::test_repo::TestRepo, + } + + #[allow(dead_code)] + impl TestRepoWithCFlag { + fn new() -> Self { + Self { inner: $crate::repos::test_repo::TestRepo::new() } + } + + fn git_from_working_dir( + &self, + _working_dir: &std::path::Path, + args: &[&str], + ) -> Result { + // Prepend -C to args and run from arbitrary directory + let arbitrary_dir = std::env::temp_dir(); + self.inner + .git_with_env_using_c_flag(args, &[], &arbitrary_dir) + } + + fn git_with_env( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&std::path::Path>, + ) -> Result { + if working_dir.is_some() { + // If working_dir is specified, prepend -C and run from arbitrary dir + let arbitrary_dir = std::env::temp_dir(); + self.inner + .git_with_env_using_c_flag(args, envs, &arbitrary_dir) + } else { + // No working_dir, use normal behavior + self.inner.git_with_env(args, envs, None) + } + } + } + + // Forward all other methods via Deref + impl std::ops::Deref for TestRepoWithCFlag { + type Target = $crate::repos::test_repo::TestRepo; + fn deref(&self) -> &Self::Target { + &self.inner + } + } + + // Type alias to shadow TestRepo + type TestRepo = TestRepoWithCFlag; + $body + }); + } + } + }; +} + +#[macro_export] +macro_rules! worktree_test_variants { + ( + fn $test_name:ident() $body:block + ) => { + paste::paste! 
{ + // Variant 1: Run against a normal repo (baseline behavior) + #[test] + fn []() $body + + // Variant 2: Run against a linked worktree + #[test] + fn []() { + // Wrapper struct that keeps the base repo alive while exposing worktree APIs. + struct TestRepoWithWorktree { + _base: $crate::repos::test_repo::TestRepo, + worktree: $crate::repos::test_repo::WorktreeRepo, + } + + impl TestRepoWithWorktree { + fn new() -> Self { + let base = $crate::repos::test_repo::TestRepo::new(); + let worktree = base.add_worktree(stringify!($test_name)); + Self { _base: base, worktree } + } + } + + impl std::ops::Deref for TestRepoWithWorktree { + type Target = $crate::repos::test_repo::WorktreeRepo; + fn deref(&self) -> &Self::Target { + &self.worktree + } + } + + // Type alias to shadow TestRepo + type TestRepo = TestRepoWithWorktree; + $body + } + } + }; +} + +#[macro_export] +macro_rules! worktree_test_wrappers { + ( $( $test_name:ident ),+ $(,)? ) => { + paste::paste! { + $( + #[test] + fn [<$test_name _on_worktree>]() { + $crate::repos::test_repo::with_worktree_mode(|| $test_name()); + } + )+ } }; } diff --git a/tests/repos/test_repo.rs b/tests/repos/test_repo.rs index b157cd870..80e49a35d 100644 --- a/tests/repos/test_repo.rs +++ b/tests/repos/test_repo.rs @@ -9,8 +9,9 @@ use git_ai::git::repo_storage::PersistedWorkingLog; use git_ai::git::repository as GitAiRepository; use git_ai::observability::wrapper_performance_targets::BenchmarkResult; use git2::Repository; -use insta::assert_debug_snapshot; +use insta::{Settings, assert_debug_snapshot}; use rand::Rng; +use std::cell::Cell; use std::fs; use std::path::{Path, PathBuf}; use std::process::{Command, Output}; @@ -61,6 +62,8 @@ pub struct TestRepo { test_db_path: PathBuf, git_mode: TestGitMode, core_hooks_dir: Option, + base_path: Option, + base_test_db_path: Option, } #[allow(dead_code)] @@ -94,7 +97,7 @@ impl TestRepo { }); } - pub fn new() -> Self { + fn new_base_repo() -> Self { let mut rng = rand::thread_rng(); let n: u64 = rng.gen_range(0..10000000000); let base = std::env::temp_dir(); @@ -118,6 +121,8 @@ impl TestRepo { test_db_path, git_mode: test_git_mode(), core_hooks_dir, + base_path: None, + base_test_db_path: None, }; repo.apply_default_config_patch(); @@ -126,6 +131,63 @@ impl TestRepo { repo } + pub fn new() -> Self { + if WORKTREE_MODE.with(|flag| flag.get()) { + return Self::new_worktree_variant(); + } + + Self::new_base_repo() + } + + fn new_worktree_variant() -> Self { + let base_repo = Self::new_base_repo(); + base_repo.ensure_head_commit(); + + // Keep the base worktree off the default branch so tests can freely mutate it. 
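+        // (Git refuses to check out a branch that is already checked out in another
+        // worktree, so parking the base checkout on a throwaway branch leaves the
+        // default branch free for tests running inside the linked worktree.)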
+ let base_branch = base_repo.current_branch(); + if !base_branch.is_empty() { + let mut rng = rand::thread_rng(); + let n: u64 = rng.gen_range(0..10000000000); + let temp_branch = format!("base-worktree-{}", n); + base_repo + .git_og(&["checkout", "-b", &temp_branch]) + .expect("failed to create base worktree branch"); + } + + let worktree = base_repo.add_worktree("auto"); + + let base_path = base_repo.path.clone(); + let base_test_db_path = base_repo.test_db_path.clone(); + let feature_flags = base_repo.feature_flags.clone(); + let config_patch = base_repo.config_patch.clone(); + let git_mode = base_repo.git_mode; + let core_hooks_dir = base_repo.core_hooks_dir.clone(); + + let worktree_path = worktree.path.clone(); + let worktree_test_db_path = worktree.test_db_path.clone(); + + std::mem::forget(base_repo); + std::mem::forget(worktree); + + Self { + path: worktree_path, + feature_flags, + config_patch, + test_db_path: worktree_test_db_path, + git_mode, + core_hooks_dir, + base_path: Some(base_path), + base_test_db_path: Some(base_test_db_path), + } + } + + fn ensure_head_commit(&self) { + if self.git_og(&["rev-parse", "--verify", "HEAD"]).is_err() { + self.git_og(&["commit", "--allow-empty", "-m", "initial"]) + .expect("failed to create initial commit for worktree"); + } + } + /// Create a standalone bare repository for testing pub fn new_bare() -> Self { let mut rng = rand::thread_rng(); @@ -144,6 +206,8 @@ impl TestRepo { test_db_path, git_mode: test_git_mode(), core_hooks_dir, + base_path: None, + base_test_db_path: None, }; repo.initialize_core_hooks_if_needed(); repo @@ -183,6 +247,8 @@ impl TestRepo { test_db_path: upstream_test_db_path, git_mode: test_git_mode(), core_hooks_dir: upstream_core_hooks_dir, + base_path: None, + base_test_db_path: None, }; // Clone upstream to create mirror with origin configured @@ -227,6 +293,8 @@ impl TestRepo { test_db_path: mirror_test_db_path, git_mode: test_git_mode(), core_hooks_dir: mirror_core_hooks_dir, + base_path: None, + base_test_db_path: None, }; upstream.apply_default_config_patch(); @@ -258,6 +326,8 @@ impl TestRepo { test_db_path, git_mode: test_git_mode(), core_hooks_dir, + base_path: None, + base_test_db_path: None, }; repo.apply_default_config_patch(); repo.initialize_core_hooks_if_needed(); @@ -268,6 +338,51 @@ impl TestRepo { self.feature_flags = feature_flags; } + pub fn add_worktree(&self, name: &str) -> WorktreeRepo { + self.add_worktree_with_branch(name, None) + } + + pub fn add_worktree_with_branch(&self, name: &str, branch: Option<&str>) -> WorktreeRepo { + self.ensure_head_commit(); + + let mut rng = rand::thread_rng(); + let n: u64 = rng.gen_range(0..10000000000); + let worktree_path = std::env::temp_dir().join(format!("{}-worktree-{}", n, name)); + + let branch_name = branch + .map(|b| b.to_string()) + .unwrap_or_else(|| format!("worktree-{}-{}", name, n)); + + let branch_ref = format!("refs/heads/{}", branch_name); + let branch_exists = self.git_og(&["show-ref", "--verify", &branch_ref]).is_ok(); + + let mut args = vec!["worktree", "add"]; + if branch_exists { + args.push(worktree_path.to_str().expect("valid path")); + args.push(branch_name.as_str()); + } else { + args.push("-b"); + args.push(branch_name.as_str()); + args.push(worktree_path.to_str().expect("valid path")); + } + + self.git_og(&args).expect("failed to add worktree"); + + let db_n: u64 = rng.gen_range(0..10000000000); + let test_db_path = std::env::temp_dir().join(format!("{}-db", db_n)); + + WorktreeRepo { + base_path: self.path.clone(), + path: 
worktree_path, + worktree_name: branch_name, + feature_flags: self.feature_flags.clone(), + config_patch: self.config_patch.clone(), + test_db_path, + git_mode: self.git_mode, + core_hooks_dir: self.core_hooks_dir.clone(), + } + } + /// Patch the git-ai config for this test repo /// Allows overriding specific config properties like ignore_prompts, telemetry settings, etc. /// The patch is applied via environment variable when running git-ai commands @@ -752,8 +867,363 @@ impl TestRepo { } } +#[derive(Clone, Debug)] +pub struct WorktreeRepo { + base_path: PathBuf, + path: PathBuf, + worktree_name: String, + pub feature_flags: FeatureFlags, + pub(crate) config_patch: Option, + test_db_path: PathBuf, + git_mode: TestGitMode, + core_hooks_dir: Option, +} + +impl WorktreeRepo { + pub fn path(&self) -> &PathBuf { + &self.path + } + + pub fn base_path(&self) -> &PathBuf { + &self.base_path + } + + pub fn worktree_name(&self) -> &str { + &self.worktree_name + } + + pub fn canonical_path(&self) -> PathBuf { + self.path + .canonicalize() + .expect("failed to canonicalize worktree path") + } + + pub fn test_db_path(&self) -> &PathBuf { + &self.test_db_path + } + + pub fn current_branch(&self) -> String { + self.git(&["branch", "--show-current"]) + .unwrap() + .trim() + .to_string() + } + + pub fn git_ai(&self, args: &[&str]) -> Result { + self.git_ai_with_env(args, &[]) + } + + pub fn git(&self, args: &[&str]) -> Result { + self.git_with_env(args, &[], None) + } + + pub fn git_from_working_dir( + &self, + working_dir: &std::path::Path, + args: &[&str], + ) -> Result { + self.git_with_env(args, &[], Some(working_dir)) + } + + pub fn git_ai_from_working_dir( + &self, + working_dir: &std::path::Path, + args: &[&str], + ) -> Result { + let binary_path = get_binary_path(); + + let mut command = Command::new(binary_path); + + let absolute_working_dir = working_dir.canonicalize().map_err(|e| { + format!( + "Failed to canonicalize working directory {}: {}", + working_dir.display(), + e + ) + })?; + command.args(args).current_dir(&absolute_working_dir); + + if let Some(patch) = &self.config_patch + && let Ok(patch_json) = serde_json::to_string(patch) + { + command.env("GIT_AI_TEST_CONFIG_PATCH", patch_json); + } + + command.env("GIT_AI_TEST_DB_PATH", self.test_db_path.to_str().unwrap()); + + let output = command + .output() + .unwrap_or_else(|_| panic!("Failed to execute git-ai command: {:?}", args)); + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() { + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + }; + Ok(combined) + } else { + Err(stderr) + } + } + + pub fn git_og(&self, args: &[&str]) -> Result { + let mut full_args: Vec = + vec!["-C".to_string(), self.path.to_str().unwrap().to_string()]; + full_args.extend(args.iter().map(|s| s.to_string())); + + GitAiRepository::exec_git(&full_args) + .map(|output| { + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + } + }) + .map_err(|e| e.to_string()) + } + + fn run_git_command( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&Path>, + force_c_flag: bool, + ) -> Result { + let mut command = self.build_git_command(args, envs, 
working_dir, force_c_flag)?; + let output = command + .output() + .unwrap_or_else(|_| panic!("Failed to execute git command: {:?}", args)); + Self::command_output_to_result(output) + } + + fn build_git_command( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&Path>, + force_c_flag: bool, + ) -> Result { + let mut command = if self.git_mode.uses_wrapper() { + Command::new(get_binary_path()) + } else { + Command::new(git_ai::config::Config::get().git_cmd()) + }; + + let mut full_args: Vec = Vec::new(); + + if self.git_mode.uses_core_hooks() { + let hooks_dir = self.core_hooks_dir.as_ref().ok_or_else(|| { + "core hooks mode is enabled but no hooks dir is configured".to_string() + })?; + full_args.push("-c".to_string()); + full_args.push(format!("core.hooksPath={}", hooks_dir.display())); + } + + if force_c_flag || working_dir.is_none() { + full_args.push("-C".to_string()); + full_args.push(self.path.to_str().unwrap().to_string()); + } + + full_args.extend(args.iter().map(|arg| arg.to_string())); + command.args(&full_args); + + if let Some(working_dir_path) = working_dir { + let absolute_working_dir = working_dir_path.canonicalize().map_err(|e| { + format!( + "Failed to canonicalize working directory {}: {}", + working_dir_path.display(), + e + ) + })?; + command.current_dir(absolute_working_dir); + } + + if self.git_mode.uses_wrapper() { + command.env("GIT_AI", "git"); + } + + if let Some(patch) = &self.config_patch + && let Ok(patch_json) = serde_json::to_string(patch) + { + command.env("GIT_AI_TEST_CONFIG_PATCH", patch_json); + } + + command.env("GIT_AI_TEST_DB_PATH", self.test_db_path.to_str().unwrap()); + for (key, value) in envs { + command.env(key, value); + } + + Ok(command) + } + + fn command_output_to_result(output: Output) -> Result { + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() { + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + }; + Ok(combined) + } else if stderr.is_empty() { + Err(stdout) + } else { + Err(stderr) + } + } + + pub fn git_with_env( + &self, + args: &[&str], + envs: &[(&str, &str)], + working_dir: Option<&std::path::Path>, + ) -> Result { + self.run_git_command(args, envs, working_dir, false) + } + + pub fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result { + let binary_path = get_binary_path(); + + let mut command = Command::new(binary_path); + command.args(args).current_dir(&self.path); + + if let Some(patch) = &self.config_patch + && let Ok(patch_json) = serde_json::to_string(patch) + { + command.env("GIT_AI_TEST_CONFIG_PATCH", patch_json); + } + + command.env("GIT_AI_TEST_DB_PATH", self.test_db_path.to_str().unwrap()); + + for (key, value) in envs { + command.env(key, value); + } + + let output = command + .output() + .unwrap_or_else(|_| panic!("Failed to execute git-ai command: {:?}", args)); + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() { + let combined = if stdout.is_empty() { + stderr + } else if stderr.is_empty() { + stdout + } else { + format!("{}{}", stdout, stderr) + }; + Ok(combined) + } else { + Err(stderr) + } + } + + pub fn commit(&self, message: &str) -> Result { + self.commit_with_env(message, &[], None) + } + + pub fn stage_all_and_commit(&self, message: &str) -> Result { + 
self.git(&["add", "-A"]).expect("add --all should succeed"); + self.commit(message) + } + + pub fn commit_with_env( + &self, + message: &str, + envs: &[(&str, &str)], + working_dir: Option<&std::path::Path>, + ) -> Result { + let output = self.git_with_env(&["commit", "-m", message], envs, working_dir); + + match output { + Ok(combined) => { + let repo = GitAiRepository::find_repository_in_path(self.path.to_str().unwrap()) + .map_err(|e| format!("Failed to find repository: {}", e))?; + + let head_commit = repo + .head() + .map_err(|e| format!("Failed to get HEAD: {}", e))? + .target() + .map_err(|e| format!("Failed to get HEAD target: {}", e))?; + + let authorship_log = + match git_ai::git::refs::show_authorship_note(&repo, &head_commit) { + Some(content) => AuthorshipLog::deserialize_from_string(&content) + .map_err(|e| format!("Failed to parse authorship log: {}", e))?, + None => { + return Err("No authorship log found for the new commit".to_string()); + } + }; + + Ok(NewCommit { + commit_sha: head_commit, + authorship_log, + stdout: combined, + }) + } + Err(e) => Err(e), + } + } +} + +impl Drop for WorktreeRepo { + fn drop(&mut self) { + let _ = Command::new(git_ai::config::Config::get().git_cmd()) + .args([ + "-C", + self.base_path.to_str().unwrap(), + "worktree", + "remove", + "--force", + self.path.to_str().unwrap(), + ]) + .output(); + let _ = fs::remove_dir_all(self.path.clone()); + let _ = fs::remove_dir_all(self.test_db_path.clone()); + } +} + impl Drop for TestRepo { fn drop(&mut self) { + if let Some(base_path) = &self.base_path { + let _ = Command::new(git_ai::config::Config::get().git_cmd()) + .args([ + "-C", + base_path.to_str().unwrap(), + "worktree", + "remove", + "--force", + self.path.to_str().unwrap(), + ]) + .output(); + let _ = fs::remove_dir_all(self.path.clone()); + let _ = fs::remove_dir_all(self.test_db_path.clone()); + let _ = fs::remove_dir_all(base_path.clone()); + if let Some(base_test_db_path) = &self.base_test_db_path { + let _ = fs::remove_dir_all(base_test_db_path.clone()); + } + if let Some(core_hooks_dir) = &self.core_hooks_dir { + let _ = fs::remove_dir_all(core_hooks_dir); + } + return; + } + fs::remove_dir_all(self.path.clone()).expect("failed to remove test repo"); // Also clean up the test database directory (may not exist if no DB operations were done) let _ = fs::remove_dir_all(self.test_db_path.clone()); @@ -782,6 +1252,32 @@ impl NewCommit { static COMPILED_BINARY: OnceLock = OnceLock::new(); static DEFAULT_BRANCH_NAME: OnceLock = OnceLock::new(); +thread_local! { + static WORKTREE_MODE: Cell = const { Cell::new(false) }; +} + +pub fn with_worktree_mode(f: F) -> R +where + F: FnOnce() -> R, +{ + WORKTREE_MODE.with(|flag| { + let previous = flag.replace(true); + struct Reset<'a> { + flag: &'a Cell, + previous: bool, + } + impl<'a> Drop for Reset<'a> { + fn drop(&mut self) { + self.flag.set(self.previous); + } + } + let _reset = Reset { flag, previous }; + + let mut settings = Settings::clone_current(); + settings.set_snapshot_suffix("worktree"); + settings.bind(f) + }) +} fn get_default_branch_name() -> String { let output = Command::new("git") diff --git a/tests/reset.rs b/tests/reset.rs index e41f3ed10..b8931239f 100644 --- a/tests/reset.rs +++ b/tests/reset.rs @@ -565,3 +565,20 @@ fn test_reset_soft_detached_head_preserves_ai_authorship() { .unwrap(); file.assert_lines_and_blame(vec!["base line".human(), "ai line".ai()]); } + +worktree_test_wrappers! 
{ + test_reset_hard_deletes_working_log, + test_reset_soft_reconstructs_working_log, + test_reset_mixed_reconstructs_working_log, + test_reset_to_same_commit_is_noop, + test_reset_multiple_commits, + test_reset_preserves_uncommitted_changes, + test_reset_with_pathspec, + test_reset_forward_is_noop, + test_reset_mixed_ai_human_changes, + test_reset_merge, + test_reset_with_new_files, + test_reset_with_deleted_files, + test_reset_mixed_pathspec_preserves_ai_authorship, + test_reset_mixed_pathspec_multiple_commits, +} diff --git a/tests/show_prompt.rs b/tests/show_prompt.rs index 214940b21..1bb18e609 100644 --- a/tests/show_prompt.rs +++ b/tests/show_prompt.rs @@ -167,3 +167,18 @@ fn show_prompt_with_offset_skips_occurrences() { err ); } + +worktree_test_wrappers! { + parse_args_requires_prompt_id, + parse_args_parses_basic_id, + parse_args_parses_commit_flag, + parse_args_parses_offset_flag, + parse_args_rejects_commit_and_offset_together, + parse_args_rejects_multiple_prompt_ids, + parse_args_requires_commit_value, + parse_args_requires_offset_value, + parse_args_rejects_invalid_offset, + parse_args_rejects_unknown_flag, + show_prompt_returns_latest_prompt_by_default, + show_prompt_with_offset_skips_occurrences, +} diff --git a/tests/simple_additions.rs b/tests/simple_additions.rs index 9d37d2be3..2ef05c597 100644 --- a/tests/simple_additions.rs +++ b/tests/simple_additions.rs @@ -1271,3 +1271,30 @@ fn test_ai_edits_file_with_spaces_in_filename() { "Line 3".human(), ]); } + +worktree_test_wrappers! { + test_simple_additions_empty_repo, + test_simple_additions_with_base_commit, + test_simple_additions_on_top_of_ai_contributions, + test_simple_additions_new_file_not_git_added, + test_ai_human_interleaved_line_attribution, + test_simple_ai_then_human_deletion, + test_multiple_ai_checkpoints_with_human_deletions, + test_complex_mixed_additions_and_deletions, + test_ai_adds_lines_multiple_commits, + test_partial_staging_filters_unstaged_lines, + test_human_stages_some_ai_lines, + test_multiple_ai_sessions_with_partial_staging, + test_ai_adds_then_commits_in_batches, + test_ai_edits_with_partial_staging, + test_unstaged_changes_not_committed, + test_unstaged_ai_lines_saved_to_working_log, + test_new_file_partial_staging_two_commits, + test_mock_ai_with_pathspecs, + test_with_duplicate_lines, + test_ai_deletion_with_human_checkpoint_in_same_commit, + test_large_ai_readme_rewrite_with_no_data_bug, + test_deletion_within_a_single_line_attribution, + test_deletion_of_multiple_lines_by_ai, + test_ai_edits_file_with_spaces_in_filename, +} diff --git a/tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap b/tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap new file mode 100644 index 000000000..2d5832dc8 --- /dev/null +++ b/tests/snapshots/initial_attributions__initial_and_blame_merge@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 284 +expression: normalized +--- +"COMMIT_SHA (tool1 TIMESTAMP 1) line 1\nCOMMIT_SHA (tool1 TIMESTAMP 2) line 2\nCOMMIT_SHA (tool1 TIMESTAMP 3) line 3\nCOMMIT_SHA (mock_ai TIMESTAMP 4) line 4\nCOMMIT_SHA (tool2 TIMESTAMP 5) line 5\nCOMMIT_SHA (mock_ai TIMESTAMP 6) line 6\nCOMMIT_SHA (mock_ai TIMESTAMP 7) line 7\n" diff --git a/tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap b/tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap new file mode 100644 index 000000000..41e4453a2 --- /dev/null +++ 
b/tests/snapshots/initial_attributions__initial_attributions_in_subsequent_checkpoint@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 455 +expression: normalized_b +--- +"COMMIT_SHA (subsequent-tool TIMESTAMP 1) line 1 from INITIAL\nCOMMIT_SHA (subsequent-tool TIMESTAMP 2) line 2 from INITIAL\n" diff --git a/tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap b/tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap new file mode 100644 index 000000000..539f359b3 --- /dev/null +++ b/tests/snapshots/initial_attributions__initial_only_no_blame_data@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 121 +expression: normalized +--- +"COMMIT_SHA (test-tool TIMESTAMP 1) line 1 from INITIAL\nCOMMIT_SHA (test-tool TIMESTAMP 2) line 2 from INITIAL\nCOMMIT_SHA (test-tool TIMESTAMP 3) line 3 from INITIAL\n" diff --git a/tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap b/tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap new file mode 100644 index 000000000..3bf171c54 --- /dev/null +++ b/tests/snapshots/initial_attributions__initial_wins_overlaps@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 190 +expression: normalized +--- +"COMMIT_SHA (override-tool TIMESTAMP 1) line 1\nCOMMIT_SHA (override-tool TIMESTAMP 2) line 2\nCOMMIT_SHA (Test User TIMESTAMP 3) line 3 modified\n" diff --git a/tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap b/tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap new file mode 100644 index 000000000..83eda3c99 --- /dev/null +++ b/tests/snapshots/initial_attributions__partial_file_coverage@worktree-2.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 363 +expression: normalized_b +--- +"COMMIT_SHA (mock_ai TIMESTAMP 1) line 1 in B\nCOMMIT_SHA (mock_ai TIMESTAMP 2) line 2 in B\n" diff --git a/tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap b/tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap new file mode 100644 index 000000000..cd3db89a5 --- /dev/null +++ b/tests/snapshots/initial_attributions__partial_file_coverage@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/initial_attributions.rs +assertion_line: 356 +expression: normalized_a +--- +"COMMIT_SHA (toolA TIMESTAMP 1) line 1 in A\nCOMMIT_SHA (toolA TIMESTAMP 2) line 2 in A\n" diff --git a/tests/snapshots/stats__markdown_stats_all_ai@worktree.snap b/tests/snapshots/stats__markdown_stats_all_ai@worktree.snap new file mode 100644 index 000000000..f2ae57eb5 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_all_ai@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 280 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 0%\nšŸ¤– ai ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 100%\n```\n\n
\nMore stats\n\n- 1.0 lines generated for every 1 accepted\n- 30 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_all_human@worktree.snap b/tests/snapshots/stats__markdown_stats_all_human@worktree.snap new file mode 100644 index 000000000..220fcb02b --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_all_human@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 257 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 100%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 0%\n```\n\n
\nMore stats\n\n- 0.0 lines generated for every 1 accepted\n- 0 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap b/tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap new file mode 100644 index 000000000..7bf4943f8 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_deletion_only@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 234 +expression: markdown +--- +"(no additions)\n" diff --git a/tests/snapshots/stats__markdown_stats_formatting@worktree.snap b/tests/snapshots/stats__markdown_stats_formatting@worktree.snap new file mode 100644 index 000000000..6dd771318 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_formatting@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 386 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 38%\nšŸ¤ mixed ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 15%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 46%\n```\n\n
\nMore stats\n\n- 1.7 lines generated for every 1 accepted\n- 25 seconds waiting for AI \n- Top model: cursor::claude-3.5-sonnet (6 accepted lines, 10 generated lines)\n\n
" diff --git a/tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap b/tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap new file mode 100644 index 000000000..2dd7fb618 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_minimal_human@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 350 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 2%\nšŸ¤– ai ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 98%\n```\n\n
\nMore stats\n\n- 1.0 lines generated for every 1 accepted\n- 10 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_mixed@worktree.snap b/tests/snapshots/stats__markdown_stats_mixed@worktree.snap new file mode 100644 index 000000000..67c1cead6 --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_mixed@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 303 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 33%\nšŸ¤ mixed ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 17%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 50%\n```\n\n
\nMore stats\n\n- 1.7 lines generated for every 1 accepted\n- 45 seconds waiting for AI \n\n
" diff --git a/tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap b/tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap new file mode 100644 index 000000000..eee58d61a --- /dev/null +++ b/tests/snapshots/stats__markdown_stats_no_mixed@worktree.snap @@ -0,0 +1,6 @@ +--- +source: tests/stats.rs +assertion_line: 326 +expression: markdown +--- +"Stats powered by [Git AI](https://github.com/git-ai-project/git-ai)\n\n```text\n🧠 you ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ 40%\nšŸ¤– ai ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–‘ā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆā–ˆ 60%\n```\n\n
\nMore stats\n\n- 1.0 lines generated for every 1 accepted\n- 15 seconds waiting for AI \n\n
" diff --git a/tests/squash_merge.rs b/tests/squash_merge.rs index 7d6c31c73..e013e1808 100644 --- a/tests/squash_merge.rs +++ b/tests/squash_merge.rs @@ -293,3 +293,10 @@ fn test_prepare_working_log_squash_with_mixed_additions() { "Sum of accepted_lines across prompts should match ai_accepted stat" ); } + +worktree_test_wrappers! { + test_prepare_working_log_simple_squash, + test_prepare_working_log_squash_with_main_changes, + test_prepare_working_log_squash_multiple_sessions, + test_prepare_working_log_squash_with_mixed_additions, +} diff --git a/tests/stash_attribution.rs b/tests/stash_attribution.rs index 4f17f6223..27a500f3a 100644 --- a/tests/stash_attribution.rs +++ b/tests/stash_attribution.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use repos::test_file::ExpectedLineExt; @@ -994,3 +995,23 @@ fn test_stash_apply_reset_apply_again() { "Expected AI prompts in authorship log after multiple apply/reset cycles" ); } + +worktree_test_wrappers! { + test_stash_pop_with_ai_attribution, + test_stash_apply_with_ai_attribution, + test_stash_apply_named_reference, + test_stash_multiple_files, + test_stash_with_existing_initial_attributions, + test_stash_pop_default_reference, + test_stash_pop_empty_repo, + test_stash_mixed_human_and_ai, + test_stash_push_with_pathspec_single_file, + test_stash_push_with_pathspec_directory, + test_stash_push_multiple_pathspecs, + test_stash_pop_with_conflict, + test_stash_mixed_staged_and_unstaged, + test_stash_pop_onto_head_with_ai_changes, + test_stash_pop_across_branches, + test_stash_pop_across_branches_with_conflict, + test_stash_apply_reset_apply_again, +} diff --git a/tests/stats.rs b/tests/stats.rs index 6dc8c24c7..52049bb22 100644 --- a/tests/stats.rs +++ b/tests/stats.rs @@ -1,3 +1,4 @@ +#[macro_use] mod repos; use git_ai::authorship::stats::CommitStats; use insta::assert_debug_snapshot; @@ -190,6 +191,11 @@ fn test_stats_cli_range() { #[test] fn test_stats_cli_empty_tree_range() { let repo = TestRepo::new(); + let base_commit_count = repo + .git(&["rev-list", "--count", "HEAD"]) + .ok() + .and_then(|count| count.trim().parse::().ok()) + .unwrap_or(0); // First commit: AI line let mut file = repo.filename("history.txt"); @@ -218,9 +224,9 @@ fn test_stats_cli_empty_tree_range() { serde_json::from_str(&output).unwrap(); // Entire history from empty tree to HEAD: - // - 2 commits in range + // - base commits (e.g., worktree bootstrap) + 2 new commits in range // - 1 AI-added line, 1 human-added line in final diff - assert_eq!(stats.authorship_stats.total_commits, 2); + assert_eq!(stats.authorship_stats.total_commits, base_commit_count + 2); assert_eq!(stats.range_stats.git_diff_added_lines, 2); assert_eq!(stats.range_stats.ai_additions, 1); // human_additions is computed as git_diff_added_lines - ai_accepted @@ -620,3 +626,16 @@ fn test_post_commit_large_ignored_files_do_not_trigger_skip_warning() { assert_eq!(stats.ai_additions, 0); assert_eq!(stats.human_additions, 0); } + +worktree_test_wrappers! 
{ + test_authorship_log_stats, + test_stats_cli_range, + test_stats_cli_empty_tree_range, + test_markdown_stats_deletion_only, + test_markdown_stats_all_human, + test_markdown_stats_all_ai, + test_markdown_stats_mixed, + test_markdown_stats_no_mixed, + test_markdown_stats_minimal_human, + test_markdown_stats_formatting, +} diff --git a/tests/worktrees.rs b/tests/worktrees.rs new file mode 100644 index 000000000..a61c35f8c --- /dev/null +++ b/tests/worktrees.rs @@ -0,0 +1,853 @@ +#[macro_use] +mod repos; + +use std::fs; +use std::path::PathBuf; +use std::process::Command; + +use git_ai::authorship::stats::CommitStats; +use git_ai::git::group_files_by_repository; +use rand::Rng; +use serde::Deserialize; +use serde_json::Value; + +use repos::test_repo::{NewCommit, TestRepo, WorktreeRepo, default_branchname}; + +trait RepoOps { + fn path(&self) -> &PathBuf; + fn git(&self, args: &[&str]) -> Result; + fn git_ai(&self, args: &[&str]) -> Result; + fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result; + fn commit(&self, message: &str) -> Result; +} + +impl RepoOps for TestRepo { + fn path(&self) -> &PathBuf { + self.path() + } + fn git(&self, args: &[&str]) -> Result { + self.git(args) + } + fn git_ai(&self, args: &[&str]) -> Result { + self.git_ai(args) + } + fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result { + self.git_ai_with_env(args, envs) + } + fn commit(&self, message: &str) -> Result { + self.commit(message) + } +} + +impl RepoOps for WorktreeRepo { + fn path(&self) -> &PathBuf { + self.path() + } + fn git(&self, args: &[&str]) -> Result { + self.git(args) + } + fn git_ai(&self, args: &[&str]) -> Result { + self.git_ai(args) + } + fn git_ai_with_env(&self, args: &[&str], envs: &[(&str, &str)]) -> Result { + self.git_ai_with_env(args, envs) + } + fn commit(&self, message: &str) -> Result { + self.commit(message) + } +} + +#[derive(Debug, Deserialize)] +struct StatusJson { + stats: CommitStats, + checkpoints: Vec, +} + +#[derive(Debug, Deserialize)] +struct StatusCheckpoint { + additions: u32, + deletions: u32, + tool_model: String, + is_human: bool, +} + +fn write_file(repo: &impl RepoOps, relative: &str, contents: &str) -> PathBuf { + let path = repo.path().join(relative); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).expect("failed to create parent directories"); + } + fs::write(&path, contents).expect("failed to write file"); + path +} + +fn parse_status_json(output: &str) -> StatusJson { + let json = extract_json_object(output); + serde_json::from_str(&json).expect("status output should be valid JSON") +} + +fn status_summary(repo: &impl RepoOps) -> (CommitStats, Vec<(u32, u32, bool, String)>) { + let output = repo + .git_ai(&["status", "--json"]) + .expect("git-ai status should succeed"); + let parsed = parse_status_json(&output); + let checkpoints = parsed + .checkpoints + .iter() + .map(|cp| { + ( + cp.additions, + cp.deletions, + cp.is_human, + cp.tool_model.clone(), + ) + }) + .collect::>(); + (parsed.stats, checkpoints) +} + +fn status_summary_with_env( + repo: &impl RepoOps, + envs: &[(&str, &str)], +) -> (CommitStats, Vec<(u32, u32, bool, String)>) { + let output = repo + .git_ai_with_env(&["status", "--json"], envs) + .expect("git-ai status should succeed"); + let parsed = parse_status_json(&output); + let checkpoints = parsed + .checkpoints + .iter() + .map(|cp| { + ( + cp.additions, + cp.deletions, + cp.is_human, + cp.tool_model.clone(), + ) + }) + .collect::>(); + (parsed.stats, checkpoints) +} + +fn 
stats_key_fields(stats: &CommitStats) -> (u32, u32, u32, u32, u32, u32) { + ( + stats.human_additions, + stats.mixed_additions, + stats.ai_additions, + stats.ai_accepted, + stats.git_diff_added_lines, + stats.git_diff_deleted_lines, + ) +} + +fn worktree_git_dir(worktree: &WorktreeRepo) -> PathBuf { + let output = worktree + .git(&["rev-parse", "--git-dir"]) + .expect("rev-parse --git-dir should succeed"); + let git_dir = PathBuf::from(output.trim()); + if git_dir.is_relative() { + worktree.path().join(git_dir) + } else { + git_dir + } +} + +fn worktree_commondir(worktree: &WorktreeRepo) -> PathBuf { + let git_dir = worktree_git_dir(worktree); + let commondir_path = git_dir.join("commondir"); + let commondir_contents = fs::read_to_string(&commondir_path).expect("commondir should exist"); + let commondir = PathBuf::from(commondir_contents.trim()); + let resolved = if commondir.is_absolute() { + commondir + } else { + git_dir.join(commondir) + }; + resolved.canonicalize().unwrap_or(resolved) +} + +fn extract_json_object(output: &str) -> String { + let start = output.find('{').unwrap_or(0); + let end = output.rfind('}').unwrap_or(output.len().saturating_sub(1)); + output[start..=end].to_string() +} + +fn normalize_diff(output: &str) -> String { + output + .lines() + .filter(|line| !line.starts_with("index ")) + .collect::>() + .join("\n") +} + +fn parse_blame(output: &str) -> Vec<(String, String)> { + output + .lines() + .filter(|line| !line.trim().is_empty()) + .map(|line| { + if let Some(start_paren) = line.find('(') { + if let Some(end_paren) = line.find(')') { + let author_section = &line[start_paren + 1..end_paren]; + let content = line[end_paren + 1..].trim().to_string(); + + let parts: Vec<&str> = author_section.trim().split_whitespace().collect(); + let mut author_parts = Vec::new(); + for part in parts { + if part.chars().next().unwrap_or('a').is_ascii_digit() { + break; + } + author_parts.push(part); + } + let author = author_parts.join(" "); + return (author, content); + } + } + ("unknown".to_string(), line.to_string()) + }) + .collect() +} + +fn temp_dir_with_prefix(prefix: &str) -> PathBuf { + let mut rng = rand::thread_rng(); + let n: u64 = rng.gen_range(0..10000000000); + let path = std::env::temp_dir().join(format!("{}-{}", prefix, n)); + fs::create_dir_all(&path).expect("failed to create temp dir"); + path +} + +fn checkpoint_and_commit( + repo: &impl RepoOps, + relative: &str, + contents: &str, + message: &str, + ai: bool, +) -> NewCommit { + write_file(repo, relative, contents); + let checkpoint_args = if ai { + vec!["checkpoint", "mock_ai"] + } else { + vec!["checkpoint"] + }; + repo.git_ai(&checkpoint_args) + .expect("checkpoint should succeed"); + repo.git(&["add", "-A"]).expect("add should succeed"); + repo.commit(message).expect("commit should succeed") +} + +#[test] +fn test_worktree_checkpoint_status_parity() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + write_file(&base_repo, "file.txt", "one\n"); + base_repo.git_ai(&["checkpoint"]).unwrap(); + let (base_stats, base_checkpoints) = status_summary(&base_repo); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("status"); + write_file(&worktree, "file.txt", "one\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (wt_stats, wt_checkpoints) = status_summary(&worktree); + + assert_eq!(stats_key_fields(&base_stats), stats_key_fields(&wt_stats)); + assert_eq!(base_checkpoints, wt_checkpoints); +} + +#[test] +fn 
test_worktree_diff_parity() { + let base_repo = TestRepo::new(); + let base_commit = + checkpoint_and_commit(&base_repo, "file.txt", "line1\nline2\n", "base", false); + let base_diff = base_repo + .git_ai(&["diff", &base_commit.commit_sha]) + .unwrap(); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("diff"); + let wt_commit = + checkpoint_and_commit(&worktree, "file.txt", "line1\nline2\n", "worktree", false); + let wt_diff = worktree.git_ai(&["diff", &wt_commit.commit_sha]).unwrap(); + + assert_eq!(normalize_diff(&base_diff), normalize_diff(&wt_diff)); +} + +#[test] +fn test_worktree_commit_authorship_parity() { + let base_repo = TestRepo::new(); + let base_commit = checkpoint_and_commit(&base_repo, "file.txt", "line1\n", "base", true); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("authorship"); + let wt_commit = checkpoint_and_commit(&worktree, "file.txt", "line1\n", "worktree", true); + + let base_attestations = base_commit.authorship_log.attestations.len(); + let wt_attestations = wt_commit.authorship_log.attestations.len(); + assert_eq!(base_attestations, wt_attestations); + + let base_entries: usize = base_commit + .authorship_log + .attestations + .iter() + .map(|a| a.entries.len()) + .sum(); + let wt_entries: usize = wt_commit + .authorship_log + .attestations + .iter() + .map(|a| a.entries.len()) + .sum(); + assert_eq!(base_entries, wt_entries); +} + +#[test] +fn test_worktree_blame_parity() { + let base_repo = TestRepo::new(); + checkpoint_and_commit(&base_repo, "file.txt", "human\n", "base", false); + checkpoint_and_commit(&base_repo, "file.txt", "human\nai\n", "base-ai", true); + let base_blame = base_repo.git_ai(&["blame", "file.txt"]).unwrap(); + + let repo = TestRepo::new(); + let worktree = repo.add_worktree("blame"); + checkpoint_and_commit(&worktree, "file.txt", "human\n", "wt", false); + checkpoint_and_commit(&worktree, "file.txt", "human\nai\n", "wt-ai", true); + let wt_blame = worktree.git_ai(&["blame", "file.txt"]).unwrap(); + + let base_parsed = parse_blame(&base_blame); + let wt_parsed = parse_blame(&wt_blame); + assert_eq!(base_parsed, wt_parsed); +} + +#[test] +fn test_worktree_subdir_repository_discovery() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("subdir"); + write_file(&worktree, "nested/file.txt", "content\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + + let subdir = worktree.path().join("nested"); + let output = worktree + .git_ai_from_working_dir(&subdir, &["status", "--json"]) + .expect("status from subdir should succeed"); + let parsed = parse_status_json(&output); + assert!(!parsed.checkpoints.is_empty()); +} + +#[test] +fn test_group_files_by_repository_with_worktree() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("group"); + let file_path = write_file(&worktree, "file.txt", "content\n"); + + let (repos, orphans) = + group_files_by_repository(&[file_path.to_string_lossy().to_string()], None); + + assert!(orphans.is_empty()); + assert_eq!(repos.len(), 1); + let (found_repo, files) = repos.values().next().unwrap(); + assert_eq!(files.len(), 1); + let workdir = found_repo.workdir().expect("workdir should exist"); + assert_eq!(workdir, worktree.canonical_path()); +} + +#[test] +fn test_worktree_branch_switch_and_merge() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("merge"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + let base_branch = worktree.current_branch(); + + worktree + .git(&["switch", "-c", 
"feature-merge"]) + .expect("switch to feature should succeed"); + checkpoint_and_commit(&worktree, "file.txt", "base\nfeature\n", "feature", false); + + worktree + .git(&["switch", &base_branch]) + .expect("switch back should succeed"); + worktree + .git(&["merge", "feature-merge"]) + .expect("merge should succeed"); + + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert!(contents.contains("feature")); +} + +#[test] +fn test_worktree_rebase_and_cherry_pick() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("rebase"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + let base_branch = worktree.current_branch(); + + worktree + .git(&["switch", "-c", "feature-rebase"]) + .expect("switch to feature should succeed"); + checkpoint_and_commit(&worktree, "feature.txt", "feature\n", "feature", false); + + worktree + .git(&["switch", &base_branch]) + .expect("switch back should succeed"); + checkpoint_and_commit(&worktree, "main.txt", "main\n", "main", false); + + worktree + .git(&["switch", "feature-rebase"]) + .expect("switch to feature should succeed"); + worktree + .git(&["rebase", &base_branch]) + .expect("rebase should succeed"); + + worktree + .git(&["switch", &base_branch]) + .expect("switch back should succeed"); + let cherry_sha = worktree + .git(&["rev-parse", "feature-rebase"]) + .unwrap() + .trim() + .to_string(); + worktree + .git(&["cherry-pick", &cherry_sha]) + .expect("cherry-pick should succeed"); + + let feature_contents = fs::read_to_string(worktree.path().join("feature.txt")).unwrap(); + let main_contents = fs::read_to_string(worktree.path().join("main.txt")).unwrap(); + assert!(feature_contents.contains("feature")); + assert!(main_contents.contains("main")); +} + +#[test] +fn test_worktree_stash_and_reset() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("stash"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + write_file(&worktree, "file.txt", "base\nchange\n"); + + worktree.git(&["stash"]).expect("stash should succeed"); + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert_eq!(contents, "base\n"); + + worktree.git(&["stash", "pop"]).expect("stash pop"); + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert!(contents.contains("change")); + + worktree + .git(&["reset", "--hard", "HEAD"]) + .expect("reset should succeed"); + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert_eq!(contents, "base\n"); +} + +#[test] +fn test_worktree_amend() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("amend"); + + checkpoint_and_commit(&worktree, "file.txt", "base\n", "base", false); + write_file(&worktree, "file.txt", "base\namended\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + worktree.git(&["add", "-A"]).unwrap(); + worktree + .git(&["commit", "--amend", "--no-edit"]) + .expect("amend should succeed"); + + let contents = fs::read_to_string(worktree.path().join("file.txt")).unwrap(); + assert!(contents.contains("amended")); +} + +#[test] +fn test_worktree_stats_json() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("stats"); + checkpoint_and_commit(&worktree, "file.txt", "line1\nline2\n", "stats", true); + + let output = worktree + .git_ai(&["stats", "--json"]) + .expect("stats should succeed"); + let json = extract_json_object(&output); + let parsed: CommitStats = serde_json::from_str(&json).expect("stats JSON"); + 
assert!(parsed.git_diff_added_lines > 0); +} + +#[test] +fn test_worktree_notes_visible_from_base_repo() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("notes"); + let commit = checkpoint_and_commit(&worktree, "file.txt", "line1\n", "note", true); + + let base_repo = git_ai::git::find_repository_in_path(repo.path().to_str().unwrap()) + .expect("find repository"); + let note = git_ai::git::refs::show_authorship_note(&base_repo, &commit.commit_sha); + assert!(note.is_some()); +} + +#[test] +fn test_worktree_multiple_worktrees_diverge() { + let repo = TestRepo::new(); + let wt_one = repo.add_worktree("one"); + let wt_two = repo.add_worktree("two"); + + checkpoint_and_commit(&wt_one, "file.txt", "one\n", "one", false); + checkpoint_and_commit(&wt_two, "file.txt", "two\n", "two", false); + + let log_one = wt_one.git(&["log", "-1", "--pretty=%s"]).unwrap(); + let log_two = wt_two.git(&["log", "-1", "--pretty=%s"]).unwrap(); + + assert!(log_one.trim().contains("one")); + assert!(log_two.trim().contains("two")); +} + +#[test] +fn test_worktree_default_branch_name_is_respected() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("branchname"); + + let default_branch = default_branchname(); + let current_branch = worktree.current_branch(); + + assert!( + current_branch.starts_with("worktree-") + || current_branch == default_branch + || current_branch == "HEAD", + "unexpected worktree branch: {} (default: {})", + current_branch, + default_branch + ); +} + +#[test] +fn test_worktree_config_resolves_path_with_temp_home() { + let repo = TestRepo::new(); + let worktree = repo.add_worktree("config"); + + let remote_path = temp_dir_with_prefix("git-ai-remote"); + let init_output = Command::new("git") + .args(["init", "--bare", remote_path.to_str().unwrap()]) + .output() + .expect("git init --bare"); + assert!(init_output.status.success()); + + worktree + .git(&["remote", "add", "origin", remote_path.to_str().unwrap()]) + .expect("remote add should succeed"); + + let temp_home = temp_dir_with_prefix("git-ai-home"); + let output = worktree.git_ai_with_env( + &["config", "set", "exclude_repositories", "."], + &[("HOME", temp_home.to_str().unwrap())], + ); + assert!(output.is_ok(), "config set should succeed: {:?}", output); + + let config_path = temp_home.join(".git-ai").join("config.json"); + let config_contents = fs::read_to_string(&config_path).expect("config.json should exist"); + let json: Value = serde_json::from_str(&config_contents).expect("valid json"); + let excludes = json + .get("exclude_repositories") + .and_then(|v| v.as_array()) + .cloned() + .unwrap_or_default(); + assert!( + excludes.iter().any(|v| { + v.as_str() + .map(|s| s.contains(remote_path.to_str().unwrap())) + .unwrap_or(false) + }), + "exclude_repositories should include remote url/path" + ); + + let _ = fs::remove_dir_all(temp_home); + let _ = fs::remove_dir_all(remote_path); +} + +#[test] +fn test_worktree_config_overrides_common_config() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + base_repo + .git(&["config", "extensions.worktreeConfig", "true"]) + .expect("enable worktree config"); + + let worktree = base_repo.add_worktree("config-override"); + worktree + .git(&["config", "--worktree", "user.name", "Worktree"]) + .expect("set worktree user.name"); + + write_file(&base_repo, "file.txt", "base\n"); + 
base_repo.git_ai(&["checkpoint"]).unwrap(); + let (_, base_checkpoints) = status_summary(&base_repo); + assert_eq!( + base_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Base") + ); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!( + wt_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Worktree") + ); +} + +#[test] +fn test_worktree_config_falls_back_to_common_config() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + base_repo + .git(&["config", "extensions.worktreeConfig", "true"]) + .expect("enable worktree config"); + + let worktree = base_repo.add_worktree("config-fallback"); + let _ = worktree.git(&["config", "--worktree", "--unset-all", "user.name"]); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!(wt_checkpoints.first().map(|cp| cp.3.as_str()), Some("Base")); +} + +#[test] +fn test_worktree_config_overrides_global_config() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + base_repo + .git(&["config", "extensions.worktreeConfig", "true"]) + .expect("enable worktree config"); + + let worktree = base_repo.add_worktree("config-global"); + worktree + .git(&["config", "--worktree", "user.name", "Worktree"]) + .expect("set worktree user.name"); + + let temp_home = temp_dir_with_prefix("git-ai-home"); + let home_str = temp_home.to_str().expect("valid home path"); + base_repo + .git_with_env( + &["config", "--global", "user.name", "Global"], + &[("HOME", home_str)], + None, + ) + .expect("set global user.name"); + + let envs = [("HOME", home_str)]; + + write_file(&base_repo, "file.txt", "base\n"); + base_repo.git_ai_with_env(&["checkpoint"], &envs).unwrap(); + let (_, base_checkpoints) = status_summary_with_env(&base_repo, &envs); + assert_eq!( + base_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Base") + ); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai_with_env(&["checkpoint"], &envs).unwrap(); + let (_, wt_checkpoints) = status_summary_with_env(&worktree, &envs); + assert_eq!( + wt_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Worktree") + ); + + let _ = fs::remove_dir_all(temp_home); +} + +#[test] +fn test_worktree_config_worktree_ignored_without_extension() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + + let worktree = base_repo.add_worktree("config-worktree-off"); + let wt_config_path = worktree_git_dir(&worktree).join("config.worktree"); + let config_contents = "[user]\n\tname = WorktreeFile\n"; + fs::write(&wt_config_path, config_contents).expect("write config.worktree"); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!(wt_checkpoints.first().map(|cp| cp.3.as_str()), Some("Base")); +} + +#[test] +fn test_worktree_include_if_onbranch_applies() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", 
"initial"]) + .unwrap(); + base_repo + .git(&["config", "user.name", "Base"]) + .expect("set base user.name"); + + let include_dir = temp_dir_with_prefix("git-ai-onbranch"); + let include_path = include_dir.join("onbranch.config"); + fs::write(&include_path, "[user]\n\tname = OnBranch\n").expect("write onbranch include"); + + let include_key = "includeIf.onbranch:worktree-onbranch-*.path"; + base_repo + .git(&[ + "config", + "--add", + include_key, + include_path.to_str().expect("valid include path"), + ]) + .expect("set includeIf.onbranch"); + + let worktree = base_repo.add_worktree("onbranch"); + + write_file(&base_repo, "file.txt", "base\n"); + base_repo.git_ai(&["checkpoint"]).unwrap(); + let (_, base_checkpoints) = status_summary(&base_repo); + assert_eq!( + base_checkpoints.first().map(|cp| cp.3.as_str()), + Some("Base") + ); + + write_file(&worktree, "file.txt", "worktree\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + let (_, wt_checkpoints) = status_summary(&worktree); + assert_eq!( + wt_checkpoints.first().map(|cp| cp.3.as_str()), + Some("OnBranch") + ); + + let _ = fs::remove_dir_all(include_dir); +} + +#[test] +fn test_worktree_locked_allows_status() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = base_repo.add_worktree("locked"); + let worktree_path = worktree.path().to_str().expect("valid worktree path"); + + base_repo + .git_og(&["worktree", "lock", worktree_path]) + .expect("worktree lock should succeed"); + + let output = worktree.git_ai(&["status", "--json"]); + assert!(output.is_ok(), "status should work on locked worktree"); + + let _ = base_repo.git_og(&["worktree", "unlock", worktree_path]); +} + +#[test] +fn test_worktree_removed_does_not_break_base_status() { + let base_repo = TestRepo::new(); + base_repo + .git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = base_repo.add_worktree("removed"); + let worktree_path = worktree.path().to_str().expect("valid worktree path"); + + base_repo + .git_og(&["worktree", "remove", "-f", worktree_path]) + .expect("worktree remove should succeed"); + + let output = base_repo.git_ai(&["status", "--json"]); + assert!(output.is_ok(), "base status should succeed after removal"); +} + +#[test] +fn test_worktree_detached_head_checkpoint() { + let repo = TestRepo::new(); + repo.git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = repo.add_worktree("detached"); + worktree + .git(&["checkout", "--detach"]) + .expect("detach HEAD"); + + write_file(&worktree, "file.txt", "detached\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + + let output = worktree + .git_ai(&["status", "--json"]) + .expect("status should succeed"); + let parsed = parse_status_json(&output); + assert!(!parsed.checkpoints.is_empty()); +} + +#[test] +fn test_worktree_commondir_resolution_matches_git() { + let repo = TestRepo::new(); + repo.git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = repo.add_worktree("commondir"); + + let expected_common = worktree_commondir(&worktree); + let found_repo = git_ai::git::find_repository_in_path(worktree.path().to_str().unwrap()) + .expect("find repository"); + let actual_common = found_repo + .common_git_dir() + .canonicalize() + .unwrap_or_else(|_| found_repo.common_git_dir().to_path_buf()); + + assert_eq!(expected_common, actual_common); +} + +#[test] +fn test_worktree_storage_lives_in_worktree_git_dir() { + let repo = TestRepo::new(); + repo.git(&["commit", 
"--allow-empty", "-m", "initial"]) + .unwrap(); + let worktree = repo.add_worktree("storage"); + write_file(&worktree, "file.txt", "content\n"); + worktree.git_ai(&["checkpoint"]).unwrap(); + + let git_dir = worktree_git_dir(&worktree); + let found_repo = git_ai::git::find_repository_in_path(worktree.path().to_str().unwrap()) + .expect("find repository"); + let expected_prefix = git_dir.join("ai").join("working_logs"); + let actual = found_repo.storage.working_logs.clone(); + assert!( + actual.starts_with(&expected_prefix), + "working logs should live under worktree git dir (expected prefix {:?}, got {:?})", + expected_prefix, + actual + ); + + let head_sha = found_repo.head().expect("head").target().expect("head sha"); + let checkpoints_file = actual.join(head_sha).join("checkpoints.jsonl"); + assert!(checkpoints_file.exists(), "checkpoint log should exist"); +} + +#[test] +fn test_worktree_working_logs_are_isolated() { + let repo = TestRepo::new(); + repo.git(&["commit", "--allow-empty", "-m", "initial"]) + .unwrap(); + let wt_one = repo.add_worktree("isolation-one"); + let wt_two = repo.add_worktree("isolation-two"); + + write_file(&wt_one, "file.txt", "one\n"); + wt_one.git_ai(&["checkpoint"]).unwrap(); + + let output = wt_two + .git_ai(&["status", "--json"]) + .expect("status should succeed"); + let parsed = parse_status_json(&output); + assert!( + parsed.checkpoints.is_empty(), + "worktree checkpoints should not leak across worktrees" + ); +} From f4f442508f3a5b5d20b1dc4b2b26f8dc1676fcfc Mon Sep 17 00:00:00 2001 From: Sasha Varlamov Date: Sat, 14 Feb 2026 16:36:20 -0500 Subject: [PATCH 5/5] Fix blame boundary/abbrev formatting parity --- src/commands/blame.rs | 73 +++++++++++++++++++++++++++++++------------ 1 file changed, 53 insertions(+), 20 deletions(-) diff --git a/src/commands/blame.rs b/src/commands/blame.rs index 52cf04392..24af92fe4 100644 --- a/src/commands/blame.rs +++ b/src/commands/blame.rs @@ -487,6 +487,34 @@ impl Repository { } let mut hunks: Vec = Vec::new(); + let mut abbrev_cache: HashMap = HashMap::new(); + let mut abbreviate_sha = |sha: &str, min_len: usize| -> Result { + if min_len >= sha.len() { + return Ok(sha.to_string()); + } + if sha.chars().all(|c| c == '0') { + return Ok(sha[..min_len].to_string()); + } + if let Some(existing) = abbrev_cache.get(sha) { + return Ok(existing.clone()); + } + + let mut args = self.global_args_for_exec(); + args.push("rev-parse".to_string()); + args.push(format!("--short={}", min_len.max(4))); + args.push(sha.to_string()); + let output = exec_git(&args)?; + let stdout = String::from_utf8(output.stdout)?; + let resolved = stdout.lines().next().unwrap_or("").trim().to_string(); + + let abbrev = if resolved.is_empty() { + sha.to_string() + } else { + resolved + }; + abbrev_cache.insert(sha.to_string(), abbrev.clone()); + Ok(abbrev) + }; let mut cur_commit: Option = None; let mut cur_final_start: u32 = 0; let mut cur_orig_start: u32 = 0; @@ -591,10 +619,10 @@ impl Repository { } else { options.abbrev.unwrap_or(7) as usize }; - let abbrev = if abbrev_len < prev_sha.len() { - prev_sha[..abbrev_len].to_string() - } else { + let abbrev = if options.long_rev { prev_sha.clone() + } else { + abbreviate_sha(&prev_sha, abbrev_len)? 
From f4f442508f3a5b5d20b1dc4b2b26f8dc1676fcfc Mon Sep 17 00:00:00 2001
From: Sasha Varlamov
Date: Sat, 14 Feb 2026 16:36:20 -0500
Subject: [PATCH 5/5] Fix blame boundary/abbrev formatting parity

---
 src/commands/blame.rs | 73 +++++++++++++++++++++++++++++++------------
 1 file changed, 53 insertions(+), 20 deletions(-)

diff --git a/src/commands/blame.rs b/src/commands/blame.rs
index 52cf04392..24af92fe4 100644
--- a/src/commands/blame.rs
+++ b/src/commands/blame.rs
@@ -487,6 +487,34 @@ impl Repository {
         }
 
         let mut hunks: Vec<BlameHunk> = Vec::new();
+        let mut abbrev_cache: HashMap<String, String> = HashMap::new();
+        let mut abbreviate_sha = |sha: &str, min_len: usize| -> Result<String, GitAiError> {
+            if min_len >= sha.len() {
+                return Ok(sha.to_string());
+            }
+            if sha.chars().all(|c| c == '0') {
+                return Ok(sha[..min_len].to_string());
+            }
+            if let Some(existing) = abbrev_cache.get(sha) {
+                return Ok(existing.clone());
+            }
+
+            let mut args = self.global_args_for_exec();
+            args.push("rev-parse".to_string());
+            args.push(format!("--short={}", min_len.max(4)));
+            args.push(sha.to_string());
+            let output = exec_git(&args)?;
+            let stdout = String::from_utf8(output.stdout)?;
+            let resolved = stdout.lines().next().unwrap_or("").trim().to_string();
+
+            let abbrev = if resolved.is_empty() {
+                sha.to_string()
+            } else {
+                resolved
+            };
+            abbrev_cache.insert(sha.to_string(), abbrev.clone());
+            Ok(abbrev)
+        };
         let mut cur_commit: Option<String> = None;
         let mut cur_final_start: u32 = 0;
         let mut cur_orig_start: u32 = 0;
@@ -591,10 +619,10 @@
         } else {
             options.abbrev.unwrap_or(7) as usize
         };
-        let abbrev = if abbrev_len < prev_sha.len() {
-            prev_sha[..abbrev_len].to_string()
-        } else {
+        let abbrev = if options.long_rev {
             prev_sha.clone()
+        } else {
+            abbreviate_sha(&prev_sha, abbrev_len)?
         };
+
         hunks.push(BlameHunk {
@@ -660,10 +688,10 @@
         } else {
             options.abbrev.unwrap_or(7) as usize
         };
-        let abbrev = if abbrev_len < prev_sha.len() {
-            prev_sha[..abbrev_len].to_string()
-        } else {
+        let abbrev = if options.long_rev {
             prev_sha.clone()
+        } else {
+            abbreviate_sha(&prev_sha, abbrev_len)?
         };
+
         hunks.push(BlameHunk {
@@ -1318,30 +1346,35 @@ fn output_default_format(
         };
 
         if let Some(hunk) = line_to_hunk.get(&line_num) {
-            // Determine hash length - match git blame default (7 chars)
+            // Determine hash column width - git may emit one extra char over requested abbrev
+            // for non-boundary commits to preserve uniqueness.
             let hash_len = if options.long_rev {
-                40 // Full hash for long revision
+                40
             } else if let Some(abbrev) = options.abbrev {
-                abbrev as usize
+                if hunk.is_boundary {
+                    abbrev as usize
+                } else {
+                    (abbrev + 1) as usize
+                }
             } else {
-                7 // Default 7 chars
+                7
             };
-            let sha = if hash_len < hunk.commit_sha.len() {
+            let sha = if options.long_rev {
+                hunk.commit_sha.as_str()
+            } else if hash_len < hunk.commit_sha.len() {
                 &hunk.commit_sha[..hash_len]
             } else {
-                &hunk.commit_sha
+                hunk.commit_sha.as_str()
             };
 
-            // Add boundary marker if this is a boundary commit
-            let boundary_marker = if hunk.is_boundary && options.blank_boundary {
-                "^"
-            } else {
-                ""
-            };
+            // Preserve git's hash column width for boundary lines. With -b, git blanks
+            // the whole boundary column (including the marker slot) instead of printing '^'.
             let full_sha = if hunk.is_boundary && options.blank_boundary {
-                format!("{}{}", boundary_marker, " ") // Empty hash for boundary
+                " ".repeat(hash_len + 1)
+            } else if hunk.is_boundary {
+                format!("^{}", sha)
            } else {
-                format!("{}{}", boundary_marker, sha)
+                sha.to_string()
             };
 
             // Get the author for this line (AI authorship or original)
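Stated concretely, the hash-column rule the final hunk implements: a regular line prints the abbreviated hash (one character wider than the requested --abbrev, per the comment above), a boundary line prints ^ followed by the abbreviation, and -b blanks the whole column, caret slot included. A minimal, illustrative restatement (hash_column is a made-up free function, not from the codebase; sha is assumed to already be abbreviated to hash_len characters, as abbreviate_sha above would do):

// Standalone restatement of the hash-column formatting rule.
fn hash_column(sha: &str, hash_len: usize, is_boundary: bool, blank_boundary: bool) -> String {
    if is_boundary && blank_boundary {
        // -b: blank the hash and the caret slot so the column width is preserved.
        " ".repeat(hash_len + 1)
    } else if is_boundary {
        // Boundary commits keep the caret prefix, e.g. "^52cf0439".
        format!("^{}", sha)
    } else {
        sha.to_string()
    }
}

fn main() {
    assert_eq!(hash_column("52cf0439", 8, true, false), "^52cf0439");
    assert_eq!(hash_column("24af92fe4", 9, false, false), "24af92fe4");
    assert_eq!(hash_column("52cf0439", 8, true, true), " ".repeat(9));
}

With --abbrev=8, for example, a boundary line starts with ^ plus eight hash characters while a regular line shows nine, so both occupy the same column width; -b substitutes blanks of that same width, which is the parity the worktree blame tests compare against plain git blame.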