diff --git a/config/changelog.yml.example b/config/changelog.yml.example index 9091fa0be..2c28ac724 100644 --- a/config/changelog.yml.example +++ b/config/changelog.yml.example @@ -4,16 +4,16 @@ # Available types for changelog entries available_types: - - feature - - enhancement - - bug-fix - - known-issue - breaking-change + - bug-fix - deprecation - docs + - enhancement + - feature + - known-issue + - other - regression - security - - other # Available subtypes for breaking changes available_subtypes: @@ -34,24 +34,23 @@ available_lifecycles: # Available areas (optional - if not specified, all areas are allowed) available_areas: - - search - - security - - machine-learning - - observability - - index-management + # - Autoscaling + # - Search + # - Security + # - Watcher # Add more areas as needed # Available products (optional - if not specified, all products are allowed) available_products: - - elasticsearch - - kibana - - apm - - beats - - elastic-agent - - fleet - - cloud-hosted - - cloud-serverless - - cloud-enterprise + # - elasticsearch + # - kibana + # - apm + # - beats + # - elastic-agent + # - fleet + # - cloud-hosted + # - cloud-serverless + # - cloud-enterprise # Add more products as needed # GitHub label mappings (optional - used when --pr option is specified) @@ -59,22 +58,38 @@ available_products: # When a PR has a label that matches a key, the corresponding type value is used label_to_type: # Example mappings - customize based on your label naming conventions - # "type:feature": feature - # "type:bug": bug-fix - # "type:enhancement": enhancement - # "type:breaking": breaking-change - # "type:security": security + # ">breaking": breaking-change + # ">bug": bug-fix + # ">docs": docs + # ">enhancement": enhancement + # ">feature": feature # Maps GitHub PR labels to changelog area values # Multiple labels can map to the same area, and a single label can map to multiple areas (comma-separated) label_to_areas: # Example mappings - customize based on your label naming conventions - # "area:search": search - # "area:security": security - # "area:ml": machine-learning - # "area:observability": observability - # "area:index": index-management - # "area:multiple": "search, security" # Multiple areas comma-separated + # ":Distributed Coordination/Autoscaling": Autoscaling + # ":Search/Search": Search + # ":Security/Security": Security + # ":Data Management/Watcher": Watcher + # "area:multiple": "Search, Security" # Multiple areas comma-separated + +# Render blockers (optional - used by the "docs-builder changelog render" command) +# Changelogs matching the specified products and areas/types will be commented out in rendered output files +# Dictionary key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") +# Dictionary value contains areas and/or types that should be blocked for those products +render_blockers: + # Multiple products (comma-separated) with areas and types that should be blocked + "cloud-hosted, cloud-serverless": + areas: # List of area values that should be blocked (commented out) during render + - Autoscaling + - Watcher + types: # List of type values that should be blocked (commented out) during render + - docs + # Single product with areas that should be blocked + elasticsearch: + areas: + - Security # Product-specific label blockers (optional) # Maps product IDs to lists of pull request labels that prevent changelog creation for that product diff --git a/docs/_docset.yml b/docs/_docset.yml index 8658d420f..28c3003d8 
100644 --- a/docs/_docset.yml +++ b/docs/_docset.yml @@ -160,6 +160,8 @@ toc: children: - file: index.md - file: changelog-add.md + - file: changelog-bundle.md + - file: changelog-render.md - folder: mcp children: - file: index.md diff --git a/docs/cli/release/changelog-bundle.md b/docs/cli/release/changelog-bundle.md new file mode 100644 index 000000000..dfd67577d --- /dev/null +++ b/docs/cli/release/changelog-bundle.md @@ -0,0 +1,61 @@ +# changelog bundle + +Bundle changelog files. + +To create the changelogs, use [](/cli/release/changelog-add.md). + + +## Usage + +```sh +docs-builder changelog bundle [options...] [-h|--help] +``` + +## Options + +`--all` +: Include all changelogs from the directory. +: Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. + +`--directory ` +: Optional: The directory that contains the changelog YAML files. +: Defaults to the current directory. + +`--input-products ?>` +: Filter by products in format "product target lifecycle, ..." +: Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. +: When specified, all three parts (product, target, lifecycle) are required but can be wildcards (`*`). For example: + +- `"cloud-serverless 2025-12-02 ga, cloud-serverless 2025-12-06 beta"` - exact matches +- `"cloud-serverless 2025-12-02 *"` - match cloud-serverless 2025-12-02 with any lifecycle +- `"elasticsearch * *"` - match all elasticsearch changelogs +- `"* 9.3.* *"` - match any product with target starting with "9.3." +- `"* * *"` - match all changelogs (equivalent to `--all`) + +`--output ` +: Optional: The output path for the bundle. +: Can be either (1) a directory path, in which case `changelog-bundle.yaml` is created in that directory, or (2) a file path ending in `.yml` or `.yaml`. +: Defaults to `changelog-bundle.yaml` in the input directory. + +`--output-products ?>` +: Optional: Explicitly set the products array in the output file in format "product target lifecycle, ...". +: This value replaces information that would otherwise by derived from changelogs. + +`--owner ` +: The GitHub repository owner, which is required when pull requests are specified as numbers. + +`--prs ` +: Filter by pull request URLs or numbers (comma-separated), or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. +: Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. +: Each occurrence can be either comma-separated PRs (e.g., `--prs "https://github.com/owner/repo/pull/123,6789"`) or a file path (e.g., `--prs /path/to/file.txt`). +: When specifying PRs directly, provide comma-separated values. +: When specifying a file path, provide a single value that points to a newline-delimited file. + +`--repo ` +: The GitHub repository name, which is required when PRs are specified as numbers. + +`--resolve` +: Optional: Copy the contents of each changelog file into the entries array. +: By default, the bundle contains only the file names and checksums. diff --git a/docs/cli/release/changelog-render.md b/docs/cli/release/changelog-render.md new file mode 100644 index 000000000..61fc68e48 --- /dev/null +++ b/docs/cli/release/changelog-render.md @@ -0,0 +1,53 @@ +# changelog render + +Generate markdown files from changelog bundle files. + +To create the bundle files, use [](/cli/release/changelog-bundle.md). + +For details and examples, go to [](/contribute/changelog.md). + +## Usage + +```sh +docs-builder changelog render [options...] 
[-h|--help] +``` + +## Options + +`--input ` +: One or more bundle input files. +: Each bundle is specified as "bundle-file-path|changelog-file-path|repo|link-visibility" using pipe (`|`) as delimiter. +: To merge multiple bundles, separate them with commas: `--input "bundle1|dir1|repo1|keep-links,bundle2|dir2|repo2|hide-links"`. +: For example, `--input "/path/to/changelog-bundle.yaml|/path/to/changelogs|elasticsearch|keep-links"`. +: Only `bundle-file-path` is required for each bundle. +: Use `repo` if your changelogs do not contain full URLs for the pull requests or issues; otherwise they will be incorrectly derived with "elastic/elastic" in the URL by default. +: Use `link-visibility` to control whether PR/issue links are shown or hidden for entries from this bundle. Valid values are `keep-links` (default) or `hide-links`. Use `hide-links` for bundles from private repositories. +: **Important**: Paths must be absolute or use environment variables. Tilde (`~`) expansion is not supported. + +`--output ` +: Optional: The output directory for rendered markdown files. +: Defaults to current directory. + +`--title ` +: Optional: The title to use for section headers, directories, and anchors in output markdown files. +: Defaults to the version in the first bundle. +: If the string contains spaces, they are replaced with dashes when used in directory names and anchors. + +`--subsections` +: Optional: Group entries by area in subsections. +: Defaults to false. + +`--hide-features ` +: Optional: Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. +: Each occurrence can be either comma-separated feature IDs (e.g., `--hide-features "feature:new-search-api,feature:enhanced-analytics"`) or a file path (e.g., `--hide-features /path/to/file.txt`). +: When specifying feature IDs directly, provide comma-separated values. +: When specifying a file path, provide a single value that points to a newline-delimited file. The file should contain one feature ID per line. +: Entries with matching `feature-id` values will be commented out in the markdown output and a warning will be emitted. + +`--config ` +: Optional: Path to the changelog.yml configuration file. +: Defaults to `docs/changelog.yml`. +: This configuration file is where the command looks for `render_blockers` details. + +You can configure `render_blockers` in your `changelog.yml` configuration file to automatically block changelog entries from being rendered based on their products, areas, and/or types. +For more information, refer to [](/contribute/changelog.md#render-blockers). diff --git a/docs/cli/release/index.md b/docs/cli/release/index.md index 5e3ee5477..afefc3000 100644 --- a/docs/cli/release/index.md +++ b/docs/cli/release/index.md @@ -7,3 +7,5 @@ navigation_title: "changelog" These commands are associated with product release documentation. 
- [changelog add](changelog-add.md) - Create a changelog file +- [changelog bundle](changelog-bundle.md) - Create a changelog bundle file +- [changelog render](changelog-render.md) - Generate markdown output from changelog bundle files diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index 80f1a9adc..9015eda51 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -1,8 +1,19 @@ -# Create changelogs +# Create and bundle changelogs -By adding a changelog file for each notable change, you can ultimately generate release documention with a consistent layout for all your products. +By adding a file for each notable change and grouping them into bundles, you can ultimately generate release documention with a consistent layout for all your products. -These instructions rely on the use of a common changelog schema: +1. Create changelogs with the `docs-builder changelog add` command. +2. [Create changelog bundles](#changelog-bundle) with the `docs-builder changelog bundle` command. For example, create a bundle for the pull requests that are included in a product release. +3. [Create documentation](#render-changelogs) with the `docs-builder changelog render` command. + +For more information about running `docs-builder`, go to [Contribute locally](https://www.elastic.co/docs/contribute-docs/locally). + +:::{note} +This command is associated with an ongoing release docs initiative. +Additional workflows are still to come for updating and generating documentation from changelogs. +::: + +The changelogs use the following schema: :::{dropdown} Changelog schema ::::{include} /contribute/_snippets/changelog-fields.md @@ -26,13 +37,6 @@ To use the `docs-builder changelog` commands in your development workflow: 1. [Configure changelog settings](#changelog-settings) to correctly interpret your PR labels. 1. [Create changelogs](#changelog-add) with the `docs-builder changelog add` command. -For more information about running `docs-builder`, go to [Contribute locally](https://www.elastic.co/docs/contribute-docs/locally). - -:::{note} -This command is associated with an ongoing release docs initiative. -Additional workflows are still to come for managing the list of changelogs in each release. -::: - ## Create a changelog configuration file [changelog-settings] You can create a configuration file to limit the acceptable product, type, subtype, and lifecycle values. @@ -47,6 +51,9 @@ If a configuration file exists, the command validates its values before generati - If the configuration file contains `lifecycle`, `product`, `subtype`, or `type` values that don't match the values in `products.yml` and `ChangelogConfiguration.cs`, validation fails. The changelog file is not created. - If the configuration file contains `areas` values and they don't match what you specify in the `--areas` command option, validation fails. The changelog file is not created. +The `available_types`, `available_subtypes`, and `available_lifecycles` fields are optional in the configuration file. +If not specified, all default values from `ChangelogConfiguration.cs` are used. + ### GitHub label mappings You can optionally add `label_to_type` and `label_to_areas` mappings in your changelog configuration. @@ -63,6 +70,48 @@ You can use comma-separated product IDs to share the same list of labels across Refer to the file layout in [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example) and an [example usage](#example-block-label). 
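+A minimal sketch of a product-specific blocker entry that shares one list of labels across two products is shown below; the `add_blockers` key name follows the `AddBlockers` property in `ChangelogConfiguration.cs`, and the label names are hypothetical:
+
+```yaml
+# Skip changelog creation for these products when a PR carries any of these labels
+add_blockers:
+  "cloud-hosted, cloud-serverless":
+    - ">non-issue"
+    - ">test"
+```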
+### Render blockers [render-blockers] + +You can optionally add `render_blockers` in your changelog configuration to block specific changelog entries from being rendered in markdown output files. +When you run the `docs-builder changelog render` command, changelog entries that match the specified products and areas/types will be commented out in the markdown output. + +By default, the `docs-builder changelog render` command checks the following path: `docs/changelog.yml`. +You can specify a different path with the `--config` command option. + +The `render_blockers` configuration uses a dictionary format where: + +- The key can be a single product ID or comma-separated product IDs (e.g., `"elasticsearch, cloud-serverless"`) +- The value contains `areas` and/or `types` that should be blocked for those products + +An entry is blocked if any product in the changelog entry matches any product key in `render_blockers` AND (any area matches OR any type matches). +If a changelog entry has multiple products, all matching products in `render_blockers` are checked. + +The `types` values in `render_blockers` must exist in the `available_types` list (or in the default types if `available_types` is not specified). + +Example configuration: + +```yaml +render_blockers: + "cloud-hosted, cloud-serverless": + areas: # List of area values that should be blocked (commented out) during render + - Autoscaling + - Watcher + types: # List of type values that should be blocked (commented out) during render + - docs + elasticsearch: # Another single product case + areas: + - Security +``` + +When rendering, entries with: + +- Product `cloud-hosted` or `cloud-serverless` AND (area `Autoscaling` or `Watcher` OR type `docs`) will be commented out +- Product `elasticsearch` AND area `Security` will be commented out + +The command will emit warnings indicating which changelog entries were commented out and why. + +Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example). + ## Create changelog files [changelog-add] You can use the `docs-builder changelog add` command to create a changelog file. @@ -86,14 +135,16 @@ Options: --action Optional: What users must do to mitigate [Default: null] --feature-id Optional: Feature flag ID [Default: null] --highlight Optional: Include in release highlights [Default: null] - --output Optional: Output directory for the changelog fragment. Defaults to current directory [Default: null] + --output Optional: Output directory for the changelog. Defaults to current directory [Default: null] --config Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null] --use-pr-number Optional: Use the PR number as the filename instead of generating it from a unique ID and title ``` ### Product format -The `--products` parameter accepts products in the format `"product target lifecycle, ..."` where: +The `docs-builder changelog add` has a `--products` option and the `docs-builder changelog bundle` has `--input-products` and `--output-products` options that all use the same format. + +They accept values with the format `"product target lifecycle, ..."` where: - `product` is the product ID from [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml) (required) - `target` is the target version or date (optional) @@ -131,8 +182,6 @@ When using `--use-pr-number`, you must also provide the `--pr` option. 
The PR nu ### Create a changelog for multiple products [example-multiple-products] -The following command creates a changelog for a bug fix that applies to two products: - ```sh docs-builder changelog add \ --title "Fixes enrich and lookup join resolution based on minimum transport version" \ <1> @@ -147,36 +196,11 @@ docs-builder changelog add \ 3. The product values are defined in [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml). 4. The `--prs` value can be a full URL (such as `https://github.com/owner/repo/pull/123`), a short format (such as `owner/repo#123`), just a number (in which case you must also provide `--owner` and `--repo` options), or a path to a file containing newline-delimited PR URLs or numbers. Multiple PRs can be provided comma-separated, or you can specify a file path. You can also mix both formats by specifying `--prs` multiple times. One changelog file will be created for each PR. -The output file has the following format: - -```yaml -pr: https://github.com/elastic/elasticsearch/pull/137431 -type: bug-fix -products: -- product: elasticsearch - target: 9.2.3 -- product: cloud-serverless - target: 2025-12-02 -title: Fixes enrich and lookup join resolution based on minimum transport version -areas: -- ES|QL -``` - ### Create a changelog with PR label mappings [example-map-label] -You can update your changelog configuration file to contain GitHub label mappings, for example: +You can configure label mappings in your changelog configuration file: ```yaml -# Available areas (optional - if not specified, all areas are allowed) -available_areas: - - search - - security - - machine-learning - - observability - - index-management - - ES|QL - # Add more areas as needed - # GitHub label mappings (optional - used when --prs option is specified) # Maps GitHub PR labels to changelog type values # When a PR has a label that matches a key, the corresponding type value is used @@ -201,18 +225,7 @@ docs-builder changelog add \ --config test/changelog.yml ``` -In this case, the changelog file derives the title, type, and areas from the pull request: - -```yaml -pr: https://github.com/elastic/elasticsearch/pull/139272 -type: enhancement -products: -- product: elasticsearch - target: 9.3.0 -areas: -- ES|QL -title: '[ES|QL] Take TOP_SNIPPETS out of snapshot' -``` +In this case, the changelog file derives the title, type, and areas from the pull request. ### Block changelog creation with PR labels [example-block-label] @@ -283,3 +296,275 @@ docs-builder changelog add \ ``` This creates one changelog file for each PR specified, whether from files or directly. + +## Create bundles [changelog-bundle] + +You can use the `docs-builder changelog bundle` command to create a YAML file that lists multiple changelogs. +For up-to-date details, use the `-h` option: + +```sh +Bundle changelogs + +Options: + --directory Optional: Directory containing changelog YAML files. Defaults to current directory [Default: null] + --output Optional: Output path for the bundled changelog. Can be either (1) a directory path, in which case 'changelog-bundle.yaml' is created in that directory, or (2) a file path ending in .yml or .yaml. Defaults to 'changelog-bundle.yaml' in the input directory [Default: null] + --all Include all changelogs in the directory. Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. + --input-products ?> Filter by products in format "product target lifecycle, ..." 
(e.g., "cloud-serverless 2025-12-02 ga, cloud-serverless 2025-12-06 beta"). When specified, all three parts (product, target, lifecycle) are required but can be wildcards (*). Examples: "elasticsearch * *" matches all elasticsearch changelogs, "cloud-serverless 2025-12-02 *" matches cloud-serverless 2025-12-02 with any lifecycle, "* 9.3.* *" matches any product with target starting with "9.3.", "* * *" matches all changelogs (equivalent to --all). Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. [Default: null] + --output-products ?> Optional: Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. [Default: null] + --resolve Optional: Copy the contents of each changelog file into the entries array. By default, the bundle contains only the file names and checksums. + --prs Filter by pull request URLs or numbers (comma-separated), or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. [Default: null] + --owner GitHub repository owner (required only when PRs are specified as numbers) [Default: null] + --repo GitHub repository name (required only when PRs are specified as numbers) [Default: null] +``` + +You can specify only one of the following filter options: + +- `--all`: Include all changelogs from the directory. +- `--input-products`: Include changelogs for the specified products. Refer to [Filter by product](#changelog-bundle-product). +- `--prs`: Include changelogs for the specified pull request URLs or numbers, or a path to a newline-delimited file containing PR URLs or numbers. Go to [Filter by pull requests](#changelog-bundle-pr). + +By default, the output file contains only the changelog file names and checksums. +You can optionally use the `--resolve` command option to pull all of the content from each changelog into the bundle. + +### Filter by product [changelog-bundle-product] + +You can use the `--input-products` option to create a bundle of changelogs that match the product details. +When using `--input-products`, you must provide all three parts: product, target, and lifecycle. +Each part can be a wildcard (`*`) to match any value. + +```sh +docs-builder changelog bundle \ + --input-products "cloud-serverless 2025-12-02 ga, cloud-serverless 2025-12-06 beta" <1> +``` + +1. Include all changelogs that have the `cloud-serverless` product identifier with target dates of either December 2 2025 (lifecycle `ga`) or December 6 2025 (lifecycle `beta`). For more information about product values, refer to [](#product-format). 
+ +You can use wildcards in any of the three parts: + +```sh +# Bundle any changelogs that have exact matches for either of these clauses +docs-builder changelog bundle --input-products "cloud-serverless 2025-12-02 ga, elasticsearch 9.3.0 beta" + +# Bundle all elasticsearch changelogs regardless of target or lifecycle +docs-builder changelog bundle --input-products "elasticsearch * *" + +# Bundle all cloud-serverless 2025-12-02 changelogs with any lifecycle +docs-builder changelog bundle --input-products "cloud-serverless 2025-12-02 *" + +# Bundle any cloud-serverless changelogs with target starting with "2025-11-" and "ga" lifecycle +docs-builder changelog bundle --input-products "cloud-serverless 2025-11-* ga" + +# Bundle all changelogs (equivalent to --all) +docs-builder changelog bundle --input-products "* * *" +``` + +If you have changelog files that reference those product details, the command creates a file like this: + +```yaml +products: <1> +- product: cloud-serverless + target: 2025-12-02 +- product: cloud-serverless + target: 2025-12-06 +entries: +- file: + name: 1765495972-fixes-enrich-and-lookup-join-resolution-based-on-m.yaml + checksum: 6c3243f56279b1797b5dfff6c02ebf90b9658464 +- file: + name: 1765507778-break-on-fielddata-when-building-global-ordinals.yaml + checksum: 70d197d96752c05b6595edffe6fe3ba3d055c845 +``` + +1. By default these values match your `--input-products` (even if the changelogs have more products). +To specify different product metadata, use the `--output-products` option. + +If you add the `--resolve` option, the contents of each changelog will be included in the output file. + +### Filter by pull requests [changelog-bundle-pr] + +You can use the `--prs` option to create a bundle of the changelogs that relate to those pull requests. +You can provide either a comma-separated list of PRs (`--prs "https://github.com/owner/repo/pull/123,12345"`) or a path to a newline-delimited file (`--prs /path/to/file.txt`). +In the latter case, the file should contain one PR URL or number per line. + +Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`), a short format (such as `owner/repo#123`), or just a number (in which case you must also provide `--owner` and `--repo` options). + +```sh +docs-builder changelog bundle --prs "108875,135873,136886" \ <1> + --repo elasticsearch \ <2> + --owner elastic \ <3> + --output-products "elasticsearch 9.2.2 ga" <4> +``` + +1. The comma-separated list of pull request numbers to seek. +2. The repository in the pull request URLs. This option is not required if you specify the short or full PR URLs in the `--prs` option. +3. The owner in the pull request URLs. This option is not required if you specify the short or full PR URLs in the `--prs` option. +4. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. 
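+For reference, the same pull request can be selected with any of the accepted identifier forms; only the bare-number form needs `--owner` and `--repo`:
+
+```sh
+# Full URL
+docs-builder changelog bundle --prs "https://github.com/elastic/elasticsearch/pull/108875"
+# Short owner/repo#number form
+docs-builder changelog bundle --prs "elastic/elasticsearch#108875"
+# Bare number: --owner and --repo are required
+docs-builder changelog bundle --prs "108875" --owner elastic --repo elasticsearch
+```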
+ +If you have changelog files that reference those pull requests, the command creates a file like this: + +```yaml +products: +- product: elasticsearch + target: 9.2.2 + lifecycle: ga +entries: +- file: + name: 1765507819-fix-ml-calendar-event-update-scalability-issues.yaml + checksum: 069b59edb14594e0bc3b70365e81626bde730ab7 +- file: + name: 1765507798-convert-bytestransportresponse-when-proxying-respo.yaml + checksum: c6dbd4730bf34dbbc877c16c042e6578dd108b62 +- file: + name: 1765507839-use-ivf_pq-for-gpu-index-build-for-large-datasets.yaml + checksum: 451d60283fe5df426f023e824339f82c2900311e +``` + +If you add the `--resolve` option, the contents of each changelog will be included in the output file. + +### Filter by pull request file [changelog-bundle-file] + +If you have a file that lists pull requests (such as PRs associated with a GitHub release): + +```txt +https://github.com/elastic/elasticsearch/pull/108875 +https://github.com/elastic/elasticsearch/pull/135873 +https://github.com/elastic/elasticsearch/pull/136886 +https://github.com/elastic/elasticsearch/pull/137126 +``` + +You can use the `--prs` option with a file path to create a bundle of the changelogs that relate to those pull requests. You can also combine multiple `--prs` options: + +```sh +./docs-builder changelog bundle \ + --prs "https://github.com/elastic/elasticsearch/pull/108875,135873" \ <1> + --prs test/9.2.2.txt \ <2> + --output-products "elasticsearch 9.2.2 ga" <3> + --resolve <4> +``` + +1. Comma-separated list of pull request URLs or numbers. +2. The path for the file that lists the pull requests. If the file contains only PR numbers, you must add `--repo` and `--owner` command options. +3. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. +4. Optionally include the contents of each changelog in the output file. + +If you have changelog files that reference those pull requests, the command creates a file like this: + +```yaml +products: +- product: elasticsearch + target: 9.2.2 + lifecycle: ga +entries: +- file: + name: 1765507778-break-on-fielddata-when-building-global-ordinals.yaml + checksum: 70d197d96752c05b6595edffe6fe3ba3d055c845 + type: bug-fix + title: Break on FieldData when building global ordinals + products: + - product: elasticsearch + areas: + - Aggregations + pr: https://github.com/elastic/elasticsearch/pull/108875 +... +``` + +:::{note} +When a changelog matches multiple `--input-products` filters, it appears only once in the bundle. This deduplication applies even when using `--all` or `--prs`. +::: + +### Output file location + +The `--output` option supports two formats: + +1. **Directory path**: If you specify a directory path (without a filename), the command creates `changelog-bundle.yaml` in that directory: + + ```sh + docs-builder changelog bundle --all --output /path/to/output/dir + # Creates /path/to/output/dir/changelog-bundle.yaml + ``` + +2. **File path**: If you specify a file path ending in `.yml` or `.yaml`, the command uses that exact path: + + ```sh + docs-builder changelog bundle --all --output /path/to/custom-bundle.yaml + # Creates /path/to/custom-bundle.yaml + ``` + +If you specify a file path with a different extension (not `.yml` or `.yaml`), the command returns an error. + +## Create documentation [render-changelogs] + +The `docs-builder changelog render` command creates markdown files from changelog bundles for documentation purposes. 
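+As a quick orientation, the three commands chain together like this; the title, pull request, and paths in this sketch are hypothetical:
+
+```sh
+# 1. Create one changelog per notable change
+docs-builder changelog add --title "Fix feature X" --type bug-fix \
+  --products "elasticsearch 9.2.2 ga" --prs "https://github.com/elastic/elasticsearch/pull/12345"
+
+# 2. Collect the changelogs for the release into a resolved bundle
+docs-builder changelog bundle --input-products "elasticsearch 9.2.2 *" \
+  --output /abs/path/changelog-bundle.yaml --resolve
+
+# 3. Render markdown release notes from the bundle
+docs-builder changelog render --input "/abs/path/changelog-bundle.yaml" \
+  --title 9.2.2 --output /abs/path/release-notes
+```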
+For up-to-date details, use the `-h` command option:
+
+```sh
+Render bundled changelog(s) to markdown files
+
+Options:
+  --input    Required: Bundle input(s) in format "bundle-file-path|changelog-file-path|repo|link-visibility" (use pipe as delimiter). To merge multiple bundles, separate them with commas. Only bundle-file-path is required. link-visibility can be "hide-links" or "keep-links" (default). Paths must be absolute or use environment variables; tilde (~) expansion is not supported. [Required]
+  --output    Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null]
+  --title    Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null]
+  --subsections    Optional: Group entries by area/component in subsections. For breaking changes with a subtype, groups by subtype instead of area. Defaults to false
+  --hide-features    Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out in the markdown output. [Default: null]
+  --config    Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null]
+```
+
+Before you can use this command you must create changelog files and collect them into bundles.
+For example, the `docs-builder changelog bundle` command creates a file like this:
+
+```yaml
+products:
+- product: elasticsearch
+  target: 9.2.2
+entries:
+- file:
+    name: 1765581721-convert-bytestransportresponse-when-proxying-respo.yaml
+    checksum: d7e74edff1bdd3e23ba4f2f88b92cf61cc7d490a
+- file:
+    name: 1765581721-fix-ml-calendar-event-update-scalability-issues.yaml
+    checksum: dfafce50c9fd61c3d8db286398f9553e67737f07
+- file:
+    name: 1765581651-break-on-fielddata-when-building-global-ordinals.yaml
+    checksum: 704b25348d6daff396259216201053334b5b3c1d
+```
+
+To create markdown files from this bundle, run the `docs-builder changelog render` command:
+
+```sh
+docs-builder changelog render \
+  --input "/path/to/changelog-bundle.yaml|/path/to/changelogs|elasticsearch|keep-links,/path/to/other-bundle.yaml|/path/to/other-changelogs|kibana|hide-links" \ <1>
+  --title 9.2.2 \ <2>
+  --output /path/to/release-notes \ <3>
+  --subsections <4>
+```
+
+1. Provide information about the changelog bundle(s). The format for each bundle is `"bundle-file-path|changelog-file-path|repo|link-visibility"` using pipe (`|`) as delimiter. To merge multiple bundles, separate them with commas (`,`). Only the `bundle-file-path` is required for each bundle. The `changelog-file-path` is useful if the changelogs are not in the default directory and are not resolved within the bundle. The `repo` is necessary if your changelogs do not contain full URLs for the pull requests or issues. The `link-visibility` can be `hide-links` or `keep-links` (default) to control whether PR/issue links are hidden for entries from private repositories.
+2. The `--title` value is used for an output folder name and for section titles in the markdown files. If you omit `--title` and the first bundle contains a product `target` value, that value is used. Otherwise, if none of the bundles have product `target` fields, the title defaults to "unknown".
+3. By default the command creates the output files in the current directory.
+4. By default the changelog areas are not displayed in the output. Add `--subsections` to group changelog details by their `areas`. For breaking changes that have a `subtype` value, the subsections will be grouped by subtype instead of area.
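+The paths in `--input` must be absolute or use environment variables (see the note that follows). For example, with a hypothetical `$WORKSPACE` variable:
+
+```sh
+docs-builder changelog render \
+  --input "$WORKSPACE/changelog-bundle.yaml|$WORKSPACE/changelogs|elasticsearch|keep-links" \
+  --title 9.2.2 \
+  --output "$WORKSPACE/release-notes"
+```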
+ +:::{important} +Paths in the `--input` option must be absolute paths or use environment variables. Tilde (`~`) expansion is not supported. +::: + +For example, the `index.md` output file contains information derived from the changelogs: + +```md +## 9.2.2 [elastic-release-notes-9.2.2] + +### Fixes [elastic-9.2.2-fixes] + +**Network** +* Convert BytesTransportResponse when proxying response from/to local node. [#135873](https://github.com/elastic/elastic/pull/135873) + +**Machine Learning** +* Fix ML calendar event update scalability issues. [#136886](https://github.com/elastic/elastic/pull/136886) + +**Aggregations** +* Break on FieldData when building global ordinals. [#108875](https://github.com/elastic/elastic/pull/108875) +``` + +To comment out the pull request and issue links, for example if they relate to a private repository, add `hide-links` to the `--input` option for that bundle. This allows you to selectively hide links per bundle when merging changelogs from multiple repositories. + +If you have changelogs with `feature-id` values and you want them to be omitted from the output, use the `--hide-features` option. +For more information, refer to [](/cli/release/changelog-render.md). diff --git a/docs/contribute/index.md b/docs/contribute/index.md index 21240317b..f782350dc 100644 --- a/docs/contribute/index.md +++ b/docs/contribute/index.md @@ -10,7 +10,7 @@ Use these guides for tasks like managing documentation files and folders, config - [Move files and folders](move.md): Move files or folders and automatically update all links in the documentation. - [Manage redirects across doc sets](redirects.md): Set up redirects when moving or deleting pages to prevent broken links. -- [Add changelog entries](changelog.md): Create changelog fragments using the command-line interface. +- [](changelog.md): Create and bundle changelogs using the command-line interface. ## Repository management diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs new file mode 100644 index 000000000..1fc3f2ff9 --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs @@ -0,0 +1,22 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Input for a single bundle file with optional directory, repo, and link visibility +/// +public class BundleInput +{ + public string BundleFile { get; set; } = string.Empty; + public string? Directory { get; set; } + public string? Repo { get; set; } + /// + /// Whether to hide PR/issue links for entries from this bundle. + /// When true, links are commented out in the markdown output. + /// Defaults to false (links are shown). + /// + public bool HideLinks { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs new file mode 100644 index 000000000..d07e3c521 --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs @@ -0,0 +1,47 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. 
+// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Data structure for bundled changelog YAML file +/// +public class BundledChangelogData +{ + public List Products { get; set; } = []; + public List Entries { get; set; } = []; +} + +public class BundledProduct +{ + public string Product { get; set; } = string.Empty; + public string? Target { get; set; } + public string? Lifecycle { get; set; } +} + +public class BundledEntry +{ + public BundledFile File { get; set; } = new(); + + // Resolved changelog fields (only populated when --resolve is used) + public string? Type { get; set; } + public string? Title { get; set; } + public List? Products { get; set; } + public string? Description { get; set; } + public string? Impact { get; set; } + public string? Action { get; set; } + public string? FeatureId { get; set; } + public bool? Highlight { get; set; } + public string? Subtype { get; set; } + public List? Areas { get; set; } + public string? Pr { get; set; } + public List? Issues { get; set; } +} + +public class BundledFile +{ + public string Name { get; set; } = string.Empty; + public string Checksum { get; set; } = string.Empty; +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs new file mode 100644 index 000000000..8a79bd5f5 --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -0,0 +1,22 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Input data for bundling changelog files +/// +public class ChangelogBundleInput +{ + public string Directory { get; set; } = string.Empty; + public string? Output { get; set; } + public bool All { get; set; } + public List? InputProducts { get; set; } + public List? OutputProducts { get; set; } + public bool Resolve { get; set; } + public string[]? Prs { get; set; } + public string? Owner { get; set; } + public string? Repo { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs index 2a6268d3c..03876ffaf 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs @@ -2,6 +2,8 @@ // Elasticsearch B.V licenses this file to you under the Apache 2.0 License. // See the LICENSE file in the project root for more information +using System; + namespace Elastic.Documentation.Services.Changelog; /// @@ -64,6 +66,33 @@ public class ChangelogConfiguration /// public Dictionary>? AddBlockers { get; set; } - public static ChangelogConfiguration Default => new(); + /// + /// Configuration for blocking changelogs from being rendered (commented out in markdown output) + /// Dictionary key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") + /// Dictionary value contains areas and/or types that should be blocked for those products + /// Changelogs matching any product key and any area/type in the corresponding entry will be commented out + /// + public Dictionary? 
RenderBlockers { get; set; } + + private static readonly Lazy DefaultLazy = new(() => new ChangelogConfiguration()); + + public static ChangelogConfiguration Default => DefaultLazy.Value; +} + +/// +/// Configuration entry for blocking changelogs during render +/// +public class RenderBlockersEntry +{ + /// + /// List of area values that should be blocked (commented out) during render + /// + public List? Areas { get; set; } + + /// + /// List of type values that should be blocked (commented out) during render + /// Types must exist in the available_types list (or default AvailableTypes if not specified) + /// + public List? Types { get; set; } } diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs index 0d1c0f738..d7ef6aaa6 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs @@ -5,7 +5,7 @@ namespace Elastic.Documentation.Services.Changelog; /// -/// Input data for creating a changelog fragment +/// Input data for creating a changelog /// public class ChangelogInput { diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs new file mode 100644 index 000000000..c9b86586a --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs @@ -0,0 +1,19 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Input data for rendering changelog bundle to markdown +/// +public class ChangelogRenderInput +{ + public List Bundles { get; set; } = []; + public string? Output { get; set; } + public string? Title { get; set; } + public bool Subsections { get; set; } + public string[]? HideFeatures { get; set; } + public string? 
Config { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs index 6aa2b85e8..3785b2f64 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs @@ -10,5 +10,10 @@ namespace Elastic.Documentation.Services.Changelog; [YamlSerializable(typeof(ChangelogData))] [YamlSerializable(typeof(ProductInfo))] [YamlSerializable(typeof(ChangelogConfiguration))] +[YamlSerializable(typeof(RenderBlockersEntry))] +[YamlSerializable(typeof(BundledChangelogData))] +[YamlSerializable(typeof(BundledProduct))] +[YamlSerializable(typeof(BundledEntry))] +[YamlSerializable(typeof(BundledFile))] public partial class ChangelogYamlStaticContext; diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 976f47d4b..cbd004319 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -5,6 +5,9 @@ using System.Globalization; using System.IO.Abstractions; using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.RegularExpressions; using Elastic.Documentation.Configuration; using Elastic.Documentation.Diagnostics; using Elastic.Documentation.Services.Changelog; @@ -14,7 +17,7 @@ using YamlDotNet.Serialization.NamingConventions; namespace Elastic.Documentation.Services; -public class ChangelogService( +public partial class ChangelogService( ILoggerFactory logFactory, IConfigurationContext configurationContext, IGitHubPrService? githubPrService = null @@ -24,6 +27,20 @@ public class ChangelogService( private readonly IFileSystem _fileSystem = new FileSystem(); private readonly IGitHubPrService? _githubPrService = githubPrService; + private static class ChangelogEntryTypes + { + public const string Feature = "feature"; + public const string Enhancement = "enhancement"; + public const string Security = "security"; + public const string BugFix = "bug-fix"; + public const string BreakingChange = "breaking-change"; + public const string Deprecation = "deprecation"; + public const string KnownIssue = "known-issue"; + public const string Docs = "docs"; + public const string Regression = "regression"; + public const string Other = "other"; + } + public async Task CreateChangelog( IDiagnosticsCollector collector, ChangelogInput input, @@ -383,7 +400,7 @@ Cancel ctx return true; } - private async Task LoadChangelogConfiguration( + internal async Task LoadChangelogConfiguration( IDiagnosticsCollector collector, string? 
configPath, Cancel ctx @@ -436,25 +453,66 @@ Cancel ctx var defaultConfig = ChangelogConfiguration.Default; var validProductIds = configurationContext.ProductsConfiguration.Products.Keys.ToHashSet(StringComparer.OrdinalIgnoreCase); - // Validate available_types - foreach (var type in config.AvailableTypes.Where(t => !defaultConfig.AvailableTypes.Contains(t))) + // If available_types is not specified or empty, use defaults + if (config.AvailableTypes == null || config.AvailableTypes.Count == 0) + { + config.AvailableTypes = defaultConfig.AvailableTypes.ToList(); + } + else + { + // Validate available_types - must be subset of defaults + foreach (var type in config.AvailableTypes.Where(t => !defaultConfig.AvailableTypes.Contains(t))) + { + collector.EmitError(finalConfigPath, $"Type '{type}' in changelog.yml is not in the list of available types. Available types: {string.Join(", ", defaultConfig.AvailableTypes)}"); + return null; + } + } + + // If available_subtypes is not specified or empty, use defaults + if (config.AvailableSubtypes == null || config.AvailableSubtypes.Count == 0) + { + config.AvailableSubtypes = defaultConfig.AvailableSubtypes.ToList(); + } + else { - collector.EmitError(finalConfigPath, $"Type '{type}' in changelog.yml is not in the list of available types. Available types: {string.Join(", ", defaultConfig.AvailableTypes)}"); - return null; + // Validate available_subtypes - must be subset of defaults + foreach (var subtype in config.AvailableSubtypes.Where(s => !defaultConfig.AvailableSubtypes.Contains(s))) + { + collector.EmitError(finalConfigPath, $"Subtype '{subtype}' in changelog.yml is not in the list of available subtypes. Available subtypes: {string.Join(", ", defaultConfig.AvailableSubtypes)}"); + return null; + } } - // Validate available_subtypes - foreach (var subtype in config.AvailableSubtypes.Where(s => !defaultConfig.AvailableSubtypes.Contains(s))) + // If available_lifecycles is not specified or empty, use defaults + if (config.AvailableLifecycles == null || config.AvailableLifecycles.Count == 0) { - collector.EmitError(finalConfigPath, $"Subtype '{subtype}' in changelog.yml is not in the list of available subtypes. Available subtypes: {string.Join(", ", defaultConfig.AvailableSubtypes)}"); - return null; + config.AvailableLifecycles = defaultConfig.AvailableLifecycles.ToList(); + } + else + { + // Validate available_lifecycles - must be subset of defaults + foreach (var lifecycle in config.AvailableLifecycles.Where(l => !defaultConfig.AvailableLifecycles.Contains(l))) + { + collector.EmitError(finalConfigPath, $"Lifecycle '{lifecycle}' in changelog.yml is not in the list of available lifecycles. Available lifecycles: {string.Join(", ", defaultConfig.AvailableLifecycles)}"); + return null; + } } - // Validate available_lifecycles - foreach (var lifecycle in config.AvailableLifecycles.Where(l => !defaultConfig.AvailableLifecycles.Contains(l))) + // Validate render_blockers types against available_types + if (config.RenderBlockers != null) { - collector.EmitError(finalConfigPath, $"Lifecycle '{lifecycle}' in changelog.yml is not in the list of available lifecycles. 
Available lifecycles: {string.Join(", ", defaultConfig.AvailableLifecycles)}"); - return null; + foreach (var (productKey, blockersEntry) in config.RenderBlockers) + { + if (blockersEntry?.Types != null && blockersEntry.Types.Count > 0) + { + var invalidType = blockersEntry.Types.FirstOrDefault(type => !config.AvailableTypes.Contains(type)); + if (invalidType != null) + { + collector.EmitError(finalConfigPath, $"Type '{invalidType}' in render_blockers for '{productKey}' is not in the list of available types. Available types: {string.Join(", ", config.AvailableTypes)}"); + return null; + } + } + } } // Validate available_products (if specified) - must be from products.yml @@ -783,5 +841,2248 @@ private static List MapLabelsToAreas(string[] labels, Dictionary BundleChangelogs( + IDiagnosticsCollector collector, + ChangelogBundleInput input, + Cancel ctx + ) + { + try + { + // Validate input + if (string.IsNullOrWhiteSpace(input.Directory)) + { + collector.EmitError(string.Empty, "Directory is required"); + return false; + } + + if (!_fileSystem.Directory.Exists(input.Directory)) + { + collector.EmitError(input.Directory, "Directory does not exist"); + return false; + } + + // Validate filter options + var specifiedFilters = new List(); + if (input.All) + specifiedFilters.Add("--all"); + if (input.InputProducts is { Count: > 0 }) + specifiedFilters.Add("--input-products"); + if (input.Prs is { Length: > 0 }) + specifiedFilters.Add("--prs"); + + if (specifiedFilters.Count == 0) + { + collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --input-products, or --prs"); + return false; + } + + if (specifiedFilters.Count > 1) + { + collector.EmitError(string.Empty, $"Multiple filter options cannot be specified together. You specified: {string.Join(", ", specifiedFilters)}. Please use only one filter option: --all, --input-products, or --prs"); + return false; + } + + // Build product filter patterns (with wildcard support) + var productFilters = new List<(string? productPattern, string? targetPattern, string? lifecyclePattern)>(); + if (input.InputProducts is { Count: > 0 }) + { + foreach (var product in input.InputProducts) + { + productFilters.Add(( + product.Product == "*" ? null : product.Product, + product.Target == "*" ? null : product.Target, + product.Lifecycle == "*" ? null : product.Lifecycle + )); + } + } + + // Helper function to check if a string matches a pattern (supports wildcards) + static bool MatchesPattern(string? value, string? 
pattern) + { + if (pattern == null) + return true; // Wildcard matches anything (including null/empty) + + if (value == null) + return false; // Non-wildcard pattern doesn't match null + + // If pattern ends with *, do prefix match + if (pattern.EndsWith('*')) + { + var prefix = pattern[..^1]; + return value.StartsWith(prefix, StringComparison.OrdinalIgnoreCase); + } + + // Exact match (case-insensitive) + return string.Equals(value, pattern, StringComparison.OrdinalIgnoreCase); + } + + // Load PRs - check if --prs contains a file path or a list of PRs + var prsToMatch = new HashSet(StringComparer.OrdinalIgnoreCase); + if (input.Prs is { Length: > 0 }) + { + // If there's exactly one value, check if it's a file path + if (input.Prs.Length == 1) + { + var singleValue = input.Prs[0]; + + // Check if it's a URL - URLs should always be treated as PRs, not file paths + var isUrl = singleValue.StartsWith("http://", StringComparison.OrdinalIgnoreCase) || + singleValue.StartsWith("https://", StringComparison.OrdinalIgnoreCase); + + if (isUrl) + { + // Treat as PR identifier + _ = prsToMatch.Add(singleValue); + } + else if (_fileSystem.File.Exists(singleValue)) + { + // File exists, read PRs from it + var prsFileContent = await _fileSystem.File.ReadAllTextAsync(singleValue, ctx); + var prsFromFile = prsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToArray(); + + foreach (var pr in prsFromFile) + { + _ = prsToMatch.Add(pr); + } + } + else + { + // Check if it's in short PR format (owner/repo#number) before treating as file path + var hashIndex = singleValue.LastIndexOf('#'); + var isShortPrFormat = false; + if (hashIndex > 0 && hashIndex < singleValue.Length - 1) + { + var repoPart = singleValue[..hashIndex]; + var prPart = singleValue[(hashIndex + 1)..]; + var repoParts = repoPart.Split('/'); + // Check if it matches owner/repo#number format + if (repoParts.Length == 2 && int.TryParse(prPart, out _)) + { + isShortPrFormat = true; + _ = prsToMatch.Add(singleValue); + } + } + + if (!isShortPrFormat) + { + // Check if it looks like a file path (contains path separators or has extension) + var looksLikeFilePath = singleValue.Contains(_fileSystem.Path.DirectorySeparatorChar) || + singleValue.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(singleValue); + + if (looksLikeFilePath) + { + // File path doesn't exist - if there are no other PRs, return error; otherwise emit warning + if (prsToMatch.Count == 0) + { + collector.EmitError(singleValue, $"File does not exist: {singleValue}"); + return false; + } + else + { + collector.EmitWarning(singleValue, $"File does not exist, skipping: {singleValue}"); + } + } + else + { + // Doesn't look like a file path, treat as PR identifier + _ = prsToMatch.Add(singleValue); + } + } + } + } + else + { + // Multiple values - process all values first, then check for errors + var nonExistentFiles = new List(); + foreach (var value in input.Prs) + { + // Check if it's a URL - URLs should always be treated as PRs + var isUrl = value.StartsWith("http://", StringComparison.OrdinalIgnoreCase) || + value.StartsWith("https://", StringComparison.OrdinalIgnoreCase); + + if (isUrl) + { + // Treat as PR identifier + _ = prsToMatch.Add(value); + } + else if (_fileSystem.File.Exists(value)) + { + // File exists, read PRs from it + var prsFileContent = await _fileSystem.File.ReadAllTextAsync(value, ctx); + var prsFromFile = prsFileContent + 
.Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToArray(); + + foreach (var pr in prsFromFile) + { + _ = prsToMatch.Add(pr); + } + } + else + { + // Check if it's in short PR format (owner/repo#number) before treating as file path + var hashIndex = value.LastIndexOf('#'); + var isShortPrFormat = false; + if (hashIndex > 0 && hashIndex < value.Length - 1) + { + var repoPart = value[..hashIndex]; + var prPart = value[(hashIndex + 1)..]; + var repoParts = repoPart.Split('/'); + // Check if it matches owner/repo#number format + if (repoParts.Length == 2 && int.TryParse(prPart, out _)) + { + isShortPrFormat = true; + _ = prsToMatch.Add(value); + } + } + + if (!isShortPrFormat) + { + // Check if it looks like a file path + var looksLikeFilePath = value.Contains(_fileSystem.Path.DirectorySeparatorChar) || + value.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(value); + + if (looksLikeFilePath) + { + // Track non-existent files to check later + nonExistentFiles.Add(value); + } + else + { + // Doesn't look like a file path, treat as PR identifier + _ = prsToMatch.Add(value); + } + } + } + } + + // After processing all values, handle non-existent files + if (nonExistentFiles.Count > 0) + { + // If there are no valid PRs and we have non-existent files, return error + if (prsToMatch.Count == 0) + { + collector.EmitError(nonExistentFiles[0], $"File does not exist: {nonExistentFiles[0]}"); + return false; + } + else + { + // Emit warnings for non-existent files since we have valid PRs + foreach (var file in nonExistentFiles) + { + collector.EmitWarning(file, $"File does not exist, skipping: {file}"); + } + } + } + } + } + + // Validate that if any PR is just a number (not a URL and not in owner/repo#number format), + // then owner and repo must be provided + if (prsToMatch.Count > 0) + { + var hasNumericOnlyPr = false; + foreach (var pr in prsToMatch) + { + // Check if it's a URL - URLs don't need owner/repo + var isUrl = pr.StartsWith("http://", StringComparison.OrdinalIgnoreCase) || + pr.StartsWith("https://", StringComparison.OrdinalIgnoreCase); + + if (isUrl) + continue; + + // Check if it's in owner/repo#number format - these don't need owner/repo + var hashIndex = pr.LastIndexOf('#'); + if (hashIndex > 0 && hashIndex < pr.Length - 1) + { + var repoPart = pr[..hashIndex].Trim(); + var prPart = pr[(hashIndex + 1)..].Trim(); + var repoParts = repoPart.Split('/'); + // If it has a # and the part before # contains a /, it's likely owner/repo#number format + if (repoParts.Length == 2 && int.TryParse(prPart, out _)) + continue; + } + + // If it's just a number, it needs owner/repo + if (int.TryParse(pr, out _)) + { + hasNumericOnlyPr = true; + break; + } + } + + if (hasNumericOnlyPr && (string.IsNullOrWhiteSpace(input.Owner) || string.IsNullOrWhiteSpace(input.Repo))) + { + collector.EmitError(string.Empty, "When --prs contains PR numbers (not URLs or owner/repo#number format), both --owner and --repo must be provided"); + return false; + } + } + + // Product filters are already built above with wildcard support + + // Determine output path to exclude it from input files + var outputPath = input.Output ?? 
_fileSystem.Path.Combine(input.Directory, "changelog-bundle.yaml"); + var outputFileName = _fileSystem.Path.GetFileName(outputPath); + + // Read all YAML files from directory (exclude bundle files and output file) + var allYamlFiles = _fileSystem.Directory.GetFiles(input.Directory, "*.yaml", SearchOption.TopDirectoryOnly) + .Concat(_fileSystem.Directory.GetFiles(input.Directory, "*.yml", SearchOption.TopDirectoryOnly)) + .ToList(); + + var yamlFiles = new List(); + foreach (var filePath in allYamlFiles) + { + var fileName = _fileSystem.Path.GetFileName(filePath); + + // Exclude the output file + if (fileName.Equals(outputFileName, StringComparison.OrdinalIgnoreCase)) + continue; + + // Check if file is a bundle file by looking for "entries:" key (unique to bundle files) + try + { + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + // Bundle files have "entries:" at root level, changelog files don't + if (fileContent.Contains("entries:", StringComparison.Ordinal) && + fileContent.Contains("products:", StringComparison.Ordinal)) + { + _logger.LogDebug("Skipping bundle file: {FileName}", fileName); + continue; + } + } + catch (Exception ex) when (ex is not (OutOfMemoryException or StackOverflowException or ThreadAbortException)) + { + // If we can't read the file, skip it + _logger.LogWarning(ex, "Failed to read file {FileName} for bundle detection", fileName); + continue; + } + + yamlFiles.Add(filePath); + } + + if (yamlFiles.Count == 0) + { + collector.EmitError(input.Directory, "No YAML files found in directory"); + return false; + } + + _logger.LogInformation("Found {Count} YAML files in directory", yamlFiles.Count); + + // Deserialize and filter changelog files + var deserializer = new StaticDeserializerBuilder(new ChangelogYamlStaticContext()) + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .Build(); + + var changelogEntries = new List<(ChangelogData data, string filePath, string fileName, string checksum)>(); + var matchedPrs = new HashSet(StringComparer.OrdinalIgnoreCase); + var seenChangelogs = new HashSet(); // For deduplication (using checksum) + + foreach (var filePath in yamlFiles) + { + try + { + var fileName = _fileSystem.Path.GetFileName(filePath); + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + + // Compute checksum (SHA1) + var checksum = ComputeSha1(fileContent); + + // Deserialize YAML (skip comment lines) + var yamlLines = fileContent.Split('\n'); + var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); + + // Normalize "version:" to "target:" in products section for compatibility + // Some changelog files may use "version" instead of "target" + // Match "version:" with various indentation levels + var normalizedYaml = VersionToTargetRegex().Replace(yamlWithoutComments, "$1target:"); + + var data = deserializer.Deserialize(normalizedYaml); + + if (data == null) + { + _logger.LogWarning("Skipping file {FileName}: failed to deserialize", fileName); + continue; + } + + // Check for duplicates (using checksum) + if (seenChangelogs.Contains(checksum)) + { + _logger.LogDebug("Skipping duplicate changelog: {FileName} (checksum: {Checksum})", fileName, checksum); + continue; + } + + // Apply filters + if (input.All) + { + // Include all - no filtering needed + } + else if (productFilters.Count > 0) + { + // Filter by products with wildcard support + var matches = false; + foreach (var (productPattern, targetPattern, lifecyclePattern) in productFilters) + { + 
// Check if any product in the changelog matches this filter + foreach (var changelogProduct in data.Products) + { + var productMatches = MatchesPattern(changelogProduct.Product, productPattern); + var targetMatches = MatchesPattern(changelogProduct.Target, targetPattern); + var lifecycleMatches = MatchesPattern(changelogProduct.Lifecycle, lifecyclePattern); + + if (productMatches && targetMatches && lifecycleMatches) + { + matches = true; + break; + } + } + + if (matches) + break; + } + + if (!matches) + { + continue; + } + } + else if (prsToMatch.Count > 0) + { + // Filter by PRs + var matches = false; + if (!string.IsNullOrWhiteSpace(data.Pr)) + { + // Normalize PR for comparison + var normalizedPr = NormalizePrForComparison(data.Pr, input.Owner, input.Repo); + foreach (var pr in prsToMatch) + { + var normalizedPrToMatch = NormalizePrForComparison(pr, input.Owner, input.Repo); + if (normalizedPr == normalizedPrToMatch) + { + matches = true; + _ = matchedPrs.Add(pr); + break; + } + } + } + + if (!matches) + { + continue; + } + } + + // Add to seen set and entries list + _ = seenChangelogs.Add(checksum); + changelogEntries.Add((data, filePath, fileName, checksum)); + } + catch (YamlException ex) + { + _logger.LogWarning(ex, "Failed to parse YAML file {FilePath}", filePath); + collector.EmitError(filePath, $"Failed to parse YAML: {ex.Message}"); + continue; + } + catch (Exception ex) when (ex is not (OutOfMemoryException or StackOverflowException or ThreadAbortException)) + { + _logger.LogWarning(ex, "Error processing file {FilePath}", filePath); + collector.EmitError(filePath, $"Error processing file: {ex.Message}"); + continue; + } + } + + // Warn about unmatched PRs if filtering by PRs + if (prsToMatch.Count > 0) + { + var unmatchedPrs = prsToMatch.Where(pr => !matchedPrs.Contains(pr)).ToList(); + if (unmatchedPrs.Count > 0) + { + foreach (var unmatchedPr in unmatchedPrs) + { + collector.EmitWarning(string.Empty, $"No changelog file found for PR: {unmatchedPr}"); + } + } + } + + _logger.LogInformation("Found {Count} matching changelog entries", changelogEntries.Count); + + // Build bundled data + var bundledData = new BundledChangelogData(); + + // Set products array in output + // If --output-products was specified, use those values (override any from changelogs) + if (input.OutputProducts is { Count: > 0 }) + { + bundledData.Products = input.OutputProducts + .OrderBy(p => p.Product) + .ThenBy(p => p.Target ?? string.Empty) + .ThenBy(p => p.Lifecycle ?? string.Empty) + .Select(p => new BundledProduct + { + Product = p.Product, + Target = p.Target == "*" ? null : p.Target, + Lifecycle = p.Lifecycle == "*" ? null : p.Lifecycle + }) + .ToList(); + } + // If --input-products was specified (and --output-products was not), extract from matched changelog entries + // This ensures the products array reflects the actual values from the changelogs, not the filter + else if (input.InputProducts is { Count: > 0 } && changelogEntries.Count > 0) + { + var productVersions = new HashSet<(string product, string version, string? lifecycle)>(); + foreach (var (data, _, _, _) in changelogEntries) + { + foreach (var product in data.Products) + { + var version = product.Target ?? string.Empty; + _ = productVersions.Add((product.Product, version, product.Lifecycle)); + } + } + + bundledData.Products = productVersions + .OrderBy(pv => pv.product) + .ThenBy(pv => pv.version) + .ThenBy(pv => pv.lifecycle ?? 
string.Empty) + .Select(pv => new BundledProduct + { + Product = pv.product, + Target = string.IsNullOrWhiteSpace(pv.version) ? null : pv.version, + Lifecycle = pv.lifecycle + }) + .ToList(); + } + // Otherwise, extract unique products/versions/lifecycles from changelog entries + else if (changelogEntries.Count > 0) + { + var productVersions = new HashSet<(string product, string version, string? lifecycle)>(); + foreach (var (data, _, _, _) in changelogEntries) + { + foreach (var product in data.Products) + { + var version = product.Target ?? string.Empty; + _ = productVersions.Add((product.Product, version, product.Lifecycle)); + } + } + + bundledData.Products = productVersions + .OrderBy(pv => pv.product) + .ThenBy(pv => pv.version) + .ThenBy(pv => pv.lifecycle ?? string.Empty) + .Select(pv => new BundledProduct + { + Product = pv.product, + Target = string.IsNullOrWhiteSpace(pv.version) ? null : pv.version, + Lifecycle = pv.lifecycle + }) + .ToList(); + } + else + { + // No entries and no products specified - initialize to empty list + bundledData.Products = []; + } + + // Check if we should allow empty result + if (changelogEntries.Count == 0) + { + collector.EmitError(string.Empty, "No changelog entries matched the filter criteria"); + return false; + } + + // Check for products with same product ID but different versions + var productsByProductId = bundledData.Products.GroupBy(p => p.Product, StringComparer.OrdinalIgnoreCase) + .Where(g => g.Count() > 1) + .ToList(); + + foreach (var productGroup in productsByProductId) + { + var targets = productGroup.Select(p => + { + var target = string.IsNullOrWhiteSpace(p.Target) ? "(no target)" : p.Target; + if (!string.IsNullOrWhiteSpace(p.Lifecycle)) + { + target = $"{target} {p.Lifecycle}"; + } + return target; + }).ToList(); + collector.EmitWarning(string.Empty, $"Product '{productGroup.Key}' has multiple targets in bundle: {string.Join(", ", targets)}"); + } + + // Build entries + if (changelogEntries.Count == 0) + { + // No entries - initialize to empty list + bundledData.Entries = []; + } + else if (input.Resolve) + { + // When resolving, include changelog contents and validate required fields + var resolvedEntries = new List(); + foreach (var (data, filePath, fileName, checksum) in changelogEntries) + { + // Validate required fields + if (string.IsNullOrWhiteSpace(data.Title)) + { + collector.EmitError(filePath, "Changelog file is missing required field: title"); + return false; + } + + if (string.IsNullOrWhiteSpace(data.Type)) + { + collector.EmitError(filePath, "Changelog file is missing required field: type"); + return false; + } + + if (data.Products == null || data.Products.Count == 0) + { + collector.EmitError(filePath, "Changelog file is missing required field: products"); + return false; + } + + // Validate products have required fields + if (data.Products.Any(product => string.IsNullOrWhiteSpace(product.Product))) + { + collector.EmitError(filePath, "Changelog file has product entry missing required field: product"); + return false; + } + + resolvedEntries.Add(new BundledEntry + { + File = new BundledFile + { + Name = fileName, + Checksum = checksum + }, + Type = data.Type, + Title = data.Title, + Products = data.Products, + Description = data.Description, + Impact = data.Impact, + Action = data.Action, + FeatureId = data.FeatureId, + Highlight = data.Highlight, + Subtype = data.Subtype, + Areas = data.Areas, + Pr = data.Pr, + Issues = data.Issues + }); + } + + bundledData.Entries = resolvedEntries; + } + else + { + // Only 
include file information + bundledData.Entries = changelogEntries + .Select(e => new BundledEntry + { + File = new BundledFile + { + Name = e.fileName, + Checksum = e.checksum + } + }) + .ToList(); + } + + // Generate bundled YAML + var bundleSerializer = new StaticSerializerBuilder(new ChangelogYamlStaticContext()) + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .ConfigureDefaultValuesHandling(DefaultValuesHandling.OmitNull | DefaultValuesHandling.OmitEmptyCollections) + .Build(); + + var bundledYaml = bundleSerializer.Serialize(bundledData); + + // Output path was already determined above when filtering files + var outputDir = _fileSystem.Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(outputDir) && !_fileSystem.Directory.Exists(outputDir)) + { + _ = _fileSystem.Directory.CreateDirectory(outputDir); + } + + // If output file already exists, generate a unique filename + if (_fileSystem.File.Exists(outputPath)) + { + var directory = _fileSystem.Path.GetDirectoryName(outputPath) ?? string.Empty; + var fileNameWithoutExtension = _fileSystem.Path.GetFileNameWithoutExtension(outputPath); + var extension = _fileSystem.Path.GetExtension(outputPath); + var timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(); + var uniqueFileName = $"{fileNameWithoutExtension}-{timestamp}{extension}"; + outputPath = _fileSystem.Path.Combine(directory, uniqueFileName); + _logger.LogInformation("Output file already exists, using unique filename: {OutputPath}", outputPath); + } + + // Write bundled file + await _fileSystem.File.WriteAllTextAsync(outputPath, bundledYaml, ctx); + _logger.LogInformation("Created bundled changelog: {OutputPath}", outputPath); + + return true; + } + catch (OperationCanceledException) + { + throw; + } + catch (IOException ioEx) + { + collector.EmitError(string.Empty, $"IO error bundling changelogs: {ioEx.Message}", ioEx); + return false; + } + catch (UnauthorizedAccessException uaEx) + { + collector.EmitError(string.Empty, $"Access denied bundling changelogs: {uaEx.Message}", uaEx); + return false; + } + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Security", "CA5350:Do not use insecure cryptographic algorithm SHA1", Justification = "SHA1 is required for compatibility with existing changelog bundle format")] + private static string ComputeSha1(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = SHA1.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + [GeneratedRegex(@"(\s+)version:", RegexOptions.Multiline)] + private static partial Regex VersionToTargetRegex(); + + [GeneratedRegex(@"github\.com/([^/]+)/([^/]+)/pull/(\d+)", RegexOptions.IgnoreCase)] + private static partial Regex GitHubPrUrlRegex(); + + private static string NormalizePrForComparison(string pr, string? defaultOwner, string? 
defaultRepo) + { + // Parse PR using the same logic as GitHubPrService.ParsePrUrl + // Return a normalized format (owner/repo#number) for comparison + + // Trim whitespace first + pr = pr.Trim(); + + // Handle full URL: https://github.com/owner/repo/pull/123 + if (pr.StartsWith("https://github.com/", StringComparison.OrdinalIgnoreCase) || + pr.StartsWith("http://github.com/", StringComparison.OrdinalIgnoreCase)) + { + // Use regex to parse URL more reliably + var match = GitHubPrUrlRegex().Match(pr); + if (match.Success && match.Groups.Count >= 4) + { + var owner = match.Groups[1].Value.Trim(); + var repo = match.Groups[2].Value.Trim(); + var prPart = match.Groups[3].Value.Trim(); + if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo) && + int.TryParse(prPart, out var prNum)) + { + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } + } + + // Fallback to URI parsing if regex fails + try + { + var uri = new Uri(pr); + var segments = uri.Segments; + // segments[0] is "/", segments[1] is "owner/", segments[2] is "repo/", segments[3] is "pull/", segments[4] is "123" + if (segments.Length >= 5 && segments[3].Equals("pull/", StringComparison.OrdinalIgnoreCase)) + { + var owner = segments[1].TrimEnd('/').Trim(); + var repo = segments[2].TrimEnd('/').Trim(); + var prPart = segments[4].TrimEnd('/').Trim(); + if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo) && + int.TryParse(prPart, out var prNum)) + { + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } + } + } + catch (UriFormatException) + { + // Invalid URI, fall through + } + } + + // Handle short format: owner/repo#123 + var hashIndex = pr.LastIndexOf('#'); + if (hashIndex > 0 && hashIndex < pr.Length - 1) + { + var repoPart = pr[..hashIndex].Trim(); + var prPart = pr[(hashIndex + 1)..].Trim(); + if (int.TryParse(prPart, out var prNum)) + { + var repoParts = repoPart.Split('/'); + if (repoParts.Length == 2) + { + var owner = repoParts[0].Trim(); + var repo = repoParts[1].Trim(); + if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo)) + { + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } + } + } + } + + // Handle just a PR number when owner/repo are provided + if (int.TryParse(pr, out var prNumber) && + !string.IsNullOrWhiteSpace(defaultOwner) && !string.IsNullOrWhiteSpace(defaultRepo)) + { + return $"{defaultOwner}/{defaultRepo}#{prNumber}".ToLowerInvariant(); + } + + // Return as-is for comparison (fallback) + return pr.ToLowerInvariant(); + } + + public async Task RenderChangelogs( + IDiagnosticsCollector collector, + ChangelogRenderInput input, + Cancel ctx + ) + { + try + { + // Validate input + if (input.Bundles == null || input.Bundles.Count == 0) + { + collector.EmitError(string.Empty, "At least one bundle file is required. 
Use --input to specify bundle files."); + return false; + } + + var deserializer = new StaticDeserializerBuilder(new ChangelogYamlStaticContext()) + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .Build(); + + // Validation phase: Load and validate all bundles before merging + var bundleDataList = new List<(BundledChangelogData data, BundleInput input, string directory)>(); + var seenFileNames = new Dictionary>(StringComparer.OrdinalIgnoreCase); // filename -> list of bundle files + var seenPrs = new Dictionary>(); // PR -> list of bundle files + var defaultRepo = "elastic"; + + foreach (var bundleInput in input.Bundles) + { + if (string.IsNullOrWhiteSpace(bundleInput.BundleFile)) + { + collector.EmitError(string.Empty, "Bundle file path is required for each --input"); + return false; + } + + if (!_fileSystem.File.Exists(bundleInput.BundleFile)) + { + collector.EmitError(bundleInput.BundleFile, "Bundle file does not exist"); + return false; + } + + // Load bundle file + var bundleContent = await _fileSystem.File.ReadAllTextAsync(bundleInput.BundleFile, ctx); + + // Validate bundle structure - check for unexpected fields by deserializing + BundledChangelogData? bundledData; + try + { + bundledData = deserializer.Deserialize(bundleContent); + } + catch (YamlException yamlEx) + { + collector.EmitError(bundleInput.BundleFile, $"Failed to deserialize bundle file: {yamlEx.Message}", yamlEx); + return false; + } + + if (bundledData == null) + { + collector.EmitError(bundleInput.BundleFile, "Failed to deserialize bundle file"); + return false; + } + + // Validate bundle has required structure + if (bundledData.Products == null) + { + collector.EmitError(bundleInput.BundleFile, "Bundle file is missing required field: products"); + return false; + } + + if (bundledData.Entries == null) + { + collector.EmitError(bundleInput.BundleFile, "Bundle file is missing required field: entries"); + return false; + } + + // Determine directory for resolving file references + var bundleDirectory = bundleInput.Directory ?? _fileSystem.Path.GetDirectoryName(bundleInput.BundleFile) ?? 
Directory.GetCurrentDirectory(); + + // Validate all referenced files exist and check for duplicates + var fileNamesInThisBundle = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var entry in bundledData.Entries) + { + // Track file names for duplicate detection + if (!string.IsNullOrWhiteSpace(entry.File?.Name)) + { + var fileName = entry.File.Name; + + // Check for duplicates within the same bundle + if (!fileNamesInThisBundle.Add(fileName)) + { + collector.EmitWarning(bundleInput.BundleFile, $"Changelog file '{fileName}' appears multiple times in the same bundle"); + } + + // Track across bundles + if (!seenFileNames.TryGetValue(fileName, out var bundleList)) + { + bundleList = []; + seenFileNames[fileName] = bundleList; + } + bundleList.Add(bundleInput.BundleFile); + } + + // If entry has resolved data, validate it + if (!string.IsNullOrWhiteSpace(entry.Title) && !string.IsNullOrWhiteSpace(entry.Type)) + { + + if (entry.Products == null || entry.Products.Count == 0) + { + collector.EmitError(bundleInput.BundleFile, $"Entry '{entry.Title}' in bundle is missing required field: products"); + return false; + } + + // Track PRs for duplicate detection + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + var normalizedPr = NormalizePrForComparison(entry.Pr, null, null); + if (!seenPrs.TryGetValue(normalizedPr, out var prBundleList)) + { + prBundleList = []; + seenPrs[normalizedPr] = prBundleList; + } + prBundleList.Add(bundleInput.BundleFile); + } + } + else + { + // Entry only has file reference - validate file exists + if (string.IsNullOrWhiteSpace(entry.File?.Name)) + { + collector.EmitError(bundleInput.BundleFile, "Entry in bundle is missing required field: file.name"); + return false; + } + + if (string.IsNullOrWhiteSpace(entry.File.Checksum)) + { + collector.EmitError(bundleInput.BundleFile, $"Entry for file '{entry.File.Name}' in bundle is missing required field: file.checksum"); + return false; + } + + var filePath = _fileSystem.Path.Combine(bundleDirectory, entry.File.Name); + if (!_fileSystem.File.Exists(filePath)) + { + collector.EmitError(bundleInput.BundleFile, $"Referenced changelog file '{entry.File.Name}' does not exist at path: {filePath}"); + return false; + } + + // Validate the changelog file can be deserialized + try + { + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + var checksum = ComputeSha1(fileContent); + if (checksum != entry.File.Checksum) + { + collector.EmitWarning(bundleInput.BundleFile, $"Checksum mismatch for file {entry.File.Name}. 
Expected {entry.File.Checksum}, got {checksum}"); + } + + // Deserialize YAML (skip comment lines) to validate structure + var yamlLines = fileContent.Split('\n'); + var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); + + // Normalize "version:" to "target:" in products section + var normalizedYaml = VersionToTargetRegex().Replace(yamlWithoutComments, "$1target:"); + + var entryData = deserializer.Deserialize(normalizedYaml); + if (entryData == null) + { + collector.EmitError(bundleInput.BundleFile, $"Failed to deserialize changelog file '{entry.File.Name}'"); + return false; + } + + // Validate required fields in changelog file + if (string.IsNullOrWhiteSpace(entryData.Title)) + { + collector.EmitError(filePath, "Changelog file is missing required field: title"); + return false; + } + + if (string.IsNullOrWhiteSpace(entryData.Type)) + { + collector.EmitError(filePath, "Changelog file is missing required field: type"); + return false; + } + + if (entryData.Products == null || entryData.Products.Count == 0) + { + collector.EmitError(filePath, "Changelog file is missing required field: products"); + return false; + } + + // Track PRs for duplicate detection + if (!string.IsNullOrWhiteSpace(entryData.Pr)) + { + var normalizedPr = NormalizePrForComparison(entryData.Pr, null, null); + if (!seenPrs.TryGetValue(normalizedPr, out var prBundleList2)) + { + prBundleList2 = []; + seenPrs[normalizedPr] = prBundleList2; + } + prBundleList2.Add(bundleInput.BundleFile); + } + } + catch (YamlException yamlEx) + { + collector.EmitError(filePath, $"Failed to parse changelog file: {yamlEx.Message}", yamlEx); + return false; + } + } + } + + bundleDataList.Add((bundledData, bundleInput, bundleDirectory)); + } + + // Check for duplicate file names across bundles + foreach (var (fileName, bundleFiles) in seenFileNames.Where(kvp => kvp.Value.Count > 1)) + { + var uniqueBundles = bundleFiles.Distinct().ToList(); + if (uniqueBundles.Count > 1) + { + collector.EmitWarning(string.Empty, $"Changelog file '{fileName}' appears in multiple bundles: {string.Join(", ", uniqueBundles)}"); + } + } + + // Check for duplicate PRs + foreach (var (pr, bundleFiles) in seenPrs.Where(kvp => kvp.Value.Count > 1)) + { + var uniqueBundles = bundleFiles.Distinct().ToList(); + if (uniqueBundles.Count > 1) + { + collector.EmitWarning(string.Empty, $"PR '{pr}' appears in multiple bundles: {string.Join(", ", uniqueBundles)}"); + } + } + + // If validation found errors, stop before merging + if (collector.Errors > 0) + { + return false; + } + + // Merge phase: Now that validation passed, load and merge all bundles + var allResolvedEntries = new List<(ChangelogData entry, string repo, HashSet bundleProductIds, bool hideLinks)>(); + var allProducts = new HashSet<(string product, string target)>(); + + foreach (var (bundledData, bundleInput, bundleDirectory) in bundleDataList) + { + // Collect products from this bundle + var bundleProductIds = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var product in bundledData.Products) + { + var target = product.Target ?? string.Empty; + _ = allProducts.Add((product.Product, target)); + if (!string.IsNullOrWhiteSpace(product.Product)) + { + _ = bundleProductIds.Add(product.Product); + } + } + + var repo = bundleInput.Repo ?? defaultRepo; + + // Resolve entries + foreach (var entry in bundledData.Entries) + { + ChangelogData? 
entryData = null; + + // If entry has resolved data, use it + if (!string.IsNullOrWhiteSpace(entry.Title) && !string.IsNullOrWhiteSpace(entry.Type)) + { + entryData = new ChangelogData + { + Title = entry.Title, + Type = entry.Type, + Subtype = entry.Subtype, + Description = entry.Description, + Impact = entry.Impact, + Action = entry.Action, + FeatureId = entry.FeatureId, + Highlight = entry.Highlight, + Pr = entry.Pr, + Products = entry.Products ?? [], + Areas = entry.Areas, + Issues = entry.Issues + }; + } + else + { + // Load from file (already validated to exist) + var filePath = _fileSystem.Path.Combine(bundleDirectory, entry.File.Name); + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + + // Deserialize YAML (skip comment lines) + var yamlLines = fileContent.Split('\n'); + var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); + + // Normalize "version:" to "target:" in products section + var normalizedYaml = VersionToTargetRegex().Replace(yamlWithoutComments, "$1target:"); + + entryData = deserializer.Deserialize(normalizedYaml); + } + + if (entryData != null) + { + allResolvedEntries.Add((entryData, repo, bundleProductIds, bundleInput.HideLinks)); + } + } + } + + if (allResolvedEntries.Count == 0) + { + collector.EmitError(string.Empty, "No changelog entries to render"); + return false; + } + + // Determine output directory + var outputDir = input.Output ?? Directory.GetCurrentDirectory(); + if (!_fileSystem.Directory.Exists(outputDir)) + { + _ = _fileSystem.Directory.CreateDirectory(outputDir); + } + + // Extract version from products (use first product's target if available, or "unknown") + var version = allProducts.Count > 0 + ? allProducts.OrderBy(p => p.product).ThenBy(p => p.target).First().target + : "unknown"; + + if (string.IsNullOrWhiteSpace(version)) + { + version = "unknown"; + } + + // Warn if --title was not provided and version defaults to "unknown" + if (string.IsNullOrWhiteSpace(input.Title) && version == "unknown") + { + collector.EmitWarning(string.Empty, "No --title option provided and bundle files do not contain 'target' values. Output folder and markdown titles will default to 'unknown'. Consider using --title to specify a custom title."); + } + + // Group entries by type (kind) + var entriesByType = allResolvedEntries.Select(e => e.entry).GroupBy(e => e.Type).ToDictionary(g => g.Key, g => g.ToList()); + + // Use title from input or default to version + var title = input.Title ?? 
version; + // Convert title to slug format for folder names and anchors (lowercase, dashes instead of spaces) + var titleSlug = TitleToSlug(title); + + // Load changelog configuration to check for render_blockers + var config = await LoadChangelogConfiguration(collector, input.Config, ctx); + if (config == null) + { + collector.EmitError(string.Empty, "Failed to load changelog configuration"); + return false; + } + + // Extract render blockers from configuration + // RenderBlockers is a Dictionary where: + // - Key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") + // - Value is a RenderBlockersEntry containing areas and/or types that should be blocked for those products + var renderBlockers = config.RenderBlockers; + + // Load feature IDs to hide - check if --hide-features contains a file path or a list of feature IDs + var featureIdsToHide = new HashSet(StringComparer.OrdinalIgnoreCase); + if (input.HideFeatures is { Length: > 0 }) + { + // If there's exactly one value, check if it's a file path + if (input.HideFeatures.Length == 1) + { + var singleValue = input.HideFeatures[0]; + + if (_fileSystem.File.Exists(singleValue)) + { + // File exists, read feature IDs from it + var featureIdsFileContent = await _fileSystem.File.ReadAllTextAsync(singleValue, ctx); + var featureIdsFromFile = featureIdsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(f => !string.IsNullOrWhiteSpace(f)) + .ToArray(); + + foreach (var featureId in featureIdsFromFile) + { + _ = featureIdsToHide.Add(featureId); + } + } + else + { + // Check if it looks like a file path + var looksLikeFilePath = singleValue.Contains(_fileSystem.Path.DirectorySeparatorChar) || + singleValue.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(singleValue); + + if (looksLikeFilePath) + { + // File path doesn't exist + collector.EmitError(singleValue, $"File does not exist: {singleValue}"); + return false; + } + else + { + // Doesn't look like a file path, treat as feature ID + _ = featureIdsToHide.Add(singleValue); + } + } + } + else + { + // Multiple values - process all values first, then check for errors + var nonExistentFiles = new List(); + foreach (var value in input.HideFeatures) + { + if (_fileSystem.File.Exists(value)) + { + // File exists, read feature IDs from it + var featureIdsFileContent = await _fileSystem.File.ReadAllTextAsync(value, ctx); + var featureIdsFromFile = featureIdsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(f => !string.IsNullOrWhiteSpace(f)) + .ToArray(); + + foreach (var featureId in featureIdsFromFile) + { + _ = featureIdsToHide.Add(featureId); + } + } + else + { + // Check if it looks like a file path + var looksLikeFilePath = value.Contains(_fileSystem.Path.DirectorySeparatorChar) || + value.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(value); + + if (looksLikeFilePath) + { + // Track non-existent files to check later + nonExistentFiles.Add(value); + } + else + { + // Doesn't look like a file path, treat as feature ID + _ = featureIdsToHide.Add(value); + } + } + } + + // Report errors for non-existent files + if (nonExistentFiles.Count > 0) + { + foreach (var filePath in nonExistentFiles) + { + collector.EmitError(filePath, $"File does not exist: {filePath}"); + } + return false; + } + } + } + + // Track hidden entries for warnings + var hiddenEntries = new 
List<(string title, string featureId)>(); + foreach (var (entry, _, _, _) in allResolvedEntries) + { + if (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) + { + hiddenEntries.Add((entry.Title ?? "Unknown", entry.FeatureId)); + } + } + + // Emit warnings for hidden entries + if (hiddenEntries.Count > 0) + { + foreach (var (entryTitle, featureId) in hiddenEntries) + { + collector.EmitWarning(string.Empty, $"Changelog entry '{entryTitle}' with feature-id '{featureId}' will be commented out in markdown output"); + } + } + + // Check entries against render blockers and track blocked entries + // render_blockers matches against bundle products, not individual entry products + var blockedEntries = new List<(string title, List reasons)>(); + foreach (var (entry, _, bundleProductIds, _) in allResolvedEntries) + { + var isBlocked = ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out var blockReasons); + if (isBlocked) + { + blockedEntries.Add((entry.Title ?? "Unknown", blockReasons)); + } + } + + // Emit warnings for blocked entries + if (blockedEntries.Count > 0) + { + foreach (var (entryTitle, reasons) in blockedEntries) + { + var reasonsText = string.Join(" and ", reasons); + collector.EmitWarning(string.Empty, $"Changelog entry '{entryTitle}' will be commented out in markdown output because it matches render_blockers: {reasonsText}"); + } + } + + // Check for unhandled changelog types + var handledTypes = new HashSet(StringComparer.OrdinalIgnoreCase) + { + ChangelogEntryTypes.Feature, + ChangelogEntryTypes.Enhancement, + ChangelogEntryTypes.Security, + ChangelogEntryTypes.BugFix, + ChangelogEntryTypes.BreakingChange, + ChangelogEntryTypes.Deprecation, + ChangelogEntryTypes.KnownIssue, + ChangelogEntryTypes.Docs, + ChangelogEntryTypes.Regression, + ChangelogEntryTypes.Other + }; + + // config is never null at this point (checked above), and AvailableTypes is initialized in the class + var availableTypes = config.AvailableTypes; + var availableTypesSet = new HashSet(availableTypes, StringComparer.OrdinalIgnoreCase); + + foreach (var entryType in entriesByType.Keys.Where(t => availableTypesSet.Contains(t) && !handledTypes.Contains(t))) + { + // Only warn if the type is valid according to config but not handled in rendering + var entryCount = entriesByType[entryType].Count; + collector.EmitWarning(string.Empty, $"Changelog type '{entryType}' is valid according to configuration but is not handled in rendering output. {entryCount} entry/entries of this type will not be included in the generated markdown files."); + } + + // Create mapping from entries to their bundle product IDs for render_blockers checking + // Use a custom comparer for reference equality since entries are objects + var entryToBundleProducts = new Dictionary>(); + foreach (var (entry, _, bundleProductIds, _) in allResolvedEntries) + { + entryToBundleProducts[entry] = bundleProductIds; + } + + // Create mapping from entries to their repo for PR link formatting + var entryToRepo = new Dictionary(); + foreach (var (entry, repo, _, _) in allResolvedEntries) + { + entryToRepo[entry] = repo; + } + + // Create mapping from entries to their hideLinks setting for per-bundle link visibility + var entryToHideLinks = new Dictionary(); + foreach (var (entry, _, _, hideLinks) in allResolvedEntries) + { + entryToHideLinks[entry] = hideLinks; + } + + // Render markdown files (use first repo found for section anchors, or default) + var repoForAnchors = allResolvedEntries.Count > 0 ? 
allResolvedEntries[0].repo : defaultRepo; + + // Render index.md (features, enhancements, bug fixes, security, docs, regression, other) + await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForAnchors, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks, ctx); + + // Render breaking-changes.md + await RenderBreakingChangesMarkdown(collector, outputDir, title, titleSlug, repoForAnchors, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks, ctx); + + // Render deprecations.md + await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForAnchors, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks, ctx); + + // Render known-issues.md + await RenderKnownIssuesMarkdown(collector, outputDir, title, titleSlug, repoForAnchors, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks, ctx); + + _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); + + return true; + } + catch (OperationCanceledException) + { + throw; + } + catch (IOException ioEx) + { + collector.EmitError(string.Empty, $"IO error rendering changelogs: {ioEx.Message}", ioEx); + return false; + } + catch (UnauthorizedAccessException uaEx) + { + collector.EmitError(string.Empty, $"Access denied rendering changelogs: {uaEx.Message}", uaEx); + return false; + } + catch (YamlException yamlEx) + { + collector.EmitError(string.Empty, $"YAML parsing error: {yamlEx.Message}", yamlEx); + return false; + } + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderIndexMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string titleSlug, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + HashSet featureIdsToHide, + Dictionary? 
renderBlockers, + Dictionary> entryToBundleProducts, + Dictionary entryToRepo, + Dictionary entryToHideLinks, + Cancel ctx + ) + { + var features = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Feature, []); + var enhancements = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Enhancement, []); + var security = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Security, []); + var bugFixes = entriesByType.GetValueOrDefault(ChangelogEntryTypes.BugFix, []); + var docs = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Docs, []); + var regressions = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Regression, []); + var other = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Other, []); + + var hasBreakingChanges = entriesByType.ContainsKey(ChangelogEntryTypes.BreakingChange); + var hasDeprecations = entriesByType.ContainsKey(ChangelogEntryTypes.Deprecation); + var hasKnownIssues = entriesByType.ContainsKey(ChangelogEntryTypes.KnownIssue); + + var otherLinks = new List(); + if (hasKnownIssues) + { + otherLinks.Add($"[Known issues](/release-notes/known-issues.md#{repo}-{titleSlug}-known-issues)"); + } + if (hasBreakingChanges) + { + otherLinks.Add($"[Breaking changes](/release-notes/breaking-changes.md#{repo}-{titleSlug}-breaking-changes)"); + } + if (hasDeprecations) + { + otherLinks.Add($"[Deprecations](/release-notes/deprecations.md#{repo}-{titleSlug}-deprecations)"); + } + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-release-notes-{titleSlug}]"); + + if (otherLinks.Count > 0) + { + var linksText = string.Join(" and ", otherLinks); + sb.AppendLine(CultureInfo.InvariantCulture, $"_{linksText}._"); + sb.AppendLine(); + } + + var hasAnyEntries = features.Count > 0 || enhancements.Count > 0 || security.Count > 0 || bugFixes.Count > 0 || docs.Count > 0 || regressions.Count > 0 || other.Count > 0; + + if (hasAnyEntries) + { + if (features.Count > 0 || enhancements.Count > 0) + { + sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{titleSlug}-features-enhancements]"); + var combined = features.Concat(enhancements).ToList(); + RenderEntriesByArea(sb, combined, subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks); + } + + if (security.Count > 0 || bugFixes.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{titleSlug}-fixes]"); + var combined = security.Concat(bugFixes).ToList(); + RenderEntriesByArea(sb, combined, subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks); + } + + if (docs.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Documentation [{repo}-{titleSlug}-docs]"); + RenderEntriesByArea(sb, docs, subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks); + } + + if (regressions.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Regressions [{repo}-{titleSlug}-regressions]"); + RenderEntriesByArea(sb, regressions, subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks); + } + + if (other.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Other changes [{repo}-{titleSlug}-other]"); + RenderEntriesByArea(sb, other, subsections, featureIdsToHide, renderBlockers, entryToBundleProducts, entryToRepo, entryToHideLinks); + } + } + else + { + 
sb.AppendLine("_No new features, enhancements, or fixes._"); + } + + var indexPath = _fileSystem.Path.Combine(outputDir, titleSlug, "index.md"); + var indexDir = _fileSystem.Path.GetDirectoryName(indexPath); + if (!string.IsNullOrWhiteSpace(indexDir) && !_fileSystem.Directory.Exists(indexDir)) + { + _ = _fileSystem.Directory.CreateDirectory(indexDir); + } + + await _fileSystem.File.WriteAllTextAsync(indexPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderBreakingChangesMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string titleSlug, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + HashSet featureIdsToHide, + Dictionary? renderBlockers, + Dictionary> entryToBundleProducts, + Dictionary entryToRepo, + Dictionary entryToHideLinks, + Cancel ctx + ) + { + var breakingChanges = entriesByType.GetValueOrDefault(ChangelogEntryTypes.BreakingChange, []); + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-breaking-changes]"); + + if (breakingChanges.Count > 0) + { + // Group by subtype if subsections is enabled, otherwise group by area + var groupedEntries = subsections + ? breakingChanges.GroupBy(e => string.IsNullOrWhiteSpace(e.Subtype) ? string.Empty : e.Subtype).ToList() + : breakingChanges.GroupBy(e => GetComponent(e)).ToList(); + + foreach (var group in groupedEntries) + { + if (subsections && !string.IsNullOrWhiteSpace(group.Key)) + { + var header = FormatSubtypeHeader(group.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in group) + { + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var entryRepo = entryToRepo.GetValueOrDefault(entry, repo); + var entryHideLinks = entryToHideLinks.GetValueOrDefault(entry, false); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); + + sb.AppendLine(); + if (shouldHide) + { + sb.AppendLine(""); + } + } + } + } + else + { + sb.AppendLine("_No breaking changes._"); + } + + var breakingPath = _fileSystem.Path.Combine(outputDir, titleSlug, "breaking-changes.md"); + var breakingDir = _fileSystem.Path.GetDirectoryName(breakingPath); + if (!string.IsNullOrWhiteSpace(breakingDir) && !_fileSystem.Directory.Exists(breakingDir)) + { + _ = _fileSystem.Directory.CreateDirectory(breakingDir); + } + + await _fileSystem.File.WriteAllTextAsync(breakingPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderDeprecationsMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string titleSlug, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + HashSet featureIdsToHide, + Dictionary? 
renderBlockers, + Dictionary> entryToBundleProducts, + Dictionary entryToRepo, + Dictionary entryToHideLinks, + Cancel ctx + ) + { + var deprecations = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Deprecation, []); + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-deprecations]"); + + if (deprecations.Count > 0) + { + var groupedByArea = deprecations.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var entryRepo = entryToRepo.GetValueOrDefault(entry, repo); + var entryHideLinks = entryToHideLinks.GetValueOrDefault(entry, false); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); + + sb.AppendLine(); + if (shouldHide) + { + sb.AppendLine(""); + } + } + } + } + else + { + sb.AppendLine("_No deprecations._"); + } + + var deprecationsPath = _fileSystem.Path.Combine(outputDir, titleSlug, "deprecations.md"); + var deprecationsDir = _fileSystem.Path.GetDirectoryName(deprecationsPath); + if (!string.IsNullOrWhiteSpace(deprecationsDir) && !_fileSystem.Directory.Exists(deprecationsDir)) + { + _ = _fileSystem.Directory.CreateDirectory(deprecationsDir); + } + + await _fileSystem.File.WriteAllTextAsync(deprecationsPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderKnownIssuesMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string titleSlug, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + HashSet featureIdsToHide, + Dictionary? 
renderBlockers, + Dictionary> entryToBundleProducts, + Dictionary entryToRepo, + Dictionary entryToHideLinks, + Cancel ctx + ) + { + var knownIssues = entriesByType.GetValueOrDefault(ChangelogEntryTypes.KnownIssue, []); + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-known-issues]"); + + if (knownIssues.Count > 0) + { + var groupedByArea = knownIssues.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var entryRepo = entryToRepo.GetValueOrDefault(entry, repo); + var entryHideLinks = entryToHideLinks.GetValueOrDefault(entry, false); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); + + sb.AppendLine(); + if (shouldHide) + { + sb.AppendLine(""); + } + } + } + } + else + { + sb.AppendLine("_No known issues._"); + } + + var knownIssuesPath = _fileSystem.Path.Combine(outputDir, titleSlug, "known-issues.md"); + var knownIssuesDir = _fileSystem.Path.GetDirectoryName(knownIssuesPath); + if (!string.IsNullOrWhiteSpace(knownIssuesDir) && !_fileSystem.Directory.Exists(knownIssuesDir)) + { + _ = _fileSystem.Directory.CreateDirectory(knownIssuesDir); + } + + await _fileSystem.File.WriteAllTextAsync(knownIssuesPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private void RenderEntriesByArea(StringBuilder sb, List entries, bool subsections, HashSet featureIdsToHide, Dictionary? 
renderBlockers, Dictionary> entryToBundleProducts, Dictionary entryToRepo, Dictionary entryToHideLinks) + { + var groupedByArea = entries.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var entryRepo = entryToRepo.GetValueOrDefault(entry, "elastic"); + var entryHideLinks = entryToHideLinks.GetValueOrDefault(entry, false); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); + + if (shouldHide) + { + sb.Append("% "); + } + sb.Append("* "); + sb.Append(Beautify(entry.Title)); + + var hasCommentedLinks = false; + if (entryHideLinks) + { + // When hiding private links, put them on separate lines as comments with proper indentation + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.AppendLine(); + if (shouldHide) + { + sb.Append("% "); + } + sb.Append(" "); + sb.Append(FormatPrLink(entry.Pr, entryRepo, entryHideLinks)); + hasCommentedLinks = true; + } + + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.AppendLine(); + if (shouldHide) + { + sb.Append("% "); + } + sb.Append(" "); + sb.Append(FormatIssueLink(issue, entryRepo, entryHideLinks)); + hasCommentedLinks = true; + } + } + + // Add newline after the last link if there are commented links + if (hasCommentedLinks) + { + sb.AppendLine(); + } + } + else + { + sb.Append(' '); + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.Append(FormatPrLink(entry.Pr, entryRepo, entryHideLinks)); + sb.Append(' '); + } + + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(FormatIssueLink(issue, entryRepo, entryHideLinks)); + sb.Append(' '); + } + } + } + + if (!string.IsNullOrWhiteSpace(entry.Description)) + { + // Add blank line before description + // When hiding links, add an indented blank line if there are commented links + if (entryHideLinks && hasCommentedLinks) + { + sb.AppendLine(" "); + } + else + { + sb.AppendLine(); + } + var indented = Indent(entry.Description); + if (shouldHide) + { + // Comment out each line of the description + var indentedLines = indented.Split('\n'); + foreach (var line in indentedLines) + { + sb.Append("% "); + sb.AppendLine(line); + } + } + else + { + sb.AppendLine(indented); + } + } + else + { + sb.AppendLine(); + } + } + } + } + + /// + /// Checks if an entry should be blocked based on render_blockers configuration. + /// RenderBlockers is a Dictionary where: + /// - Key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") + /// - Value is a RenderBlockersEntry containing areas and/or types that should be blocked for those products + /// An entry is blocked if ANY product in the bundle matches ANY product key AND (ANY area matches OR ANY type matches). + /// Note: render_blockers matches against bundle products, not individual entry products. + /// + private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundleProductIds, Dictionary? 
renderBlockers, out List reasons) + { + reasons = []; + if (renderBlockers == null || renderBlockers.Count == 0) + { + return false; + } + + // Bundle must have products to be blocked + if (bundleProductIds == null || bundleProductIds.Count == 0) + { + return false; + } + + // Extract area values from entry (case-insensitive comparison) + var entryAreas = entry.Areas != null && entry.Areas.Count > 0 + ? entry.Areas + .Where(a => !string.IsNullOrWhiteSpace(a)) + .Select(a => a!) + .ToHashSet(StringComparer.OrdinalIgnoreCase) + : new HashSet(StringComparer.OrdinalIgnoreCase); + + // Extract type from entry (case-insensitive comparison) + var entryType = !string.IsNullOrWhiteSpace(entry.Type) + ? entry.Type + : null; + + // Check each render_blockers entry + foreach (var (productKey, blockersEntry) in renderBlockers) + { + if (blockersEntry == null) + { + continue; + } + + // Parse product key - can be comma-separated (e.g., "elasticsearch, cloud-serverless") + var productKeys = productKey + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + // Check if any product in the bundle matches any product in the key + var matchingProducts = bundleProductIds.Intersect(productKeys, StringComparer.OrdinalIgnoreCase).ToList(); + if (matchingProducts.Count == 0) + { + continue; + } + + var isBlocked = false; + var blockReasons = new List(); + + // Check areas if specified + if (blockersEntry.Areas != null && blockersEntry.Areas.Count > 0 && entryAreas.Count > 0) + { + var matchingAreas = entryAreas.Intersect(blockersEntry.Areas, StringComparer.OrdinalIgnoreCase).ToList(); + if (matchingAreas.Count > 0) + { + isBlocked = true; + var reasonsForProductsAndAreas = matchingProducts + .SelectMany(product => matchingAreas + .Select(area => $"product '{product}' with area '{area}'")) + .Distinct(); + + foreach (var reason in reasonsForProductsAndAreas.Where(reason => !blockReasons.Contains(reason))) + { + blockReasons.Add(reason); + } + } + } + + // Check types if specified + if (blockersEntry.Types != null && blockersEntry.Types.Count > 0 && !string.IsNullOrWhiteSpace(entryType)) + { + var matchingTypes = blockersEntry.Types + .Where(t => string.Equals(t, entryType, StringComparison.OrdinalIgnoreCase)) + .ToList(); + if (matchingTypes.Count > 0) + { + isBlocked = true; + var reasonsForProducts = matchingProducts + .SelectMany(product => matchingTypes + .Select(type => $"product '{product}' with type '{type}'")) + .Distinct(); + + foreach (var reason in reasonsForProducts.Where(reason => !blockReasons.Contains(reason))) + { + blockReasons.Add(reason); + } + } + } + + if (isBlocked) + { + reasons.AddRange(blockReasons); + return true; + } + } + + return false; + } + + private static string GetComponent(ChangelogData entry) + { + // Map areas (list) to component (string) - use first area or empty string + if (entry.Areas != null && entry.Areas.Count > 0) + { + return entry.Areas[0]; + } + return string.Empty; + } + + private static string FormatAreaHeader(string area) + { + // Capitalize first letter and replace hyphens with spaces + if (string.IsNullOrWhiteSpace(area)) + return string.Empty; + + var result = area.Length < 2 + ? 
char.ToUpperInvariant(area[0]).ToString() + : char.ToUpperInvariant(area[0]) + area[1..]; + return result.Replace("-", " "); + } + + private static string FormatSubtypeHeader(string subtype) + { + // Capitalize first letter and replace hyphens with spaces + if (string.IsNullOrWhiteSpace(subtype)) + return string.Empty; + + var result = subtype.Length < 2 + ? char.ToUpperInvariant(subtype[0]).ToString() + : char.ToUpperInvariant(subtype[0]) + subtype[1..]; + return result.Replace("-", " "); + } + + private static string Beautify(string text) + { + if (string.IsNullOrWhiteSpace(text)) + return string.Empty; + + // Capitalize first letter and ensure ends with period + var result = text.Length < 2 + ? char.ToUpperInvariant(text[0]).ToString() + : char.ToUpperInvariant(text[0]) + text[1..]; + if (!result.EndsWith('.')) + { + result += "."; + } + return result; + } + + private static string TitleToSlug(string title) + { + if (string.IsNullOrWhiteSpace(title)) + return string.Empty; + + // Convert to lowercase and replace spaces with dashes + return title.ToLowerInvariant().Replace(' ', '-'); + } + + private static string Indent(string text) + { + // Indent each line with two spaces + var lines = text.Split('\n'); + return string.Join("\n", lines.Select(line => " " + line)); + } + + [GeneratedRegex(@"\d+$", RegexOptions.None)] + private static partial Regex TrailingNumberRegex(); + + private static string FormatPrLink(string pr, string repo, bool hidePrivateLinks) + { + // Extract PR number + var match = TrailingNumberRegex().Match(pr); + var prNumber = match.Success ? match.Value : pr; + + // Format as markdown link + string link; + if (pr.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + link = $"[#{prNumber}]({pr})"; + } + else + { + var url = $"https://github.com/elastic/{repo}/pull/{prNumber}"; + link = $"[#{prNumber}]({url})"; + } + + // Comment out link if hiding private links + if (hidePrivateLinks) + { + return $"% {link}"; + } + + return link; + } + + private static string FormatIssueLink(string issue, string repo, bool hidePrivateLinks) + { + // Extract issue number + var match = TrailingNumberRegex().Match(issue); + var issueNumber = match.Success ? match.Value : issue; + + // Format as markdown link + string link; + if (issue.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + link = $"[#{issueNumber}]({issue})"; + } + else + { + var url = $"https://github.com/elastic/{repo}/issues/{issueNumber}"; + link = $"[#{issueNumber}]({url})"; + } + + // Comment out link if hiding private links + if (hidePrivateLinks) + { + return $"% {link}"; + } + + return link; + } } diff --git a/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj b/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj index 0494a04d1..96209f37e 100644 --- a/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj +++ b/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj @@ -6,6 +6,12 @@ enable + + + <_Parameter1>Elastic.Documentation.Services.Tests + + + diff --git a/src/tooling/docs-builder/Arguments/BundleInputParser.cs b/src/tooling/docs-builder/Arguments/BundleInputParser.cs new file mode 100644 index 000000000..7b1ae91f0 --- /dev/null +++ b/src/tooling/docs-builder/Arguments/BundleInputParser.cs @@ -0,0 +1,83 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. 
+// See the LICENSE file in the project root for more information
+
+using Elastic.Documentation.Services.Changelog;
+
+namespace Documentation.Builder.Arguments;
+
+/// <summary>
+/// Utility class for parsing bundle input format: "bundle-file-path|changelog-file-path|repo|link-visibility"
+/// Uses pipe (|) as delimiter since ConsoleAppFramework auto-splits string[] by comma.
+/// Only bundle-file-path is required.
+/// </summary>
+public static class BundleInputParser
+{
+	/// <summary>
+	/// Parses a single input string into a BundleInput object.
+	/// Format: "bundle-file-path|changelog-file-path|repo|link-visibility" (only bundle-file-path is required)
+	/// Uses pipe (|) as delimiter since ConsoleAppFramework auto-splits string[] by comma.
+	/// link-visibility can be "hide-links" or "keep-links" (default is keep-links if omitted).
+	/// </summary>
+	public static BundleInput? Parse(string input)
+	{
+		if (string.IsNullOrWhiteSpace(input))
+			return null;
+
+		// Split by pipe to get parts (comma is auto-split by ConsoleAppFramework)
+		var parts = input.Split('|', StringSplitOptions.TrimEntries);
+
+		if (parts.Length == 0 || string.IsNullOrWhiteSpace(parts[0]))
+			return null;
+
+		var bundleInput = new BundleInput
+		{
+			BundleFile = parts[0]
+		};
+
+		// Directory is optional (second part)
+		if (parts.Length > 1 && !string.IsNullOrWhiteSpace(parts[1]))
+		{
+			bundleInput.Directory = parts[1];
+		}
+
+		// Repo is optional (third part)
+		if (parts.Length > 2 && !string.IsNullOrWhiteSpace(parts[2]))
+		{
+			bundleInput.Repo = parts[2];
+		}
+
+		// Link visibility is optional (fourth part) - "hide-links" or "keep-links"
+		if (parts.Length > 3 && !string.IsNullOrWhiteSpace(parts[3]))
+		{
+			bundleInput.HideLinks = parts[3].Equals("hide-links", StringComparison.OrdinalIgnoreCase);
+		}
+
+		return bundleInput;
+	}
+
+	/// <summary>
+	/// Parses multiple input strings into a list of BundleInput objects.
+	/// Each input is in format: "bundle-file-path|changelog-file-path|repo|link-visibility" (only bundle-file-path is required)
+	/// Uses pipe (|) as delimiter since ConsoleAppFramework auto-splits string[] by comma.
+	/// Multiple bundles can be specified by comma-separating them in a single --input option.
+	/// link-visibility can be "hide-links" or "keep-links" (default is keep-links if omitted).
+	/// </summary>
+	public static List<BundleInput> ParseAll(string[]? inputs)
+	{
+		var result = new List<BundleInput>();
+
+		if (inputs == null || inputs.Length == 0)
+			return result;
+
+		foreach (var input in inputs)
+		{
+			var bundleInput = Parse(input);
+			if (bundleInput != null)
+				result.Add(bundleInput);
+		}
+
+		return result;
+	}
+}
+
diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs
index fd4222eb8..4b020bdc0 100644
--- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs
+++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs
@@ -3,6 +3,7 @@
 // See the LICENSE file in the project root for more information
 
 using System.IO.Abstractions;
+using System.Linq;
 using ConsoleAppFramework;
 using Documentation.Builder.Arguments;
 using Elastic.Documentation.Configuration;
@@ -21,17 +22,17 @@ IConfigurationContext configurationContext
 {
 	private readonly IFileSystem _fileSystem = new FileSystem();
 
 	/// <summary>
-	/// Changelog commands. Use 'changelog add' to create a new changelog fragment.
+	/// Changelog commands. Use 'changelog add' to create a new changelog or 'changelog bundle' to create a consolidated list of changelogs.
/// [Command("")] public Task Default() { - collector.EmitError(string.Empty, "Please specify a subcommand. Use 'changelog add' to create a new changelog fragment. Run 'changelog add --help' for usage information."); + collector.EmitError(string.Empty, "Please specify a subcommand. Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n - 'changelog render': Render a bundled changelog to markdown files\n\nRun 'changelog add --help', 'changelog bundle --help', or 'changelog render --help' for usage information."); return Task.FromResult(1); } /// - /// Add a new changelog fragment from command-line input + /// Add a new changelog from command-line input /// /// Optional: A short, user-facing title (max 80 characters). Required if --pr is not specified. If --pr and --title are specified, the latter value is used instead of what exists in the PR. /// Optional: Type of change (feature, enhancement, bug-fix, breaking-change, etc.). Required if --pr is not specified. If mappings are configured, type can be derived from the PR. @@ -47,7 +48,7 @@ public Task Default() /// Optional: What users must do to mitigate /// Optional: Feature flag ID /// Optional: Include in release highlights - /// Optional: Output directory for the changelog fragment. Defaults to current directory + /// Optional: Output directory for the changelog. Defaults to current directory /// Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' /// Optional: Use the PR number as the filename instead of generating it from a unique ID and title /// @@ -144,5 +145,252 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st return await serviceInvoker.InvokeAsync(ctx); } + + /// + /// Bundle changelog files + /// + /// Optional: Directory containing changelog YAML files. Defaults to current directory + /// Optional: Output path for the bundled changelog. Can be either (1) a directory path, in which case 'changelog-bundle.yaml' is created in that directory, or (2) a file path ending in .yml or .yaml. Defaults to 'changelog-bundle.yaml' in the input directory + /// Include all changelogs in the directory. Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. + /// Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02 ga, cloud-serverless 2025-12-06 beta"). When specified, all three parts (product, target, lifecycle) are required but can be wildcards (*). Examples: "elasticsearch * *" matches all elasticsearch changelogs, "cloud-serverless 2025-12-02 *" matches cloud-serverless 2025-12-02 with any lifecycle, "* 9.3.* *" matches any product with target starting with "9.3.", "* * *" matches all changelogs (equivalent to --all). Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. + /// Optional: Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. + /// Optional: Copy the contents of each changelog file into the entries array. By default, the bundle contains only the file names and checksums. + /// Filter by pull request URLs or numbers (comma-separated), or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. Only one filter option can be specified: `--all`, `--input-products`, or `--prs`. 
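For orientation, a minimal `changelog bundle` invocation sketch (the flag spellings are inferred from the parameter names below; the directory, filter value, and output path are hypothetical):

    docs-builder changelog bundle \
      --directory ./changelogs \
      --input-products "elasticsearch 9.3.* *" \
      --output ./changelog-bundle.yaml

Per the filter rules described above, this would match every elasticsearch changelog whose target starts with "9.3.", regardless of lifecycle, and write the bundle to ./changelog-bundle.yaml.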
+	/// GitHub repository owner (required only when PRs are specified as numbers)
+	/// GitHub repository name (required only when PRs are specified as numbers)
+	///
+	[Command("bundle")]
+	public async Task<int> Bundle(
+		string? directory = null,
+		string? output = null,
+		bool all = false,
+		[ProductInfoParser] List<ProductInfo>? inputProducts = null,
+		[ProductInfoParser] List<ProductInfo>? outputProducts = null,
+		bool resolve = false,
+		string[]? prs = null,
+		string? owner = null,
+		string? repo = null,
+		Cancel ctx = default
+	)
+	{
+		await using var serviceInvoker = new ServiceInvoker(collector);
+
+		var service = new ChangelogService(logFactory, configurationContext, null);
+
+		// Process each --prs occurrence: each can be comma-separated PRs or a file path
+		var allPrs = new List<string>();
+		if (prs is { Length: > 0 })
+		{
+			foreach (var prsValue in prs.Where(p => !string.IsNullOrWhiteSpace(p)))
+			{
+				// Check if it contains commas - if so, split and add each as a PR
+				if (prsValue.Contains(','))
+				{
+					var commaSeparatedPrs = prsValue
+						.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
+						.Where(p => !string.IsNullOrWhiteSpace(p));
+					allPrs.AddRange(commaSeparatedPrs);
+				}
+				else
+				{
+					// Single value - pass as-is (will be handled by service layer as file path or PR)
+					allPrs.Add(prsValue);
+				}
+			}
+		}
+
+		// Validate filter options - at least one must be specified
+		var specifiedFilters = new List<string>();
+		if (all)
+			specifiedFilters.Add("--all");
+		if (inputProducts != null && inputProducts.Count > 0)
+			specifiedFilters.Add("--input-products");
+		if (allPrs.Count > 0)
+			specifiedFilters.Add("--prs");
+
+		if (specifiedFilters.Count == 0)
+		{
+			collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --input-products, or --prs");
+			_ = collector.StartAsync(ctx);
+			await collector.WaitForDrain();
+			await collector.StopAsync(ctx);
+			return 1;
+		}
+
+		if (specifiedFilters.Count > 1)
+		{
+			collector.EmitError(string.Empty, $"Multiple filter options cannot be specified together. You specified: {string.Join(", ", specifiedFilters)}. 
Please use only one filter option: --all, --input-products, or --prs"); + _ = collector.StartAsync(ctx); + await collector.WaitForDrain(); + await collector.StopAsync(ctx); + return 1; + } + + // Validate that if inputProducts is provided, all three parts (product, target, lifecycle) are present for each entry + // They can be wildcards (*) but must be present + if (inputProducts != null && inputProducts.Count > 0) + { + foreach (var product in inputProducts) + { + if (string.IsNullOrWhiteSpace(product.Product)) + { + collector.EmitError(string.Empty, "--input-products: product is required (use '*' for wildcard)"); + _ = collector.StartAsync(ctx); + await collector.WaitForDrain(); + await collector.StopAsync(ctx); + return 1; + } + + // When --input-products is used, target and lifecycle are required (but can be "*") + // If they're null, it means they weren't provided in the input + if (product.Target == null) + { + collector.EmitError(string.Empty, $"--input-products: target is required for product '{product.Product}' (use '*' for wildcard)"); + _ = collector.StartAsync(ctx); + await collector.WaitForDrain(); + await collector.StopAsync(ctx); + return 1; + } + + if (product.Lifecycle == null) + { + collector.EmitError(string.Empty, $"--input-products: lifecycle is required for product '{product.Product}' (use '*' for wildcard)"); + _ = collector.StartAsync(ctx); + await collector.WaitForDrain(); + await collector.StopAsync(ctx); + return 1; + } + } + + // Check if --input-products * * * is specified (equivalent to --all) + var isAllWildcard = inputProducts.Count == 1 && + inputProducts[0].Product == "*" && + inputProducts[0].Target == "*" && + inputProducts[0].Lifecycle == "*"; + + if (isAllWildcard) + { + all = true; + inputProducts = null; // Clear inputProducts so service treats it as --all + } + } + + // Process and validate output parameter + string? processedOutput = null; + if (!string.IsNullOrWhiteSpace(output)) + { + var outputLower = output.ToLowerInvariant(); + var endsWithYml = outputLower.EndsWith(".yml", StringComparison.OrdinalIgnoreCase); + var endsWithYaml = outputLower.EndsWith(".yaml", StringComparison.OrdinalIgnoreCase); + + if (endsWithYml || endsWithYaml) + { + // It's a file path - use as-is + processedOutput = output; + } + else + { + // Check if it has a file extension (other than .yml/.yaml) + var extension = Path.GetExtension(output); + if (!string.IsNullOrEmpty(extension)) + { + // Has an extension that's not .yml/.yaml - this is invalid + collector.EmitError(string.Empty, $"--output: If a filename is provided, it must end in .yml or .yaml. Found: {extension}"); + _ = collector.StartAsync(ctx); + await collector.WaitForDrain(); + await collector.StopAsync(ctx); + return 1; + } + + // It's a directory path - append default filename + processedOutput = Path.Combine(output, "changelog-bundle.yaml"); + } + } + + var input = new ChangelogBundleInput + { + Directory = directory ?? Directory.GetCurrentDirectory(), + Output = processedOutput, + All = all, + InputProducts = inputProducts, + OutputProducts = outputProducts, + Resolve = resolve, + Prs = allPrs.Count > 0 ? 
allPrs.ToArray() : null,
+			Owner = owner,
+			Repo = repo
+		};
+
+		serviceInvoker.AddCommand(service, input,
+			async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, state, ctx)
+		);
+
+		return await serviceInvoker.InvokeAsync(ctx);
+	}
+
+	/// <summary>
+	/// Render bundled changelog(s) to markdown files
+	/// </summary>
+	/// Required: Bundle input(s) in format "bundle-file-path|changelog-file-path|repo|link-visibility" (use pipe as delimiter). To merge multiple bundles, separate them with commas. Only bundle-file-path is required. link-visibility can be "hide-links" or "keep-links" (default). Paths must be absolute or use environment variables; tilde (~) expansion is not supported.
+	/// Optional: Output directory for rendered markdown files. Defaults to current directory
+	/// Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle
+	/// Optional: Group entries by area/component in subsections. For breaking changes with a subtype, groups by subtype instead of area. Defaults to false
+	/// Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out in the markdown output.
+	///
+	[Command("render")]
+	public async Task<int> Render(
+		string[]? input = null,
+		string? output = null,
+		string? title = null,
+		bool subsections = false,
+		string[]? hideFeatures = null,
+		string? config = null,
+		Cancel ctx = default
+	)
+	{
+		await using var serviceInvoker = new ServiceInvoker(collector);
+
+		var service = new ChangelogService(logFactory, configurationContext, null);
+
+		// Process each --hide-features occurrence: each can be comma-separated feature IDs or a file path
+		var allFeatureIds = new List<string>();
+		if (hideFeatures is { Length: > 0 })
+		{
+			foreach (var hideFeaturesValue in hideFeatures.Where(v => !string.IsNullOrWhiteSpace(v)))
+			{
+				// Check if it contains commas - if so, split and add each as a feature ID
+				if (hideFeaturesValue.Contains(','))
+				{
+					var commaSeparatedFeatureIds = hideFeaturesValue
+						.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
+						.Where(f => !string.IsNullOrWhiteSpace(f));
+					allFeatureIds.AddRange(commaSeparatedFeatureIds);
+				}
+				else
+				{
+					// Single value - pass as-is (will be handled by service layer as file path or feature ID)
+					allFeatureIds.Add(hideFeaturesValue);
+				}
+			}
+		}
+
+		// Parse each --input value into BundleInput objects
+		var bundles = BundleInputParser.ParseAll(input);
+
+		var renderInput = new ChangelogRenderInput
+		{
+			Bundles = bundles,
+			Output = output,
+			Title = title,
+			Subsections = subsections,
+			HideFeatures = allFeatureIds.Count > 0 ? allFeatureIds.ToArray() : null,
+			Config = config
+		};
+
+		serviceInvoker.AddCommand(service, renderInput,
+			async static (s, collector, state, ctx) => await s.RenderChangelogs(collector, state, ctx)
+		);
+
+		return await serviceInvoker.InvokeAsync(ctx);
+	}
 }
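For orientation, a minimal `changelog render` invocation sketch (paths, repo name, and output directory are hypothetical; the pipe-delimited `--input` format is the one documented above):

    docs-builder changelog render \
      --input "/work/changelog-bundle.yaml|/work/changelogs|elasticsearch|hide-links" \
      --output /work/release-notes \
      --subsections

With this value, BundleInputParser.Parse would produce a BundleInput whose BundleFile is "/work/changelog-bundle.yaml", Directory is "/work/changelogs", Repo is "elasticsearch", and HideLinks is true, so PR and issue links from that bundle would be commented out in the rendered markdown.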
diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs
index 74b0d818a..e73744836 100644
--- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs
+++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs
@@ -1118,224 +1118,3547 @@ public async Task CreateChangelog_WithMultiplePrs_CreatesOneFilePerPr()
 		var files = Directory.GetFiles(outputDir, "*.yaml");
 		files.Should().HaveCount(2);
 
-		var yamlContent1 = await File.ReadAllTextAsync(files[0], TestContext.Current.CancellationToken);
-		var yamlContent2 = await File.ReadAllTextAsync(files[1], TestContext.Current.CancellationToken);
+		var yamlContents = new List<string>();
+		foreach (var file in files)
+		{
+			yamlContents.Add(await File.ReadAllTextAsync(file, TestContext.Current.CancellationToken));
+		}
+
+		// Verify both PRs were processed
+		yamlContents.Should().Contain(c => c.Contains("title: First PR feature"));
+		yamlContents.Should().Contain(c => c.Contains("title: Second PR bug fix"));
+	}
+
+	[Fact]
+	public async Task BundleChangelogs_WithAllOption_CreatesValidBundle()
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create test changelog files
+		var changelog1 = """
+			title: First changelog
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			    lifecycle: ga
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			""";
+		var changelog2 = """
+			title: Second changelog
+			type: enhancement
+			products:
+			  - product: kibana
+			    target: 9.2.0
+			pr: https://github.com/elastic/kibana/pull/200
+			""";
+
+		var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-changelog.yaml");
+		var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-changelog.yaml");
+		await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken);
+
+		var input = new ChangelogBundleInput
+		{
+			Directory = changelogDir,
+			All = true,
+			Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml")
+		};
+
+		// Act
+		var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+		// Assert
+		result.Should().BeTrue();
+		_collector.Errors.Should().Be(0);
+
+		var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken);
+		bundleContent.Should().Contain("products:");
+		bundleContent.Should().Contain("product: elasticsearch");
+		bundleContent.Should().Contain("product: kibana");
+		bundleContent.Should().Contain("entries:");
+		bundleContent.Should().Contain("file:");
+		bundleContent.Should().Contain("name: 1755268130-first-changelog.yaml");
+		bundleContent.Should().Contain("name: 1755268140-second-changelog.yaml");
+		bundleContent.Should().Contain("checksum:");
+	}
+
+	[Fact]
+	public async Task BundleChangelogs_WithProductsFilter_FiltersCorrectly()
+	{
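// A rough sketch of the bundle file shape implied by the assertions in the preceding test
// (the nesting and ordering are assumptions; the values are hypothetical):
//   products:
//     - product: elasticsearch
//       target: 9.2.0
//       lifecycle: ga
//   entries:
//     - file:
//         name: 1755268130-first-changelog.yaml
//         checksum: <checksum-of-the-changelog-file>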
// Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-feature.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); + bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); + bundleContent.Should().NotContain("name: 1755268140-kibana-feature.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrsFilter_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: First PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + var changelog3 = """ + title: Third PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/300 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-pr.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-pr.yaml"); + var file3 = fileSystem.Path.Combine(changelogDir, "1755268150-third-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file3, changelog3, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = 
["https://github.com/elastic/elasticsearch/pull/100", "https://github.com/elastic/elasticsearch/pull/200"], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-first-pr.yaml"); + bundleContent.Should().Contain("name: 1755268140-second-pr.yaml"); + bundleContent.Should().NotContain("name: 1755268150-third-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrsFilterAndUnmatchedPrs_EmitsWarnings() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file for only one PR + var changelog1 = """ + title: First PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = [ + "https://github.com/elastic/elasticsearch/pull/100", + "https://github.com/elastic/elasticsearch/pull/200", + "https://github.com/elastic/elasticsearch/pull/300" + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().Be(2); // Two unmatched PRs + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No changelog file found for PR: https://github.com/elastic/elasticsearch/pull/200")); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No changelog file found for PR: https://github.com/elastic/elasticsearch/pull/300")); + } + + [Fact] + public async Task BundleChangelogs_WithPrsFileFilter_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: First PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-pr.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, 
TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + // Create PRs file + var prsFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "prs.txt"); + fileSystem.Directory.CreateDirectory(fileSystem.Path.GetDirectoryName(prsFile)!); + await fileSystem.File.WriteAllTextAsync(prsFile, """ + https://github.com/elastic/elasticsearch/pull/100 + https://github.com/elastic/elasticsearch/pull/200 + """, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = [prsFile], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-first-pr.yaml"); + bundleContent.Should().Contain("name: 1755268140-second-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrNumberAndOwnerRepo_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: PR with number + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-pr-number.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = ["100"], + Owner = "elastic", + Repo = "elasticsearch", + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-pr-number.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithShortPrFormat_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: PR with short format + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/133609 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-short-format.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = ["elastic/elasticsearch#133609"], + 
Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-short-format.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithNoMatchingFiles_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("No YAML files found") || d.Message.Contains("No changelog entries matched")); + } + + [Fact] + public async Task BundleChangelogs_WithInvalidDirectory_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var invalidDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent"); + + var input = new ChangelogBundleInput + { + Directory = invalidDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Directory does not exist")); + } + + [Fact] + public async Task BundleChangelogs_WithNoFilterOption_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: First changelog + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second changelog + type: enhancement + products: + - product: kibana + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-changelog.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-changelog.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var 
input = new ChangelogBundleInput + { + Directory = changelogDir, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("At least one filter option must be specified")); + } + + [Fact] + public async Task BundleChangelogs_WithMultipleFilterOptions_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Multiple filter options cannot be specified together")); + } + + [Fact] + public async Task BundleChangelogs_WithMultipleProducts_CreatesValidBundle() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Cloud serverless feature 1 + type: feature + products: + - product: cloud-serverless + target: 2025-12-02 + pr: https://github.com/elastic/cloud-serverless/pull/100 + """; + var changelog2 = """ + title: Cloud serverless feature 2 + type: feature + products: + - product: cloud-serverless + target: 2025-12-06 + pr: https://github.com/elastic/cloud-serverless/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-cloud-feature1.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-cloud-feature2.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [ + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-02", Lifecycle = "*" }, + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-06", Lifecycle = "*" } + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("product: cloud-serverless"); + bundleContent.Should().Contain("target: 
2025-12-02"); + bundleContent.Should().Contain("target: 2025-12-06"); + bundleContent.Should().Contain("name: 1755268130-cloud-feature1.yaml"); + bundleContent.Should().Contain("name: 1755268140-cloud-feature2.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithWildcardProductFilter_MatchesAllProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-feature.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [new ProductInfo { Product = "*", Target = "9.2.0", Lifecycle = "ga" }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); + bundleContent.Should().Contain("name: 1755268140-kibana-feature.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithWildcardAllParts_EquivalentToAll() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.3.0 + lifecycle: beta + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-feature.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [new ProductInfo { Product = "*", Target = "*", Lifecycle = "*" }], + Output = 
fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); + bundleContent.Should().Contain("name: 1755268140-kibana-feature.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrefixWildcardTarget_MatchesCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch 9.3.0 feature + type: feature + products: + - product: elasticsearch + target: 9.3.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Elasticsearch 9.3.1 feature + type: feature + products: + - product: elasticsearch + target: 9.3.1 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + var changelog3 = """ + title: Elasticsearch 9.2.0 feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/300 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-es-9.3.0.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-es-9.3.1.yaml"); + var file3 = fileSystem.Path.Combine(changelogDir, "1755268150-es-9.2.0.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file3, changelog3, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.3.*", Lifecycle = "*" }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-es-9.3.0.yaml"); + bundleContent.Should().Contain("name: 1755268140-es-9.3.1.yaml"); + bundleContent.Should().NotContain("name: 1755268150-es-9.2.0.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithNonExistentFileAsPrs_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Provide a non-existent file path - should return error since there are no other PRs + var nonexistentFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), 
Guid.NewGuid().ToString(), "nonexistent.txt"); + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = [nonexistentFile], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + // File doesn't exist and there are no other PRs, so should return error + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("File does not exist")); + } + + [Fact] + public async Task BundleChangelogs_WithUrlAsPrs_TreatsAsPrIdentifier() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create a changelog file for a specific PR + var changelog = """ + title: Test PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/123 + """; + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // Provide a URL - should be treated as a PR identifier, not a file path + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = ["https://github.com/elastic/elasticsearch/pull/123"], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + // URL should be treated as PR identifier and match the changelog + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-test-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithNonExistentFileAndOtherPrs_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create a changelog file for a specific PR + var changelog = """ + title: Test PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/123 + """; + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // Provide a non-existent file path along with a valid PR - should emit warning for file but continue with PR + var nonexistentFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.txt"); + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = [nonexistentFile, "https://github.com/elastic/elasticsearch/pull/123"], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; 
+ + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + // Should succeed because we have a valid PR, but should emit warning for the non-existent file + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + // Check that we have a warning about the file not existing + var fileWarning = _collector.Diagnostics.FirstOrDefault(d => d.Message.Contains("File does not exist, skipping")); + fileWarning.Should().NotBeNull("Expected a warning about the non-existent file being skipped"); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-test-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithOutputProducts_OverridesChangelogProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files with different products + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-feature.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + OutputProducts = [ + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-02", Lifecycle = "ga" }, + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-06", Lifecycle = "beta" } + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + // Output products should override changelog products + bundleContent.Should().Contain("product: cloud-serverless"); + bundleContent.Should().Contain("target: 2025-12-02"); + bundleContent.Should().Contain("target: 2025-12-06"); + // Lifecycle values should be included in products array + bundleContent.Should().Contain("lifecycle: ga"); + bundleContent.Should().Contain("lifecycle: beta"); + // Should not contain products from changelogs + bundleContent.Should().NotContain("product: elasticsearch"); + bundleContent.Should().NotContain("product: kibana"); + // But should still contain the entries + bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); + bundleContent.Should().Contain("name: 1755268140-kibana-feature.yaml"); + } + + [Fact] + public async Task 
BundleChangelogs_WithMultipleProducts_IncludesAllProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files with different products + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/kibana/pull/200 + """; + var changelog3 = """ + title: Multi-product feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/300 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana.yaml"); + var file3 = fileSystem.Path.Combine(changelogDir, "1755268150-multi-product.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file3, changelog3, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("product: kibana"); + bundleContent.Should().Contain("target: 9.2.0"); + // Should have 3 entries + var entryCount = bundleContent.Split("file:", StringSplitOptions.RemoveEmptyEntries).Length - 1; + entryCount.Should().Be(3); + } + + [Fact] + public async Task BundleChangelogs_WithInputProducts_IncludesLifecycleInProductsArray() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch GA feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Elasticsearch Beta feature + type: feature + products: + - product: elasticsearch + target: 9.3.0 + lifecycle: beta + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-ga.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-elasticsearch-beta.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await 
fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [ + new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }, + new ProductInfo { Product = "elasticsearch", Target = "9.3.0", Lifecycle = "beta" } + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + // Verify lifecycle is included in products array (extracted from changelog entries, not filter) + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); + bundleContent.Should().Contain("target: 9.3.0"); + bundleContent.Should().Contain("lifecycle: ga"); + bundleContent.Should().Contain("lifecycle: beta"); + } + + [Fact] + public async Task BundleChangelogs_WithOutputProducts_IncludesLifecycleInProductsArray() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + OutputProducts = [ + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-02", Lifecycle = "ga" }, + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-06", Lifecycle = "beta" } + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + // Verify lifecycle is included in products array from --output-products + bundleContent.Should().Contain("product: cloud-serverless"); + bundleContent.Should().Contain("target: 2025-12-02"); + bundleContent.Should().Contain("target: 2025-12-06"); + bundleContent.Should().Contain("lifecycle: ga"); + bundleContent.Should().Contain("lifecycle: beta"); + } + + [Fact] + public async Task BundleChangelogs_ExtractsLifecycleFromChangelogEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files with lifecycle + var changelog1 = """ + title: Elasticsearch GA feature + type: feature 
+ products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Elasticsearch Beta feature + type: feature + products: + - product: elasticsearch + target: 9.3.0 + lifecycle: beta + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-ga.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-elasticsearch-beta.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + // Verify lifecycle is included in products array extracted from changelog entries + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); + bundleContent.Should().Contain("target: 9.3.0"); + bundleContent.Should().Contain("lifecycle: ga"); + bundleContent.Should().Contain("lifecycle: beta"); + } + + [Fact] + public async Task BundleChangelogs_WithInputProductsWildcardLifecycle_ExtractsActualLifecycleFromChangelogs() + { + // Arrange - Test the scenario where --input-products uses "*" for lifecycle, + // but the actual lifecycle value should be extracted from the changelog entries + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file with lifecycle + var changelog1 = """ + title: A new feature was added + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + InputProducts = [ + new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "*" } + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + // Verify that the actual lifecycle value "ga" from the changelog is included in products array, + // not the wildcard "*" from the filter + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); + bundleContent.Should().Contain("lifecycle: ga"); + // Verify wildcard "*" is not included in the 
products array + bundleContent.Should().NotContain("lifecycle: *"); + bundleContent.Should().NotContain("lifecycle: '*\""); + } + + [Fact] + public async Task BundleChangelogs_WithMultipleTargets_WarningIncludesLifecycle() + { + // Arrange - Test that warning message includes lifecycle when multiple products + // have the same target but different lifecycles + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files with same target but different lifecycles + var changelog1 = """ + title: Elasticsearch GA feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Elasticsearch Beta feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: beta + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + var changelog3 = """ + title: Elasticsearch feature without lifecycle + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/300 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-ga.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-elasticsearch-beta.yaml"); + var file3 = fileSystem.Path.Combine(changelogDir, "1755268150-elasticsearch-no-lifecycle.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file3, changelog3, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + // Verify warning message includes lifecycle values + _collector.Diagnostics.Should().Contain(d => + d.Message.Contains("Product 'elasticsearch' has multiple targets in bundle") && + d.Message.Contains("9.2.0") && + d.Message.Contains("9.2.0 beta") && + d.Message.Contains("9.2.0 ga")); + } + + [Fact] + public async Task BundleChangelogs_WithResolve_CopiesChangelogContents() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + areas: + - Search + description: This is a test feature + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + 
Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("file:"); + bundleContent.Should().Contain("name: 1755268130-test-feature.yaml"); + bundleContent.Should().Contain("checksum:"); + bundleContent.Should().Contain("type: feature"); + bundleContent.Should().Contain("title: Test feature"); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); + bundleContent.Should().Contain("pr: https://github.com/elastic/elasticsearch/pull/100"); + bundleContent.Should().Contain("areas:"); + bundleContent.Should().Contain("- Search"); + bundleContent.Should().Contain("description: This is a test feature"); + } + + [Fact] + public async Task BundleChangelogs_WithDirectoryOutputPath_CreatesDefaultFilename() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + // Use a directory path with default filename (simulating command layer processing) + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var outputPath = fileSystem.Path.Combine(outputDir, "changelog-bundle.yaml"); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Output = outputPath + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + fileSystem.File.Exists(outputPath).Should().BeTrue("Output file should be created"); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(outputPath, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("products:"); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("entries:"); + bundleContent.Should().Contain("name: 1755268130-test-feature.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndMissingTitle_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without title + var changelog1 = """ + type: feature + products: + - product: elasticsearch + target: 9.2.0 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await 
fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field: title")); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndMissingType_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without type + var changelog1 = """ + title: Test feature + products: + - product: elasticsearch + target: 9.2.0 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field: type")); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndMissingProducts_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without products + var changelog1 = """ + title: Test feature + type: feature + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field: products")); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndInvalidProduct_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file with invalid product (missing 
product field) + var changelog1 = """ + title: Test feature + type: feature + products: + - target: 9.2.0 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("product entry missing required field: product")); + } + + [Fact] + public async Task RenderChangelogs_WithValidBundle_CreatesMarkdownFiles() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This is a test feature + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog1, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml"); + fileSystem.Directory.CreateDirectory(fileSystem.Path.GetDirectoryName(bundleFile)!); + + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-test-feature.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("## 9.2.0"); + indexContent.Should().Contain("Test feature"); + } + + [Fact] + public async Task RenderChangelogs_WithMultipleBundles_MergesAndRenders() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir1 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var changelogDir2 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir1); + 
fileSystem.Directory.CreateDirectory(changelogDir2); + + // Create test changelog files + var changelog1 = """ + title: First feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second feature + type: enhancement + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir1, "1755268130-first.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir2, "1755268140-second.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + // Create bundle files + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundle1 = fileSystem.Path.Combine(bundleDir, "bundle1.yaml"); + var bundleContent1 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-first.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundle1, bundleContent1, TestContext.Current.CancellationToken); + + var bundle2 = fileSystem.Path.Combine(bundleDir, "bundle2.yaml"); + var bundleContent2 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268140-second.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundle2, bundleContent2, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundle1, Directory = changelogDir1 }, + new BundleInput { BundleFile = bundle2, Directory = changelogDir2 } + ], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("First feature"); + indexContent.Should().Contain("Second feature"); + } + + [Fact] + public async Task CreateChangelog_WithBlockingLabel_SkipsChangelogCreation() + { + // Arrange + var mockGitHubService = A.Fake(); + var prInfo = new GitHubPrInfo + { + Title = "PR with blocking label", + Labels = ["type:feature", "skip:releaseNotes"] + }; + + A.CallTo(() => mockGitHubService.FetchPrInfoAsync( + A._, + A._, + A._, + A._)) + .Returns(prInfo); + + var fileSystem = new FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(configDir); + var configPath = fileSystem.Path.Combine(configDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - preview + - beta + - ga + label_to_type: + "type:feature": feature + add_blockers: + elasticsearch: + - "skip:releaseNotes" + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, 
TestContext.Current.CancellationToken); + + var service = new ChangelogService(_loggerFactory, _configurationContext, mockGitHubService); + + var input = new ChangelogInput + { + Prs = ["https://github.com/elastic/elasticsearch/pull/1234"], + Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }], + Config = configPath, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.CreateChangelog(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); // Should succeed but skip creating changelog + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Skipping changelog creation") && d.Message.Contains("skip:releaseNotes")); + + var outputDir = input.Output ?? Directory.GetCurrentDirectory(); + if (!Directory.Exists(outputDir)) + Directory.CreateDirectory(outputDir); + var files = Directory.GetFiles(outputDir, "*.yaml"); + files.Should().HaveCount(0); // No files should be created + } + + [Fact] + public async Task CreateChangelog_WithBlockingLabelForSpecificProduct_OnlyBlocksForThatProduct() + { + // Arrange + var mockGitHubService = A.Fake(); + var prInfo = new GitHubPrInfo + { + Title = "PR with blocking label", + Labels = ["type:feature", "ILM"] + }; + + A.CallTo(() => mockGitHubService.FetchPrInfoAsync( + A._, + A._, + A._, + A._)) + .Returns(prInfo); + + var fileSystem = new FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(configDir); + var configPath = fileSystem.Path.Combine(configDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - preview + - beta + - ga + label_to_type: + "type:feature": feature + add_blockers: + cloud-serverless: + - "ILM" + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + var service = new ChangelogService(_loggerFactory, _configurationContext, mockGitHubService); + + var input = new ChangelogInput + { + Prs = ["https://github.com/elastic/elasticsearch/pull/1234"], + Products = [ + new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }, + new ProductInfo { Product = "cloud-serverless", Target = "2025-08-05" } + ], + Config = configPath, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.CreateChangelog(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); // Should succeed but skip creating changelog due to cloud-serverless blocker + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Skipping changelog creation") && d.Message.Contains("ILM")); + + var outputDir = input.Output ?? 
Directory.GetCurrentDirectory(); + if (!Directory.Exists(outputDir)) + Directory.CreateDirectory(outputDir); + var files = Directory.GetFiles(outputDir, "*.yaml"); + files.Should().HaveCount(0); // No files should be created because cloud-serverless blocks it + } + + [Fact] + public async Task RenderChangelogs_WithMissingBundleFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var missingBundle = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.yaml"); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = missingBundle }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Bundle file does not exist")); + } + + [Fact] + public async Task RenderChangelogs_WithMissingChangelogFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = """ + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: nonexistent.yaml + checksum: abc123 + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = bundleDir }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("does not exist")); + } + + [Fact] + public async Task RenderChangelogs_WithInvalidBundleStructure_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = """ + invalid_field: value + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field") || d.Message.Contains("Failed to deserialize")); + } + + [Fact] + public async 
Task RenderChangelogs_WithDuplicateFileName_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir1 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var changelogDir2 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir1); + fileSystem.Directory.CreateDirectory(changelogDir2); + + // Create same changelog file in both directories + var changelog = """ + title: Duplicate feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var fileName = "1755268130-duplicate.yaml"; + var file1 = fileSystem.Path.Combine(changelogDir1, fileName); + var file2 = fileSystem.Path.Combine(changelogDir2, fileName); + await fileSystem.File.WriteAllTextAsync(file1, changelog, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog, TestContext.Current.CancellationToken); + + // Create bundle files + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundle1 = fileSystem.Path.Combine(bundleDir, "bundle1.yaml"); + var bundleContent1 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundle1, bundleContent1, TestContext.Current.CancellationToken); + + var bundle2 = fileSystem.Path.Combine(bundleDir, "bundle2.yaml"); + var bundleContent2 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundle2, bundleContent2, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundle1, Directory = changelogDir1 }, + new BundleInput { BundleFile = bundle2, Directory = changelogDir2 } + ], + Output = outputDir + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("appears in multiple bundles")); + } + + [Fact] + public async Task RenderChangelogs_WithDuplicateFileNameInSameBundle_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog file + var changelog = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var fileName = "1755268130-test-feature.yaml"; + var changelogFile = fileSystem.Path.Combine(changelogDir, fileName); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // 
Create bundle file with the same file referenced twice + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundleFile, Directory = changelogDir } + ], + Output = outputDir + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("appears multiple times in the same bundle") && + d.File == bundleFile); + } + + [Fact] + public async Task RenderChangelogs_WithDuplicatePr_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir1 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var changelogDir2 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir1); + fileSystem.Directory.CreateDirectory(changelogDir2); + + // Create changelog files with same PR + var changelog1 = """ + title: First feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second feature + type: enhancement + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir1, "1755268130-first.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir2, "1755268140-second.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + // Create bundle files + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundle1 = fileSystem.Path.Combine(bundleDir, "bundle1.yaml"); + var bundleContent1 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-first.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundle1, bundleContent1, TestContext.Current.CancellationToken); + + var bundle2 = fileSystem.Path.Combine(bundleDir, "bundle2.yaml"); + var bundleContent2 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268140-second.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundle2, bundleContent2, TestContext.Current.CancellationToken); + + var outputDir = 
fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundle1, Directory = changelogDir1 }, + new BundleInput { BundleFile = bundle2, Directory = changelogDir2 } + ], + Output = outputDir + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("appears in multiple bundles")); + } + + [Fact] + public async Task RenderChangelogs_WithInvalidChangelogFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create invalid changelog file (missing required fields) + var invalidChangelog = """ + title: Invalid feature + # Missing type and products + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-invalid.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, invalidChangelog, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-invalid.yaml + checksum: {ComputeSha1(invalidChangelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field")); + } + + [Fact] + public async Task RenderChangelogs_WithResolvedEntry_ValidatesAndRenders() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = """ + products: + - product: elasticsearch + target: 9.2.0 + entries: + - type: feature + title: Resolved feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile }], + Output = 
outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("Resolved feature"); + } + + [Fact] + public async Task RenderChangelogs_WithoutTitleAndNoTargets_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without target + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog1, TestContext.Current.CancellationToken); + + // Create bundle file without target + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + entries: + - file: + name: 1755268130-test-feature.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir + // Note: Title is not set + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No --title option provided") && + d.Message.Contains("default to 'unknown'")); + } + + [Fact] + public async Task RenderChangelogs_WithTitleAndNoTargets_NoWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without target + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog1, TestContext.Current.CancellationToken); + + // Create bundle file without target + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + 
fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + entries: + - file: + name: 1755268130-test-feature.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); - // One file should contain first PR title, the other should contain second PR title - var contents = new[] { yamlContent1, yamlContent2 }; - contents.Should().Contain(c => c.Contains("title: First PR feature")); - contents.Should().Contain(c => c.Contains("title: Second PR bug fix")); - contents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/1234")); - contents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/5678")); + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" // Title is provided + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + // Should not have warning about missing title + _collector.Diagnostics.Should().NotContain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No --title option provided")); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog with feature-id + var changelog1 = """ + title: Hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature-id: feature:hidden-api + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This feature should be hidden + """; + + // Create changelog without feature-id (should not be hidden) + var changelog2 = """ + title: Visible feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/101 + description: This feature should be visible + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-hidden.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-hidden.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-visible.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, 
TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:hidden-api"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Hidden feature") && + d.Message.Contains("feature:hidden-api") && + d.Message.Contains("will be commented out")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Hidden entry should be commented out with % prefix + indexContent.Should().Contain("% * Hidden feature"); + // Visible entry should not be commented + indexContent.Should().Contain("* Visible feature"); + indexContent.Should().NotContain("% * Visible feature"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_BreakingChange_UsesBlockComments() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden breaking change + type: breaking-change + products: + - product: elasticsearch + target: 9.2.0 + feature-id: feature:hidden-breaking + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This breaking change should be hidden + impact: Users will be affected + action: Update your code + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-breaking.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-breaking.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:hidden-breaking"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var breakingFile = fileSystem.Path.Combine(outputDir, "9.2.0", "breaking-changes.md"); + fileSystem.File.Exists(breakingFile).Should().BeTrue(); + + var breakingContent = await fileSystem.File.ReadAllTextAsync(breakingFile, 
TestContext.Current.CancellationToken); + // Should use block comments + breakingContent.Should().Contain("<!--"); + breakingContent.Should().Contain("-->"); + breakingContent.Should().Contain("Hidden breaking change"); + // Entry should be between comment markers + var commentStart = breakingContent.IndexOf("<!--", StringComparison.Ordinal); + var commentEnd = breakingContent.IndexOf("-->", StringComparison.Ordinal); + commentStart.Should().BeLessThan(commentEnd); + breakingContent.Substring(commentStart, commentEnd - commentStart).Should().Contain("Hidden breaking change"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_Deprecation_UsesBlockComments() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden deprecation + type: deprecation + products: + - product: elasticsearch + target: 9.2.0 + feature-id: feature:hidden-deprecation + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This deprecation should be hidden + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-deprecation.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-deprecation.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:hidden-deprecation"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var deprecationsFile = fileSystem.Path.Combine(outputDir, "9.2.0", "deprecations.md"); + fileSystem.File.Exists(deprecationsFile).Should().BeTrue(); + + var deprecationsContent = await fileSystem.File.ReadAllTextAsync(deprecationsFile, TestContext.Current.CancellationToken); + // Should use block comments + deprecationsContent.Should().Contain("<!--"); + deprecationsContent.Should().Contain("-->"); + deprecationsContent.Should().Contain("Hidden deprecation"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_CommaSeparated_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog1 = """ + title: First hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature-id: feature:first + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelog2 = """ + title: Second hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature-id: 
feature:second + pr: https://github.com/elastic/elasticsearch/pull/101 + """; + + var changelog3 = """ + title: Visible feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/102 + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-first.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-second.yaml"); + var changelogFile3 = fileSystem.Path.Combine(changelogDir, "1755268150-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile3, changelog3, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-first.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-second.yaml + checksum: {ComputeSha1(changelog2)} + - file: + name: 1755268150-visible.yaml + checksum: {ComputeSha1(changelog3)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:first", "feature:second"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("% * First hidden feature"); + indexContent.Should().Contain("% * Second hidden feature"); + indexContent.Should().Contain("* Visible feature"); + indexContent.Should().NotContain("% * Visible feature"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_FromFile_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature-id: feature:from-file + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-hidden.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + 
products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-hidden.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Create feature IDs file + var featureIdsFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "feature-ids.txt"); + fileSystem.Directory.CreateDirectory(fileSystem.Path.GetDirectoryName(featureIdsFile)!); + await fileSystem.File.WriteAllTextAsync(featureIdsFile, "feature:from-file\nfeature:another", TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = [featureIdsFile] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("% * Hidden feature"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_CaseInsensitive_MatchesFeatureIds() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature-id: Feature:UpperCase + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-hidden.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-hidden.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:uppercase"] // Different case + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Should match case-insensitively + indexContent.Should().Contain("% * Hidden feature"); + } + + [Fact] + public async 
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog that should be blocked (elasticsearch + search area)
+		var changelog1 = """
+			title: Blocked feature
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - search
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			description: This feature should be blocked
+			""";
+
+		// Create changelog that should NOT be blocked (elasticsearch but different area)
+		var changelog2 = """
+			title: Visible feature
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - observability
+			pr: https://github.com/elastic/elasticsearch/pull/101
+			description: This feature should be visible
+			""";
+
+		var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-blocked.yaml");
+		var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-visible.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken);
+
+		// Create config file with render_blockers in docs/ subdirectory
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		var configContent = """
+			available_types:
+			  - feature
+			available_subtypes: []
+			available_lifecycles:
+			  - ga
+			render_blockers:
+			  elasticsearch:
+			    areas:
+			      - search
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		// Create bundle file
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			entries:
+			  - file:
+			      name: 1755268130-blocked.yaml
+			      checksum: {ComputeSha1(changelog1)}
+			  - file:
+			      name: 1755268140-visible.yaml
+			      checksum: {ComputeSha1(changelog2)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
+
+		// Set current directory to where config file is located so it can be found
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			_collector.Warnings.Should().BeGreaterThan(0);
+			_collector.Diagnostics.Should().Contain(d =>
+				d.Severity == Severity.Warning &&
+				d.Message.Contains("Blocked feature") &&
+				d.Message.Contains("render_blockers") &&
+				d.Message.Contains("product 'elasticsearch'") &&
+				d.Message.Contains("area 'search'"));
+
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			// Blocked entry should be commented out with % prefix
+			indexContent.Should().Contain("% * Blocked feature");
+			// Visible entry should not be commented
+			indexContent.Should().Contain("* Visible feature");
+			indexContent.Should().NotContain("% * Visible feature");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
+	}
+
+	[Fact]
+	public async Task RenderChangelogs_WithRenderBlockers_CommaSeparatedProducts_CommentsOutMatchingEntries()
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog with cloud-serverless product that should be blocked
+		var changelog1 = """
+			title: Blocked cloud feature
+			type: feature
+			products:
+			  - product: cloud-serverless
+			    target: 2025-12-02
+			areas:
+			  - security
+			pr: https://github.com/elastic/cloud-serverless/pull/100
+			description: This feature should be blocked
+			""";
+
+		// Create changelog with elasticsearch product that should also be blocked
+		var changelog2 = """
+			title: Blocked elasticsearch feature
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - security
+			pr: https://github.com/elastic/elasticsearch/pull/101
+			description: This feature should also be blocked
+			""";
+
+		var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-cloud-blocked.yaml");
+		var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-es-blocked.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken);
+
+		// Create config file with render_blockers using comma-separated products in docs/ subdirectory
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		var configContent = """
+			available_types:
+			  - feature
+			available_subtypes: []
+			available_lifecycles:
+			  - ga
+			render_blockers:
+			  "elasticsearch, cloud-serverless":
+			    areas:
+			      - security
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		// Create bundle file
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			  - product: cloud-serverless
+			    target: 2025-12-02
+			entries:
+			  - file:
+			      name: 1755268130-cloud-blocked.yaml
+			      checksum: {ComputeSha1(changelog1)}
+			  - file:
+			      name: 1755268140-es-blocked.yaml
+			      checksum: {ComputeSha1(changelog2)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
+
+		// Set current directory to where config file is located so it can be found
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			_collector.Warnings.Should().BeGreaterThan(0);
+
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			// Both entries should be commented out
+			indexContent.Should().Contain("% * Blocked cloud feature");
+			indexContent.Should().Contain("% * Blocked elasticsearch feature");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
+	}
+
+	[Fact]
+	public async Task RenderChangelogs_WithRenderBlockers_MultipleProductsInEntry_ChecksAllProducts()
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog with multiple products - one matches render_blockers
+		var changelog = """
+			title: Multi-product feature
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			  - product: kibana
+			    target: 9.2.0
+			areas:
+			  - search
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			description: This feature should be blocked because elasticsearch matches
+			""";
+
+		var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-multi-product.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken);
+
+		// Create config file with render_blockers for elasticsearch only in docs/ subdirectory
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		var configContent = """
+			available_types:
+			  - feature
+			available_subtypes: []
+			available_lifecycles:
+			  - ga
+			render_blockers:
+			  elasticsearch:
+			    areas:
+			      - search
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		// Create bundle file
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			  - product: kibana
+			    target: 9.2.0
+			entries:
+			  - file:
+			      name: 1755268130-multi-product.yaml
+			      checksum: {ComputeSha1(changelog)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
+
+		// Set current directory to where config file is located so it can be found
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			_collector.Warnings.Should().BeGreaterThan(0);
+			_collector.Diagnostics.Should().Contain(d =>
+				d.Severity == Severity.Warning &&
+				d.Message.Contains("Multi-product feature") &&
+				d.Message.Contains("product 'elasticsearch'"));
+
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			// Should be blocked because elasticsearch matches, even though kibana doesn't
+			indexContent.Should().Contain("% * Multi-product feature");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
+	}
+
+	[Fact]
+	public async Task RenderChangelogs_WithRenderBlockers_TypeBlocking_CommentsOutMatchingEntries()
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog that should be blocked (elasticsearch + feature type, blocked by type)
+		var changelog1 = """
+			title: Blocked feature by type
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			description: This feature should be blocked by type
+			""";
+
+		// Create changelog that should NOT be blocked (elasticsearch but different type)
+		var changelog2 = """
+			title: Visible enhancement
+			type: enhancement
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			pr: https://github.com/elastic/elasticsearch/pull/101
+			description: This enhancement should be visible
+			""";
+
+		var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-blocked.yaml");
+		var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-visible.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken);
+
+		// Create config file with render_blockers blocking the feature type
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		var configContent = """
+			available_types:
+			  - feature
+			  - enhancement
+			available_subtypes: []
+			available_lifecycles:
+			  - ga
+			render_blockers:
+			  elasticsearch:
+			    types:
+			      - feature
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		// Create bundle file
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			entries:
+			  - file:
+			      name: 1755268130-blocked.yaml
+			      checksum: {ComputeSha1(changelog1)}
+			  - file:
+			      name: 1755268140-visible.yaml
+			      checksum: {ComputeSha1(changelog2)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
+
+		// Set current directory to where config file is located so it can be found
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			_collector.Warnings.Should().BeGreaterThan(0);
+			_collector.Diagnostics.Should().Contain(d =>
+				d.Severity == Severity.Warning &&
+				d.Message.Contains("Blocked feature by type") &&
+				d.Message.Contains("render_blockers") &&
+				d.Message.Contains("product 'elasticsearch'") &&
+				d.Message.Contains("type 'feature'"));
+
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			// Blocked entry should be commented out with % prefix
+			indexContent.Should().Contain("% * Blocked feature by type");
+			// Visible entry should not be commented
+			indexContent.Should().Contain("* Visible enhancement");
+			indexContent.Should().NotContain("% * Visible enhancement");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
 	}
 
 	[Fact]
-	public async Task CreateChangelog_WithBlockingLabel_SkipsChangelogCreation()
+	public async Task RenderChangelogs_WithRenderBlockers_AreasAndTypes_CommentsOutMatchingEntries()
 	{
 		// Arrange
-		var mockGitHubService = A.Fake();
-		var prInfo = new GitHubPrInfo
-		{
-			Title = "PR with blocking label",
-			Labels = ["type:feature", "skip:releaseNotes"]
-		};
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog that should be blocked by area (elasticsearch + search area)
+		var changelog1 = """
+			title: Blocked by area
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - search
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			description: This should be blocked by area
+			""";
 
-		A.CallTo(() => mockGitHubService.FetchPrInfoAsync(
-				A._,
-				A._,
-				A._,
-				A._))
-			.Returns(prInfo);
+		// Create changelog that should be blocked by type (elasticsearch + enhancement type, blocked by type)
+		var changelog2 = """
+			title: Blocked by type
+			type: enhancement
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			pr: https://github.com/elastic/elasticsearch/pull/101
+			description: This should be blocked by type
+			""";
 
-		var fileSystem = new FileSystem();
+		// Create changelog that should NOT be blocked
+		var changelog3 = """
+			title: Visible feature
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - observability
+			pr: https://github.com/elastic/elasticsearch/pull/102
+			description: This should be visible
+			""";
+
+		var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-area-blocked.yaml");
+		var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-type-blocked.yaml");
+		var changelogFile3 = fileSystem.Path.Combine(changelogDir, "1755268150-visible.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(changelogFile3, changelog3, TestContext.Current.CancellationToken);
+
+		// Create config file with render_blockers blocking both areas and types
 		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
-		fileSystem.Directory.CreateDirectory(configDir);
-		var configPath = fileSystem.Path.Combine(configDir, "changelog.yml");
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
 		var configContent = """
 			available_types:
 			  - feature
+			  - enhancement
 			available_subtypes: []
 			available_lifecycles:
-			  - preview
-			  - beta
 			  - ga
-			label_to_type:
-			  "type:feature": feature
-			add_blockers:
+			render_blockers:
 			  elasticsearch:
-			    - "skip:releaseNotes"
+			    areas:
+			      - search
+			    types:
+			      - enhancement
 			""";
 		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
 
-		var service = new ChangelogService(_loggerFactory, _configurationContext, mockGitHubService);
+		// Create bundle file
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			entries:
+			  - file:
+			      name: 1755268130-area-blocked.yaml
+			      checksum: {ComputeSha1(changelog1)}
+			  - file:
+			      name: 1755268140-type-blocked.yaml
+			      checksum: {ComputeSha1(changelog2)}
+			  - file:
+			      name: 1755268150-visible.yaml
+			      checksum: {ComputeSha1(changelog3)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
 
-		var input = new ChangelogInput
+		// Set current directory to where config file is located so it can be found
+		var originalDir = Directory.GetCurrentDirectory();
+		try
 		{
-			Prs = ["https://github.com/elastic/elasticsearch/pull/1234"],
-			Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }],
-			Config = configPath,
-			Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString())
-		};
+			Directory.SetCurrentDirectory(configDir);
 
-		// Act
-		var result = await service.CreateChangelog(_collector, input, TestContext.Current.CancellationToken);
-
-		// Assert
-		result.Should().BeTrue(); // Should succeed but skip creating changelog
-		_collector.Warnings.Should().BeGreaterThan(0);
-		_collector.Diagnostics.Should().Contain(d => d.Message.Contains("Skipping changelog creation") && d.Message.Contains("skip:releaseNotes"));
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
 
-		var outputDir = input.Output ?? Directory.GetCurrentDirectory();
-		if (!Directory.Exists(outputDir))
-			Directory.CreateDirectory(outputDir);
-		var files = Directory.GetFiles(outputDir, "*.yaml");
-		files.Should().HaveCount(0); // No files should be created
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			_collector.Warnings.Should().BeGreaterThan(0);
+
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			// Both blocked entries should be commented out
+			indexContent.Should().Contain("% * Blocked by area");
+			indexContent.Should().Contain("% * Blocked by type");
+			// Visible entry should not be commented
+			indexContent.Should().Contain("* Visible feature");
+			indexContent.Should().NotContain("% * Visible feature");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
 	}
 
 	[Fact]
-	public async Task CreateChangelog_WithBlockingLabelForSpecificProduct_OnlyBlocksForThatProduct()
+	public async Task RenderChangelogs_WithRenderBlockers_UsesBundleProductsNotEntryProducts()
 	{
 		// Arrange
-		var mockGitHubService = A.Fake();
-		var prInfo = new GitHubPrInfo
-		{
-			Title = "PR with blocking label",
-			Labels = ["type:feature", "ILM"]
-		};
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog with elasticsearch product and search area
+		// But bundle has kibana product - should NOT be blocked because render_blockers matches against bundle products
+		var changelog1 = """
+			title: Entry with elasticsearch but bundle has kibana
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - search
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			description: This should NOT be blocked because bundle product is kibana
+			""";
 
-		A.CallTo(() => mockGitHubService.FetchPrInfoAsync(
-				A._,
-				A._,
-				A._,
-				A._))
-			.Returns(prInfo);
+		var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-test.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken);
 
-		var fileSystem = new FileSystem();
+		// Create config file with render_blockers blocking elasticsearch
 		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
-		fileSystem.Directory.CreateDirectory(configDir);
-		var configPath = fileSystem.Path.Combine(configDir, "changelog.yml");
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
 		var configContent = """
 			available_types:
 			  - feature
 			available_subtypes: []
 			available_lifecycles:
-			  - preview
-			  - beta
 			  - ga
-			label_to_type:
-			  "type:feature": feature
-			add_blockers:
-			  cloud-serverless:
-			    - "ILM"
+			render_blockers:
+			  elasticsearch:
+			    areas:
+			      - search
 			""";
 		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
 
-		var service = new ChangelogService(_loggerFactory, _configurationContext, mockGitHubService);
+		// Create bundle file with kibana product (not elasticsearch)
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: kibana
+			    target: 9.2.0
+			entries:
+			  - file:
+			      name: 1755268130-test.yaml
+			      checksum: {ComputeSha1(changelog1)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
 
-		var input = new ChangelogInput
+		// Set current directory to where config file is located so it can be found
+		var originalDir = Directory.GetCurrentDirectory();
+		try
 		{
-			Prs = ["https://github.com/elastic/elasticsearch/pull/1234"],
-			Products = [
-				new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" },
-				new ProductInfo { Product = "cloud-serverless", Target = "2025-08-05" }
-			],
-			Config = configPath,
-			Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString())
-		};
-
-		// Act
-		var result = await service.CreateChangelog(_collector, input, TestContext.Current.CancellationToken);
+			Directory.SetCurrentDirectory(configDir);
 
-		// Assert
-		result.Should().BeTrue(); // Should succeed but skip creating changelog due to cloud-serverless blocker
-		_collector.Warnings.Should().BeGreaterThan(0);
-		_collector.Diagnostics.Should().Contain(d => d.Message.Contains("Skipping changelog creation") && d.Message.Contains("ILM") && d.Message.Contains("cloud-serverless"));
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
 
-		var outputDir = input.Output ?? Directory.GetCurrentDirectory();
-		if (!Directory.Exists(outputDir))
-			Directory.CreateDirectory(outputDir);
-		var files = Directory.GetFiles(outputDir, "*.yaml");
-		files.Should().HaveCount(0); // No files should be created
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			// Should have no warnings because entry is NOT blocked (bundle product is kibana, not elasticsearch)
+			_collector.Warnings.Should().Be(0);
+
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			// Entry should NOT be commented out because bundle product is kibana, not elasticsearch
+			indexContent.Should().Contain("* Entry with elasticsearch but bundle has kibana");
+			indexContent.Should().NotContain("% * Entry with elasticsearch but bundle has kibana");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
 	}
 
 	[Fact]
-	public async Task CreateChangelog_WithMultiplePrsAndSomeBlocked_CreatesFilesForNonBlockedPrs()
+	public async Task RenderChangelogs_WithCustomConfigPath_UsesSpecifiedConfigFile()
 	{
 		// Arrange
-		var mockGitHubService = A.Fake();
-		var pr1Info = new GitHubPrInfo
-		{
-			Title = "First PR without blocker",
-			Labels = ["type:feature"]
-		};
-		var pr2Info = new GitHubPrInfo
-		{
-			Title = "Second PR with blocker",
-			Labels = ["type:feature", "skip:releaseNotes"]
-		};
-
-		A.CallTo(() => mockGitHubService.FetchPrInfoAsync(
-				A.That.Contains("1234"),
-				null,
-				null,
-				A._))
-			.Returns(pr1Info);
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(changelogDir);
+
+		// Create changelog that should be blocked (elasticsearch + search area)
+		var changelog1 = """
+			title: Blocked feature
+			type: feature
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			areas:
+			  - search
+			pr: https://github.com/elastic/elasticsearch/pull/100
+			description: This feature should be blocked
+			""";
 
-		A.CallTo(() => mockGitHubService.FetchPrInfoAsync(
-				A.That.Contains("5678"),
-				null,
-				null,
-				A._))
-			.Returns(pr2Info);
+		var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-blocked.yaml");
+		await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken);
 
-		var fileSystem = new FileSystem();
-		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
-		fileSystem.Directory.CreateDirectory(configDir);
-		var configPath = fileSystem.Path.Combine(configDir, "changelog.yml");
+		// Create config file in a custom location (not in docs/ subdirectory)
+		var customConfigDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(customConfigDir);
+		var customConfigPath = fileSystem.Path.Combine(customConfigDir, "custom-changelog.yml");
 		var configContent = """
 			available_types:
 			  - feature
 			available_subtypes: []
 			available_lifecycles:
-			  - preview
-			  - beta
 			  - ga
-			label_to_type:
-			  "type:feature": feature
-			add_blockers:
+			render_blockers:
 			  elasticsearch:
-			    - "skip:releaseNotes"
+			    areas:
+			      - search
 			""";
-		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+		await fileSystem.File.WriteAllTextAsync(customConfigPath, configContent, TestContext.Current.CancellationToken);
+
+		// Create bundle file
+		var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		fileSystem.Directory.CreateDirectory(bundleDir);
+
+		var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+		var bundleContent = $"""
+			products:
+			  - product: elasticsearch
+			    target: 9.2.0
+			entries:
+			  - file:
+			      name: 1755268130-blocked.yaml
+			      checksum: {ComputeSha1(changelog1)}
+			""";
+		await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
 
-		var service = new ChangelogService(_loggerFactory, _configurationContext, mockGitHubService);
+		// Don't change directory - use custom config path via Config property
+		var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
 
-		var input = new ChangelogInput
+		var input = new ChangelogRenderInput
 		{
-			Prs = ["https://github.com/elastic/elasticsearch/pull/1234", "https://github.com/elastic/elasticsearch/pull/5678"],
-			Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0", Lifecycle = "ga" }],
-			Config = configPath,
-			Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString())
+			Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+			Output = outputDir,
+			Title = "9.2.0",
+			Config = customConfigPath
 		};
 
 		// Act
-		var result = await service.CreateChangelog(_collector, input, TestContext.Current.CancellationToken);
+		var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
 
 		// Assert
 		result.Should().BeTrue();
+		_collector.Errors.Should().Be(0);
 		_collector.Warnings.Should().BeGreaterThan(0);
-		_collector.Diagnostics.Should().Contain(d => d.Message.Contains("Skipping changelog creation") && d.Message.Contains("5678"));
-
-		var outputDir = input.Output ?? Directory.GetCurrentDirectory();
Directory.GetCurrentDirectory(); - if (!Directory.Exists(outputDir)) - Directory.CreateDirectory(outputDir); - var files = Directory.GetFiles(outputDir, "*.yaml"); - files.Should().HaveCount(1); // Only one file should be created (for PR 1234) - - var yamlContent = await File.ReadAllTextAsync(files[0], TestContext.Current.CancellationToken); - yamlContent.Should().Contain("title: First PR without blocker"); - yamlContent.Should().Contain("pr: https://github.com/elastic/elasticsearch/pull/1234"); - yamlContent.Should().NotContain("Second PR with blocker"); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Blocked feature") && + d.Message.Contains("render_blockers") && + d.Message.Contains("product 'elasticsearch'") && + d.Message.Contains("area 'search'")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Blocked entry should be commented out with % prefix + indexContent.Should().Contain("% * Blocked feature"); } [Fact] @@ -1512,17 +4835,266 @@ public async Task CreateChangelog_WithPrsFromFile_ProcessesAllPrsFromFile() var yamlContents = new List(); foreach (var file in files) { - var content = await File.ReadAllTextAsync(file, TestContext.Current.CancellationToken); - yamlContents.Add(content); + yamlContents.Add(await File.ReadAllTextAsync(file, TestContext.Current.CancellationToken)); } // Verify all PRs were processed yamlContents.Should().Contain(c => c.Contains("title: First PR from file")); yamlContents.Should().Contain(c => c.Contains("title: Second PR from file")); yamlContents.Should().Contain(c => c.Contains("title: Third PR from file")); - yamlContents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/1111")); - yamlContents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/2222")); - yamlContents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/3333")); + } + + [Fact] + public async Task LoadChangelogConfiguration_WithoutAvailableTypes_UsesDefaults() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + // Config without available_types - should use defaults + var configContent = """ + available_subtypes: [] + available_lifecycles: + - ga + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + // Act + var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken); + + // Assert + config.Should().NotBeNull(); + _collector.Errors.Should().Be(0); + // Should have default types + config!.AvailableTypes.Should().Contain("feature"); + config.AvailableTypes.Should().Contain("bug-fix"); + config.AvailableTypes.Should().Contain("docs"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task 
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		// Config without available_subtypes - should use defaults
+		var configContent = """
+			available_types:
+			  - feature
+			available_lifecycles:
+			  - ga
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			// Act
+			var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken);
+
+			// Assert
+			config.Should().NotBeNull();
+			_collector.Errors.Should().Be(0);
+			// Should have default subtypes
+			config!.AvailableSubtypes.Should().Contain("api");
+			config.AvailableSubtypes.Should().Contain("behavioral");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
+	}
+
+	[Fact]
+	public async Task LoadChangelogConfiguration_WithoutAvailableLifecycles_UsesDefaults()
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		// Config without available_lifecycles - should use defaults
+		var configContent = """
+			available_types:
+			  - feature
+			available_subtypes: []
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			// Act
+			var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken);
+
+			// Assert
+			config.Should().NotBeNull();
+			_collector.Errors.Should().Be(0);
+			// Should have default lifecycles
+			config!.AvailableLifecycles.Should().Contain("preview");
+			config.AvailableLifecycles.Should().Contain("beta");
+			config.AvailableLifecycles.Should().Contain("ga");
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
+	}
+
+	[Fact]
+	public async Task LoadChangelogConfiguration_WithInvalidRenderBlockersType_ReturnsError()
+	{
+		// Arrange
+		var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+		var fileSystem = new FileSystem();
+		var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+		var docsDir = fileSystem.Path.Combine(configDir, "docs");
+		fileSystem.Directory.CreateDirectory(docsDir);
+		var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml");
+		// Config with invalid type in render_blockers
+		var configContent = """
+			available_types:
+			  - feature
+			  - docs
+			available_subtypes: []
+			available_lifecycles:
+			  - ga
+			render_blockers:
+			  elasticsearch:
+			    types:
+			      - invalid-type
+			""";
+		await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken);
+
+		var originalDir = Directory.GetCurrentDirectory();
+		try
+		{
+			Directory.SetCurrentDirectory(configDir);
+
+			// Act
+			var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken);
+
+			// Assert
+			config.Should().BeNull();
+			_collector.Errors.Should().BeGreaterThan(0);
+			_collector.Diagnostics.Should().Contain(d =>
+				d.Severity == Severity.Error &&
+				d.Message.Contains("Type 'invalid-type' in render_blockers") &&
+				d.Message.Contains("is not in the list of available types"));
+		}
+		finally
+		{
+			Directory.SetCurrentDirectory(originalDir);
+		}
+	}
+
+	[Fact]
+	public async Task RenderChangelogs_WithUnhandledType_EmitsWarning()
+	{
+		// Arrange
+		// This test simulates the scenario where a new type is added to ChangelogConfiguration.cs
+		// but the rendering code hasn't been updated to handle it yet.
+		// We temporarily add "experimental-feature" to the default configuration for testing.
+		var defaultConfig = ChangelogConfiguration.Default;
+		var originalTypes = defaultConfig.AvailableTypes.ToList();
+		var testType = "experimental-feature";
+
+		// Temporarily add the test type to defaults to simulate it being added to ChangelogConfiguration.cs
+		defaultConfig.AvailableTypes.Add(testType);
+
+		try
+		{
+			var service = new ChangelogService(_loggerFactory, _configurationContext, null);
+			var fileSystem = new FileSystem();
+			var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+			fileSystem.Directory.CreateDirectory(changelogDir);
+
+			// Create changelog with an unhandled type
+			var changelog1 = """
+				title: Experimental feature
+				type: experimental-feature
+				products:
+				  - product: elasticsearch
+				    target: 9.2.0
+				description: This is an experimental feature
+				""";
+
+			var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-experimental.yaml");
+			await fileSystem.File.WriteAllTextAsync(changelogFile, changelog1, TestContext.Current.CancellationToken);
+
+			// Create bundle file
+			var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+			fileSystem.Directory.CreateDirectory(bundleDir);
+			var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml");
+			var bundleContent = $"""
+				products:
+				  - product: elasticsearch
+				    target: 9.2.0
+				entries:
+				  - file:
+				      name: 1755268130-experimental.yaml
+				      checksum: {ComputeSha1(changelog1)}
+				""";
+			await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken);
+
+			var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString());
+
+			var input = new ChangelogRenderInput
+			{
+				Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }],
+				Output = outputDir,
+				Title = "9.2.0"
+			};
+
+			// Act
+			var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken);
+
+			// Assert
+			result.Should().BeTrue();
+			_collector.Errors.Should().Be(0);
+			_collector.Warnings.Should().BeGreaterThan(0);
+			_collector.Diagnostics.Should().Contain(d =>
+				d.Severity == Severity.Warning &&
+				d.Message.Contains("experimental-feature") &&
+				d.Message.Contains("is valid according to configuration but is not handled in rendering output") &&
+				d.Message.Contains("1 entry/entries of this type will not be included"));
+
+			// Verify that the entry is not included in the output
+			var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md");
+			fileSystem.File.Exists(indexFile).Should().BeTrue();
+
+			var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken);
+			indexContent.Should().NotContain("Experimental feature");
+		}
+		finally
+		{
+			// Restore original types
+			defaultConfig.AvailableTypes.Clear();
+			defaultConfig.AvailableTypes.AddRange(originalTypes);
+		}
 	}
 
 	[Fact]
@@ -1635,5 +5207,13 @@ public async Task CreateChangelog_WithMixedPrsFromFileAndCommaSeparated_Processe
 		yamlContents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/1111"));
 		yamlContents.Should().Contain(c => c.Contains("pr: https://github.com/elastic/elasticsearch/pull/2222"));
 	}
+
+	[SuppressMessage("Security", "CA5350:Do not use insecure cryptographic algorithm SHA1", Justification = "SHA1 is required for compatibility with existing changelog bundle format")]
+	private static string ComputeSha1(string content)
+	{
+		var bytes = System.Text.Encoding.UTF8.GetBytes(content);
+		var hash = System.Security.Cryptography.SHA1.HashData(bytes);
+		return Convert.ToHexString(hash).ToLowerInvariant();
+	}
 }