From ba9dc5c851aef4eeef496b040782968ae625c715 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 14:10:31 -0800 Subject: [PATCH 01/54] Create changelog bundle command --- .../Changelog/BundledChangelogData.cs | 41 ++ .../Changelog/ChangelogBundleInput.cs | 21 + .../Changelog/ChangelogYamlStaticContext.cs | 4 + .../ChangelogService.cs | 365 +++++++++++++++++- .../docs-builder/Commands/ChangelogCommand.cs | 48 +++ 5 files changed, 478 insertions(+), 1 deletion(-) create mode 100644 src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs create mode 100644 src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs new file mode 100644 index 000000000..fa6d8d4f6 --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs @@ -0,0 +1,41 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Data structure for bundled changelog YAML file +/// +public class BundledChangelogData +{ + public List Products { get; set; } = []; + public List Entries { get; set; } = []; +} + +public class BundledProduct +{ + public string Product { get; set; } = string.Empty; + public string Version { get; set; } = string.Empty; +} + +public class BundledEntry +{ + public string Kind { get; set; } = string.Empty; + public string Summary { get; set; } = string.Empty; + public string Description { get; set; } = string.Empty; + public string Component { get; set; } = string.Empty; + public List? Pr { get; set; } + public List? Issue { get; set; } + public string Impact { get; set; } = string.Empty; + public string Action { get; set; } = string.Empty; + public long Timestamp { get; set; } + public BundledFile? File { get; set; } +} + +public class BundledFile +{ + public string Name { get; set; } = string.Empty; + public string Checksum { get; set; } = string.Empty; +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs new file mode 100644 index 000000000..d4f7f969b --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -0,0 +1,21 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Input data for bundling changelog fragments +/// +public class ChangelogBundleInput +{ + public string Directory { get; set; } = string.Empty; + public string? Output { get; set; } + public bool All { get; set; } + public string? ProductVersion { get; set; } + public string[]? Prs { get; set; } + public string? PrsFile { get; set; } + public string? Owner { get; set; } + public string? 
Repo { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs index 6aa2b85e8..2dfb04ff8 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs @@ -10,5 +10,9 @@ namespace Elastic.Documentation.Services.Changelog; [YamlSerializable(typeof(ChangelogData))] [YamlSerializable(typeof(ProductInfo))] [YamlSerializable(typeof(ChangelogConfiguration))] +[YamlSerializable(typeof(BundledChangelogData))] +[YamlSerializable(typeof(BundledProduct))] +[YamlSerializable(typeof(BundledEntry))] +[YamlSerializable(typeof(BundledFile))] public partial class ChangelogYamlStaticContext; diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index f2c96226b..1524c226f 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -5,6 +5,9 @@ using System.Globalization; using System.IO.Abstractions; using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.RegularExpressions; using Elastic.Documentation.Configuration; using Elastic.Documentation.Diagnostics; using Elastic.Documentation.Services.Changelog; @@ -14,7 +17,7 @@ using YamlDotNet.Serialization.NamingConventions; namespace Elastic.Documentation.Services; -public class ChangelogService( +public partial class ChangelogService( ILoggerFactory logFactory, IConfigurationContext configurationContext, IGitHubPrService? githubPrService = null @@ -502,5 +505,365 @@ private static List MapLabelsToAreas(string[] labels, Dictionary BundleChangelogs( + IDiagnosticsCollector collector, + ChangelogBundleInput input, + Cancel ctx + ) + { + try + { + // Validate input + if (string.IsNullOrWhiteSpace(input.Directory)) + { + collector.EmitError(string.Empty, "Directory is required"); + return false; + } + + if (!_fileSystem.Directory.Exists(input.Directory)) + { + collector.EmitError(input.Directory, "Directory does not exist"); + return false; + } + + // Validate filter options + var filterCount = 0; + if (input.All) + filterCount++; + if (!string.IsNullOrWhiteSpace(input.ProductVersion)) + filterCount++; + if (input.Prs != null && input.Prs.Length > 0) + filterCount++; + if (!string.IsNullOrWhiteSpace(input.PrsFile)) + filterCount++; + + if (filterCount == 0) + { + collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --product-version, --prs, or --prs-file"); + return false; + } + + if (filterCount > 1) + { + collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --product-version, --prs, or --prs-file"); + return false; + } + + // Load PRs from file if specified + var prsToMatch = new HashSet(StringComparer.OrdinalIgnoreCase); + if (!string.IsNullOrWhiteSpace(input.PrsFile)) + { + if (!_fileSystem.File.Exists(input.PrsFile)) + { + collector.EmitError(input.PrsFile, "PRs file does not exist"); + return false; + } + + var prsFileContent = await _fileSystem.File.ReadAllTextAsync(input.PrsFile, ctx); + var prsFromFile = prsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToArray(); + + if (input.Prs != null && 
input.Prs.Length > 0) + { + foreach (var pr in input.Prs) + { + _ = prsToMatch.Add(pr); + } + } + + foreach (var pr in prsFromFile) + { + _ = prsToMatch.Add(pr); + } + } + else if (input.Prs != null && input.Prs.Length > 0) + { + foreach (var pr in input.Prs) + { + _ = prsToMatch.Add(pr); + } + } + + // Parse product/version if specified + string? filterProduct = null; + string? filterVersion = null; + if (!string.IsNullOrWhiteSpace(input.ProductVersion)) + { + var parts = input.ProductVersion.Split(':', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (parts.Length != 2) + { + collector.EmitError(string.Empty, "Product version must be in format 'product:version'"); + return false; + } + filterProduct = parts[0]; + filterVersion = parts[1]; + } + + // Read all YAML files from directory + var yamlFiles = _fileSystem.Directory.GetFiles(input.Directory, "*.yaml", SearchOption.TopDirectoryOnly) + .Concat(_fileSystem.Directory.GetFiles(input.Directory, "*.yml", SearchOption.TopDirectoryOnly)) + .ToList(); + + if (yamlFiles.Count == 0) + { + collector.EmitError(input.Directory, "No YAML files found in directory"); + return false; + } + + _logger.LogInformation("Found {Count} YAML files in directory", yamlFiles.Count); + + // Deserialize and filter changelog files + var deserializer = new StaticDeserializerBuilder(new ChangelogYamlStaticContext()) + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .Build(); + + var changelogEntries = new List<(ChangelogData data, string filePath, string fileName, long timestamp, string checksum)>(); + + foreach (var filePath in yamlFiles) + { + try + { + var fileName = _fileSystem.Path.GetFileName(filePath); + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + + // Extract timestamp from filename (format: timestamp-slug.yaml) + var timestampMatch = TimestampRegex().Match(fileName); + if (!timestampMatch.Success) + { + _logger.LogWarning("Skipping file {FileName}: does not match expected format (timestamp-slug.yaml)", fileName); + continue; + } + + var timestamp = long.Parse(timestampMatch.Groups[1].Value, CultureInfo.InvariantCulture); + + // Compute checksum (SHA1) + var checksum = ComputeSha1(fileContent); + + // Deserialize YAML (skip comment lines) + var yamlLines = fileContent.Split('\n'); + var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); + var data = deserializer.Deserialize(yamlWithoutComments); + + if (data == null) + { + _logger.LogWarning("Skipping file {FileName}: failed to deserialize", fileName); + continue; + } + + // Apply filters + if (input.All) + { + // Include all + } + else if (!string.IsNullOrWhiteSpace(filterProduct) && !string.IsNullOrWhiteSpace(filterVersion)) + { + // Filter by product/version + var matches = data.Products.Any(p => + string.Equals(p.Product, filterProduct, StringComparison.OrdinalIgnoreCase) && + string.Equals(p.Target, filterVersion, StringComparison.OrdinalIgnoreCase)); + + if (!matches) + { + continue; + } + } + else if (prsToMatch.Count > 0) + { + // Filter by PRs + var matches = false; + if (!string.IsNullOrWhiteSpace(data.Pr)) + { + // Normalize PR for comparison + var normalizedPr = NormalizePrForComparison(data.Pr, input.Owner, input.Repo); + matches = prsToMatch.Any(pr => NormalizePrForComparison(pr, input.Owner, input.Repo) == normalizedPr); + } + + if (!matches) + { + continue; + } + } + + changelogEntries.Add((data, filePath, fileName, timestamp, checksum)); + } + catch (YamlException ex) 
+ { + _logger.LogWarning(ex, "Failed to parse YAML file {FilePath}", filePath); + collector.EmitError(filePath, $"Failed to parse YAML: {ex.Message}"); + continue; + } + catch (Exception ex) when (ex is not (OutOfMemoryException or StackOverflowException or ThreadAbortException)) + { + _logger.LogWarning(ex, "Error processing file {FilePath}", filePath); + collector.EmitError(filePath, $"Error processing file: {ex.Message}"); + continue; + } + } + + if (changelogEntries.Count == 0) + { + collector.EmitError(string.Empty, "No changelog entries matched the filter criteria"); + return false; + } + + _logger.LogInformation("Found {Count} matching changelog entries", changelogEntries.Count); + + // Build bundled data + var bundledData = new BundledChangelogData(); + + // Extract unique products/versions + var productVersions = new HashSet<(string product, string version)>(); + foreach (var (data, _, _, _, _) in changelogEntries) + { + foreach (var product in data.Products) + { + var version = product.Target ?? string.Empty; + _ = productVersions.Add((product.Product, version)); + } + } + + bundledData.Products = productVersions + .OrderBy(pv => pv.product) + .ThenBy(pv => pv.version) + .Select(pv => new BundledProduct + { + Product = pv.product, + Version = pv.version + }) + .ToList(); + + // Build entries + bundledData.Entries = changelogEntries + .OrderBy(e => e.timestamp) + .Select(e => + { + var entry = new BundledEntry + { + Kind = e.data.Type, + Summary = e.data.Title, + Description = e.data.Description ?? string.Empty, + Component = e.data.Products.FirstOrDefault()?.Product ?? string.Empty, + Impact = e.data.Impact ?? string.Empty, + Action = e.data.Action ?? string.Empty, + Timestamp = e.timestamp, + File = new BundledFile + { + Name = e.fileName, + Checksum = e.checksum + } + }; + + // Convert PR to list + if (!string.IsNullOrWhiteSpace(e.data.Pr)) + { + entry.Pr = [e.data.Pr]; + } + + // Convert issues to list + if (e.data.Issues != null && e.data.Issues.Count > 0) + { + entry.Issue = e.data.Issues.ToList(); + } + + return entry; + }) + .ToList(); + + // Generate bundled YAML + var bundleSerializer = new StaticSerializerBuilder(new ChangelogYamlStaticContext()) + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .ConfigureDefaultValuesHandling(DefaultValuesHandling.OmitNull | DefaultValuesHandling.OmitEmptyCollections) + .Build(); + + var bundledYaml = bundleSerializer.Serialize(bundledData); + + // Determine output path + var outputPath = input.Output ?? 
_fileSystem.Path.Combine(input.Directory, "changelog-bundle.yaml"); + var outputDir = _fileSystem.Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(outputDir) && !_fileSystem.Directory.Exists(outputDir)) + { + _ = _fileSystem.Directory.CreateDirectory(outputDir); + } + + // Write bundled file + await _fileSystem.File.WriteAllTextAsync(outputPath, bundledYaml, ctx); + _logger.LogInformation("Created bundled changelog: {OutputPath}", outputPath); + + return true; + } + catch (OperationCanceledException) + { + throw; + } + catch (IOException ioEx) + { + collector.EmitError(string.Empty, $"IO error bundling changelogs: {ioEx.Message}", ioEx); + return false; + } + catch (UnauthorizedAccessException uaEx) + { + collector.EmitError(string.Empty, $"Access denied bundling changelogs: {uaEx.Message}", uaEx); + return false; + } + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Security", "CA5350:Do not use insecure cryptographic algorithm SHA1", Justification = "SHA1 is required for compatibility with existing changelog bundle format")] + private static string ComputeSha1(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = SHA1.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + [GeneratedRegex(@"^(\d+)-", RegexOptions.None)] + private static partial Regex TimestampRegex(); + + private static string NormalizePrForComparison(string pr, string? defaultOwner, string? defaultRepo) + { + // Parse PR using the same logic as GitHubPrService.ParsePrUrl + // Return a normalized format (owner/repo#number) for comparison + + // Handle full URL: https://github.com/owner/repo/pull/123 + if (pr.StartsWith("https://github.com/", StringComparison.OrdinalIgnoreCase) || + pr.StartsWith("http://github.com/", StringComparison.OrdinalIgnoreCase)) + { + try + { + var uri = new Uri(pr); + var segments = uri.Segments; + if (segments.Length >= 5 && segments[3].Equals("pull/", StringComparison.OrdinalIgnoreCase)) + { + var owner = segments[1].TrimEnd('/'); + var repo = segments[2].TrimEnd('/'); + var prNum = segments[4].Trim(); + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } + } + catch (UriFormatException) + { + // Invalid URI, fall through + } + } + + // Handle short format: owner/repo#123 + var hashIndex = pr.LastIndexOf('#'); + if (hashIndex > 0 && hashIndex < pr.Length - 1) + { + return pr.ToLowerInvariant(); + } + + // Handle just a PR number when owner/repo are provided + if (int.TryParse(pr, out var prNumber) && + !string.IsNullOrWhiteSpace(defaultOwner) && !string.IsNullOrWhiteSpace(defaultRepo)) + { + return $"{defaultOwner}/{defaultRepo}#{prNumber}".ToLowerInvariant(); + } + + // Return as-is for comparison (fallback) + return pr.ToLowerInvariant(); + } } diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index 7c108441d..acc11cd08 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -100,5 +100,53 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st return await serviceInvoker.InvokeAsync(ctx); } + + /// + /// Bundle changelog fragments into a single YAML file + /// + /// Required: Directory containing changelog YAML files + /// Optional: Output file path for the bundled changelog. 
Defaults to 'changelog-bundle.yaml' in the input directory + /// Include all changelogs in the directory + /// Filter by product and version in format "product:version" (e.g., "elastic-agent:9.1.5") + /// Filter by pull request URLs or numbers (can specify multiple times) + /// Path to a newline-delimited file containing PR URLs or numbers + /// Optional: GitHub repository owner (used when PRs are specified as numbers) + /// Optional: GitHub repository name (used when PRs are specified as numbers) + /// + [Command("bundle")] + public async Task Bundle( + string directory, + string? output = null, + bool all = false, + string? productVersion = null, + string[]? prs = null, + string? prsFile = null, + string? owner = null, + string? repo = null, + Cancel ctx = default + ) + { + await using var serviceInvoker = new ServiceInvoker(collector); + + var service = new ChangelogService(logFactory, configurationContext, null); + + var input = new ChangelogBundleInput + { + Directory = directory, + Output = output, + All = all, + ProductVersion = productVersion, + Prs = prs, + PrsFile = prsFile, + Owner = owner, + Repo = repo + }; + + serviceInvoker.AddCommand(service, input, + async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, state, ctx) + ); + + return await serviceInvoker.InvokeAsync(ctx); + } } From c55369ee42f5bde147fe06dd1b1f5edf30cf9787 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 14:30:43 -0800 Subject: [PATCH 02/54] Limited bundle contents --- .../Changelog/BundledChangelogData.cs | 11 +-- .../ChangelogService.cs | 81 ++++++++----------- 2 files changed, 34 insertions(+), 58 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs index fa6d8d4f6..270ae655f 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs @@ -21,16 +21,7 @@ public class BundledProduct public class BundledEntry { - public string Kind { get; set; } = string.Empty; - public string Summary { get; set; } = string.Empty; - public string Description { get; set; } = string.Empty; - public string Component { get; set; } = string.Empty; - public List? Pr { get; set; } - public List? Issue { get; set; } - public string Impact { get; set; } = string.Empty; - public string Action { get; set; } = string.Empty; - public long Timestamp { get; set; } - public BundledFile? File { get; set; } + public BundledFile File { get; set; } = new(); } public class BundledFile diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 1524c226f..45a122276 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -7,7 +7,6 @@ using System.Linq; using System.Security.Cryptography; using System.Text; -using System.Text.RegularExpressions; using Elastic.Documentation.Configuration; using Elastic.Documentation.Diagnostics; using Elastic.Documentation.Services.Changelog; @@ -17,7 +16,7 @@ using YamlDotNet.Serialization.NamingConventions; namespace Elastic.Documentation.Services; -public partial class ChangelogService( +public class ChangelogService( ILoggerFactory logFactory, IConfigurationContext configurationContext, IGitHubPrService? 
githubPrService = null @@ -620,7 +619,8 @@ Cancel ctx .WithNamingConvention(UnderscoredNamingConvention.Instance) .Build(); - var changelogEntries = new List<(ChangelogData data, string filePath, string fileName, long timestamp, string checksum)>(); + var changelogEntries = new List<(ChangelogData data, string filePath, string fileName, string checksum)>(); + var matchedPrs = new HashSet(StringComparer.OrdinalIgnoreCase); foreach (var filePath in yamlFiles) { @@ -629,16 +629,6 @@ Cancel ctx var fileName = _fileSystem.Path.GetFileName(filePath); var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); - // Extract timestamp from filename (format: timestamp-slug.yaml) - var timestampMatch = TimestampRegex().Match(fileName); - if (!timestampMatch.Success) - { - _logger.LogWarning("Skipping file {FileName}: does not match expected format (timestamp-slug.yaml)", fileName); - continue; - } - - var timestamp = long.Parse(timestampMatch.Groups[1].Value, CultureInfo.InvariantCulture); - // Compute checksum (SHA1) var checksum = ComputeSha1(fileContent); @@ -678,7 +668,16 @@ Cancel ctx { // Normalize PR for comparison var normalizedPr = NormalizePrForComparison(data.Pr, input.Owner, input.Repo); - matches = prsToMatch.Any(pr => NormalizePrForComparison(pr, input.Owner, input.Repo) == normalizedPr); + foreach (var pr in prsToMatch) + { + var normalizedPrToMatch = NormalizePrForComparison(pr, input.Owner, input.Repo); + if (normalizedPr == normalizedPrToMatch) + { + matches = true; + _ = matchedPrs.Add(pr); + break; + } + } } if (!matches) @@ -687,7 +686,7 @@ Cancel ctx } } - changelogEntries.Add((data, filePath, fileName, timestamp, checksum)); + changelogEntries.Add((data, filePath, fileName, checksum)); } catch (YamlException ex) { @@ -703,6 +702,19 @@ Cancel ctx } } + // Warn about unmatched PRs if filtering by PRs + if (prsToMatch.Count > 0) + { + var unmatchedPrs = prsToMatch.Where(pr => !matchedPrs.Contains(pr)).ToList(); + if (unmatchedPrs.Count > 0) + { + foreach (var unmatchedPr in unmatchedPrs) + { + collector.EmitWarning(string.Empty, $"No changelog file found for PR: {unmatchedPr}"); + } + } + } + if (changelogEntries.Count == 0) { collector.EmitError(string.Empty, "No changelog entries matched the filter criteria"); @@ -716,7 +728,7 @@ Cancel ctx // Extract unique products/versions var productVersions = new HashSet<(string product, string version)>(); - foreach (var (data, _, _, _, _) in changelogEntries) + foreach (var (data, _, _, _) in changelogEntries) { foreach (var product in data.Products) { @@ -735,40 +747,15 @@ Cancel ctx }) .ToList(); - // Build entries + // Build entries - only include file information bundledData.Entries = changelogEntries - .OrderBy(e => e.timestamp) - .Select(e => + .Select(e => new BundledEntry { - var entry = new BundledEntry - { - Kind = e.data.Type, - Summary = e.data.Title, - Description = e.data.Description ?? string.Empty, - Component = e.data.Products.FirstOrDefault()?.Product ?? string.Empty, - Impact = e.data.Impact ?? string.Empty, - Action = e.data.Action ?? 
string.Empty, - Timestamp = e.timestamp, - File = new BundledFile - { - Name = e.fileName, - Checksum = e.checksum - } - }; - - // Convert PR to list - if (!string.IsNullOrWhiteSpace(e.data.Pr)) + File = new BundledFile { - entry.Pr = [e.data.Pr]; + Name = e.fileName, + Checksum = e.checksum } - - // Convert issues to list - if (e.data.Issues != null && e.data.Issues.Count > 0) - { - entry.Issue = e.data.Issues.ToList(); - } - - return entry; }) .ToList(); @@ -818,8 +805,6 @@ private static string ComputeSha1(string content) return Convert.ToHexString(hash).ToLowerInvariant(); } - [GeneratedRegex(@"^(\d+)-", RegexOptions.None)] - private static partial Regex TimestampRegex(); private static string NormalizePrForComparison(string pr, string? defaultOwner, string? defaultRepo) { From 1d55a26b312fa19b9032fb1d45499417b226a243 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 14:41:20 -0800 Subject: [PATCH 03/54] Add tests --- .../ChangelogServiceTests.cs | 577 ++++++++++++++++++ 1 file changed, 577 insertions(+) diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index c826f66ab..6adecdd3d 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -822,5 +822,582 @@ public async Task CreateChangelog_WithFeatureId_CreatesValidYaml() var yamlContent = await File.ReadAllTextAsync(files[0], TestContext.Current.CancellationToken); yamlContent.Should().Contain("feature_id: feature:new-search-api"); } + + [Fact] + public async Task BundleChangelogs_WithAllOption_CreatesValidBundle() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: First changelog + type: feature + products: + - product: elasticsearch + target: 9.2.0 + lifecycle: ga + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second changelog + type: enhancement + products: + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-changelog.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-changelog.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("products:"); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("product: kibana"); + bundleContent.Should().Contain("entries:"); + bundleContent.Should().Contain("file:"); + 
bundleContent.Should().Contain("name: 1755268130-first-changelog.yaml"); + bundleContent.Should().Contain("name: 1755268140-second-changelog.yaml"); + bundleContent.Should().Contain("checksum:"); + } + + [Fact] + public async Task BundleChangelogs_WithProductVersionFilter_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-feature.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + ProductVersion = "elasticsearch:9.2.0", + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); + bundleContent.Should().NotContain("name: 1755268140-kibana-feature.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrsFilter_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: First PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + var changelog3 = """ + title: Third PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/300 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-pr.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-pr.yaml"); + var file3 = fileSystem.Path.Combine(changelogDir, "1755268150-third-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + await 
fileSystem.File.WriteAllTextAsync(file3, changelog3, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = ["https://github.com/elastic/elasticsearch/pull/100", "https://github.com/elastic/elasticsearch/pull/200"], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-first-pr.yaml"); + bundleContent.Should().Contain("name: 1755268140-second-pr.yaml"); + bundleContent.Should().NotContain("name: 1755268150-third-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrsFilterAndUnmatchedPrs_EmitsWarnings() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file for only one PR + var changelog1 = """ + title: First PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-first-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = [ + "https://github.com/elastic/elasticsearch/pull/100", + "https://github.com/elastic/elasticsearch/pull/200", + "https://github.com/elastic/elasticsearch/pull/300" + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().Be(2); // Two unmatched PRs + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No changelog file found for PR: https://github.com/elastic/elasticsearch/pull/200")); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No changelog file found for PR: https://github.com/elastic/elasticsearch/pull/300")); + } + + [Fact] + public async Task BundleChangelogs_WithPrsFileFilter_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files + var changelog1 = """ + title: First PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, 
"1755268130-first-pr.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-second-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + // Create PRs file + var prsFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "prs.txt"); + fileSystem.Directory.CreateDirectory(fileSystem.Path.GetDirectoryName(prsFile)!); + await fileSystem.File.WriteAllTextAsync(prsFile, """ + https://github.com/elastic/elasticsearch/pull/100 + https://github.com/elastic/elasticsearch/pull/200 + """, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + PrsFile = prsFile, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-first-pr.yaml"); + bundleContent.Should().Contain("name: 1755268140-second-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithPrNumberAndOwnerRepo_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: PR with number + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-pr-number.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = ["100"], + Owner = "elastic", + Repo = "elasticsearch", + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-pr-number.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithShortPrFormat_FiltersCorrectly() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: PR with short format + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-short-format.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, 
changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = ["elastic/elasticsearch#100"], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-short-format.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithNoMatchingFiles_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + ProductVersion = "elasticsearch:9.2.0", + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("No YAML files found") || d.Message.Contains("No changelog entries matched")); + } + + [Fact] + public async Task BundleChangelogs_WithInvalidDirectory_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var invalidDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent"); + + var input = new ChangelogBundleInput + { + Directory = invalidDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Directory does not exist")); + } + + [Fact] + public async Task BundleChangelogs_WithNoFilterOption_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("At least one filter option must be specified")); + } + + [Fact] + public async Task BundleChangelogs_WithMultipleFilterOptions_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var 
fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + ProductVersion = "elasticsearch:9.2.0", + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Only one filter option can be specified")); + } + + [Fact] + public async Task BundleChangelogs_WithInvalidProductVersionFormat_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + ProductVersion = "invalid-format", + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Product version must be in format")); + } + + [Fact] + public async Task BundleChangelogs_WithInvalidPrsFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + PrsFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.txt"), + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("PRs file does not exist")); + } + + [Fact] + public async Task BundleChangelogs_WithMultipleProducts_IncludesAllProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files with different products + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/kibana/pull/200 + """; + var changelog3 = """ + title: Multi-product feature + type: feature + products: + - 
product: elasticsearch + target: 9.2.0 + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/300 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana.yaml"); + var file3 = fileSystem.Path.Combine(changelogDir, "1755268150-multi-product.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file3, changelog3, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("product: kibana"); + bundleContent.Should().Contain("version: 9.2.0"); + // Should have 3 entries + var entryCount = bundleContent.Split("file:", StringSplitOptions.RemoveEmptyEntries).Length - 1; + entryCount.Should().Be(3); + } } From b671cd973e2d039f14fb66fd840c3071e44a6053 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 15:08:07 -0800 Subject: [PATCH 04/54] Fix target handling --- .../ChangelogService.cs | 30 +++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 45a122276..803864e90 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -7,6 +7,7 @@ using System.Linq; using System.Security.Cryptography; using System.Text; +using System.Text.RegularExpressions; using Elastic.Documentation.Configuration; using Elastic.Documentation.Diagnostics; using Elastic.Documentation.Services.Changelog; @@ -16,7 +17,7 @@ using YamlDotNet.Serialization.NamingConventions; namespace Elastic.Documentation.Services; -public class ChangelogService( +public partial class ChangelogService( ILoggerFactory logFactory, IConfigurationContext configurationContext, IGitHubPrService? githubPrService = null @@ -601,9 +602,21 @@ Cancel ctx filterVersion = parts[1]; } - // Read all YAML files from directory + // Determine output path to exclude it from input files + var outputPath = input.Output ?? 
_fileSystem.Path.Combine(input.Directory, "changelog-bundle.yaml"); + var outputFileName = _fileSystem.Path.GetFileName(outputPath); + + // Read all YAML files from directory (exclude bundle files and output file) var yamlFiles = _fileSystem.Directory.GetFiles(input.Directory, "*.yaml", SearchOption.TopDirectoryOnly) .Concat(_fileSystem.Directory.GetFiles(input.Directory, "*.yml", SearchOption.TopDirectoryOnly)) + .Where(f => + { + var fileName = _fileSystem.Path.GetFileName(f); + // Exclude bundle files and the output file + return !fileName.Contains("changelog-bundle", StringComparison.OrdinalIgnoreCase) && + !fileName.Equals(outputFileName, StringComparison.OrdinalIgnoreCase) && + !fileName.Contains("-bundle", StringComparison.OrdinalIgnoreCase); + }) .ToList(); if (yamlFiles.Count == 0) @@ -635,7 +648,13 @@ Cancel ctx // Deserialize YAML (skip comment lines) var yamlLines = fileContent.Split('\n'); var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); - var data = deserializer.Deserialize(yamlWithoutComments); + + // Normalize "version:" to "target:" in products section for compatibility + // Some changelog files may use "version" instead of "target" + // Match "version:" with various indentation levels + var normalizedYaml = VersionToTargetRegex().Replace(yamlWithoutComments, "$1target:"); + + var data = deserializer.Deserialize(normalizedYaml); if (data == null) { @@ -767,8 +786,7 @@ Cancel ctx var bundledYaml = bundleSerializer.Serialize(bundledData); - // Determine output path - var outputPath = input.Output ?? _fileSystem.Path.Combine(input.Directory, "changelog-bundle.yaml"); + // Output path was already determined above when filtering files var outputDir = _fileSystem.Path.GetDirectoryName(outputPath); if (!string.IsNullOrWhiteSpace(outputDir) && !_fileSystem.Directory.Exists(outputDir)) { @@ -805,6 +823,8 @@ private static string ComputeSha1(string content) return Convert.ToHexString(hash).ToLowerInvariant(); } + [GeneratedRegex(@"(\s+)version:", RegexOptions.Multiline)] + private static partial Regex VersionToTargetRegex(); private static string NormalizePrForComparison(string pr, string? defaultOwner, string? defaultRepo) { From 2546710e9fcc24a0e3e40fc12870e69efe8cbe74 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 15:29:04 -0800 Subject: [PATCH 05/54] Add some version error handling --- .../ChangelogService.cs | 61 +++++++++++++++---- 1 file changed, 48 insertions(+), 13 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 803864e90..dfb9b9536 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -746,26 +746,49 @@ Cancel ctx var bundledData = new BundledChangelogData(); // Extract unique products/versions - var productVersions = new HashSet<(string product, string version)>(); - foreach (var (data, _, _, _) in changelogEntries) + // If --product-version filter was used, only include that specific product-version + if (!string.IsNullOrWhiteSpace(filterProduct) && !string.IsNullOrWhiteSpace(filterVersion)) { - foreach (var product in data.Products) + bundledData.Products = [new BundledProduct { - var version = product.Target ?? 
string.Empty; - _ = productVersions.Add((product.Product, version)); + Product = filterProduct, + Version = filterVersion + }]; + } + else + { + var productVersions = new HashSet<(string product, string version)>(); + foreach (var (data, _, _, _) in changelogEntries) + { + foreach (var product in data.Products) + { + var version = product.Target ?? string.Empty; + _ = productVersions.Add((product.Product, version)); + } } + + bundledData.Products = productVersions + .OrderBy(pv => pv.product) + .ThenBy(pv => pv.version) + .Select(pv => new BundledProduct + { + Product = pv.product, + Version = pv.version + }) + .ToList(); } - bundledData.Products = productVersions - .OrderBy(pv => pv.product) - .ThenBy(pv => pv.version) - .Select(pv => new BundledProduct - { - Product = pv.product, - Version = pv.version - }) + // Check for products with same product ID but different versions + var productsByProductId = bundledData.Products.GroupBy(p => p.Product, StringComparer.OrdinalIgnoreCase) + .Where(g => g.Count() > 1) .ToList(); + foreach (var productGroup in productsByProductId) + { + var versions = productGroup.Select(p => string.IsNullOrWhiteSpace(p.Version) ? "(no version)" : p.Version).ToList(); + collector.EmitWarning(string.Empty, $"Product '{productGroup.Key}' has multiple versions in bundle: {string.Join(", ", versions)}"); + } + // Build entries - only include file information bundledData.Entries = changelogEntries .Select(e => new BundledEntry @@ -793,6 +816,18 @@ Cancel ctx _ = _fileSystem.Directory.CreateDirectory(outputDir); } + // If output file already exists, generate a unique filename + if (_fileSystem.File.Exists(outputPath)) + { + var directory = _fileSystem.Path.GetDirectoryName(outputPath) ?? string.Empty; + var fileNameWithoutExtension = _fileSystem.Path.GetFileNameWithoutExtension(outputPath); + var extension = _fileSystem.Path.GetExtension(outputPath); + var timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(); + var uniqueFileName = $"{fileNameWithoutExtension}-{timestamp}{extension}"; + outputPath = _fileSystem.Path.Combine(directory, uniqueFileName); + _logger.LogInformation("Output file already exists, using unique filename: {OutputPath}", outputPath); + } + // Write bundled file await _fileSystem.File.WriteAllTextAsync(outputPath, bundledYaml, ctx); _logger.LogInformation("Created bundled changelog: {OutputPath}", outputPath); From 906d1e0f2abf10bedf11555483e7059a1e372e46 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 15:45:36 -0800 Subject: [PATCH 06/54] Change --product-version to --products --- .../Changelog/ChangelogBundleInput.cs | 2 +- .../ChangelogService.cs | 50 ++++++++++--------- .../docs-builder/Commands/ChangelogCommand.cs | 12 ++--- 3 files changed, 33 insertions(+), 31 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs index d4f7f969b..fb87d67cc 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -12,7 +12,7 @@ public class ChangelogBundleInput public string Directory { get; set; } = string.Empty; public string? Output { get; set; } public bool All { get; set; } - public string? ProductVersion { get; set; } + public List? Products { get; set; } public string[]? Prs { get; set; } public string? PrsFile { get; set; } public string? 
Owner { get; set; } diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index dfb9b9536..2c29884b4 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -531,7 +531,7 @@ Cancel ctx var filterCount = 0; if (input.All) filterCount++; - if (!string.IsNullOrWhiteSpace(input.ProductVersion)) + if (input.Products != null && input.Products.Count > 0) filterCount++; if (input.Prs != null && input.Prs.Length > 0) filterCount++; @@ -540,13 +540,13 @@ Cancel ctx if (filterCount == 0) { - collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --product-version, --prs, or --prs-file"); + collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --products, --prs, or --prs-file"); return false; } if (filterCount > 1) { - collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --product-version, --prs, or --prs-file"); + collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --products, --prs, or --prs-file"); return false; } @@ -587,19 +587,15 @@ Cancel ctx } } - // Parse product/version if specified - string? filterProduct = null; - string? filterVersion = null; - if (!string.IsNullOrWhiteSpace(input.ProductVersion)) + // Build set of product/version combinations to filter by + var productsToMatch = new HashSet<(string product, string version)>(); + if (input.Products != null && input.Products.Count > 0) { - var parts = input.ProductVersion.Split(':', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - if (parts.Length != 2) + foreach (var product in input.Products) { - collector.EmitError(string.Empty, "Product version must be in format 'product:version'"); - return false; + var version = product.Target ?? string.Empty; + _ = productsToMatch.Add((product.Product.ToLowerInvariant(), version)); } - filterProduct = parts[0]; - filterVersion = parts[1]; } // Determine output path to exclude it from input files @@ -667,12 +663,14 @@ Cancel ctx { // Include all } - else if (!string.IsNullOrWhiteSpace(filterProduct) && !string.IsNullOrWhiteSpace(filterVersion)) + else if (productsToMatch.Count > 0) { - // Filter by product/version + // Filter by products var matches = data.Products.Any(p => - string.Equals(p.Product, filterProduct, StringComparison.OrdinalIgnoreCase) && - string.Equals(p.Target, filterVersion, StringComparison.OrdinalIgnoreCase)); + { + var version = p.Target ?? 
string.Empty; + return productsToMatch.Contains((p.Product.ToLowerInvariant(), version)); + }); if (!matches) { @@ -746,14 +744,18 @@ Cancel ctx var bundledData = new BundledChangelogData(); // Extract unique products/versions - // If --product-version filter was used, only include that specific product-version - if (!string.IsNullOrWhiteSpace(filterProduct) && !string.IsNullOrWhiteSpace(filterVersion)) + // If --products filter was used, only include those specific product-versions + if (productsToMatch.Count > 0) { - bundledData.Products = [new BundledProduct - { - Product = filterProduct, - Version = filterVersion - }]; + bundledData.Products = productsToMatch + .OrderBy(pv => pv.product) + .ThenBy(pv => pv.version) + .Select(pv => new BundledProduct + { + Product = pv.product, + Version = pv.version + }) + .ToList(); } else { diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index acc11cd08..b07bf4396 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -104,10 +104,10 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st /// /// Bundle changelog fragments into a single YAML file /// - /// Required: Directory containing changelog YAML files + /// Optional: Directory containing changelog YAML files. Defaults to current directory /// Optional: Output file path for the bundled changelog. Defaults to 'changelog-bundle.yaml' in the input directory /// Include all changelogs in the directory - /// Filter by product and version in format "product:version" (e.g., "elastic-agent:9.1.5") + /// Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") /// Filter by pull request URLs or numbers (can specify multiple times) /// Path to a newline-delimited file containing PR URLs or numbers /// Optional: GitHub repository owner (used when PRs are specified as numbers) @@ -115,10 +115,10 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st /// [Command("bundle")] public async Task Bundle( - string directory, + string? directory = null, string? output = null, bool all = false, - string? productVersion = null, + [ProductInfoParser] List? products = null, string[]? prs = null, string? prsFile = null, string? owner = null, @@ -132,10 +132,10 @@ public async Task Bundle( var input = new ChangelogBundleInput { - Directory = directory, + Directory = directory ?? 
Directory.GetCurrentDirectory(), Output = output, All = all, - ProductVersion = productVersion, + Products = products, Prs = prs, PrsFile = prsFile, Owner = owner, From 4fd65c1d84ce30b1959bd7f297793ea51f924c6d Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 15:58:35 -0800 Subject: [PATCH 07/54] Ignore bundle files when bundling --- .../ChangelogService.cs | 41 +++++++++++++++---- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 2c29884b4..2c7c9a7b8 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -603,18 +603,41 @@ Cancel ctx var outputFileName = _fileSystem.Path.GetFileName(outputPath); // Read all YAML files from directory (exclude bundle files and output file) - var yamlFiles = _fileSystem.Directory.GetFiles(input.Directory, "*.yaml", SearchOption.TopDirectoryOnly) + var allYamlFiles = _fileSystem.Directory.GetFiles(input.Directory, "*.yaml", SearchOption.TopDirectoryOnly) .Concat(_fileSystem.Directory.GetFiles(input.Directory, "*.yml", SearchOption.TopDirectoryOnly)) - .Where(f => - { - var fileName = _fileSystem.Path.GetFileName(f); - // Exclude bundle files and the output file - return !fileName.Contains("changelog-bundle", StringComparison.OrdinalIgnoreCase) && - !fileName.Equals(outputFileName, StringComparison.OrdinalIgnoreCase) && - !fileName.Contains("-bundle", StringComparison.OrdinalIgnoreCase); - }) .ToList(); + var yamlFiles = new List(); + foreach (var filePath in allYamlFiles) + { + var fileName = _fileSystem.Path.GetFileName(filePath); + + // Exclude the output file + if (fileName.Equals(outputFileName, StringComparison.OrdinalIgnoreCase)) + continue; + + // Check if file is a bundle file by looking for "entries:" key (unique to bundle files) + try + { + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + // Bundle files have "entries:" at root level, changelog files don't + if (fileContent.Contains("entries:", StringComparison.Ordinal) && + fileContent.Contains("products:", StringComparison.Ordinal)) + { + _logger.LogDebug("Skipping bundle file: {FileName}", fileName); + continue; + } + } + catch (Exception ex) when (ex is not (OutOfMemoryException or StackOverflowException or ThreadAbortException)) + { + // If we can't read the file, skip it + _logger.LogWarning(ex, "Failed to read file {FileName} for bundle detection", fileName); + continue; + } + + yamlFiles.Add(filePath); + } + if (yamlFiles.Count == 0) { collector.EmitError(input.Directory, "No YAML files found in directory"); From f7157af0fc82d242d8b8eeea475d430cf7b38015 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 19:14:44 -0800 Subject: [PATCH 08/54] Change version to target in bundle --- .../Changelog/BundledChangelogData.cs | 2 +- .../ChangelogService.cs | 8 +-- .../ChangelogServiceTests.cs | 53 +++++++++++++++---- 3 files changed, 48 insertions(+), 15 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs index 270ae655f..fbae06c19 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs @@ -16,7 +16,7 @@ public class BundledChangelogData public class BundledProduct { 
public string Product { get; set; } = string.Empty; - public string Version { get; set; } = string.Empty; + public string? Target { get; set; } } public class BundledEntry diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 2c7c9a7b8..f2ff2b49d 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -776,7 +776,7 @@ Cancel ctx .Select(pv => new BundledProduct { Product = pv.product, - Version = pv.version + Target = string.IsNullOrWhiteSpace(pv.version) ? null : pv.version }) .ToList(); } @@ -798,7 +798,7 @@ Cancel ctx .Select(pv => new BundledProduct { Product = pv.product, - Version = pv.version + Target = string.IsNullOrWhiteSpace(pv.version) ? null : pv.version }) .ToList(); } @@ -810,8 +810,8 @@ Cancel ctx foreach (var productGroup in productsByProductId) { - var versions = productGroup.Select(p => string.IsNullOrWhiteSpace(p.Version) ? "(no version)" : p.Version).ToList(); - collector.EmitWarning(string.Empty, $"Product '{productGroup.Key}' has multiple versions in bundle: {string.Join(", ", versions)}"); + var targets = productGroup.Select(p => string.IsNullOrWhiteSpace(p.Target) ? "(no target)" : p.Target).ToList(); + collector.EmitWarning(string.Empty, $"Product '{productGroup.Key}' has multiple targets in bundle: {string.Join(", ", targets)}"); } // Build entries - only include file information diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index 6adecdd3d..e203d945e 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -882,7 +882,7 @@ public async Task BundleChangelogs_WithAllOption_CreatesValidBundle() } [Fact] - public async Task BundleChangelogs_WithProductVersionFilter_FiltersCorrectly() + public async Task BundleChangelogs_WithProductsFilter_FiltersCorrectly() { // Arrange var service = new ChangelogService(_loggerFactory, _configurationContext, null); @@ -916,7 +916,7 @@ public async Task BundleChangelogs_WithProductVersionFilter_FiltersCorrectly() var input = new ChangelogBundleInput { Directory = changelogDir, - ProductVersion = "elasticsearch:9.2.0", + Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -929,6 +929,7 @@ public async Task BundleChangelogs_WithProductVersionFilter_FiltersCorrectly() var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); bundleContent.Should().NotContain("name: 1755268140-kibana-feature.yaml"); } @@ -1196,7 +1197,7 @@ public async Task BundleChangelogs_WithNoMatchingFiles_ReturnsError() var input = new ChangelogBundleInput { Directory = changelogDir, - ProductVersion = "elasticsearch:9.2.0", + Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1270,7 +1271,7 @@ public async Task 
BundleChangelogs_WithMultipleFilterOptions_ReturnsError() { Directory = changelogDir, All = true, - ProductVersion = "elasticsearch:9.2.0", + Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1284,7 +1285,7 @@ public async Task BundleChangelogs_WithMultipleFilterOptions_ReturnsError() } [Fact] - public async Task BundleChangelogs_WithInvalidProductVersionFormat_ReturnsError() + public async Task BundleChangelogs_WithMultipleProducts_CreatesValidBundle() { // Arrange var service = new ChangelogService(_loggerFactory, _configurationContext, null); @@ -1292,10 +1293,36 @@ public async Task BundleChangelogs_WithInvalidProductVersionFormat_ReturnsError( var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); fileSystem.Directory.CreateDirectory(changelogDir); + // Create test changelog files + var changelog1 = """ + title: Cloud serverless feature 1 + type: feature + products: + - product: cloud-serverless + target: 2025-12-02 + pr: https://github.com/elastic/cloud-serverless/pull/100 + """; + var changelog2 = """ + title: Cloud serverless feature 2 + type: feature + products: + - product: cloud-serverless + target: 2025-12-06 + pr: https://github.com/elastic/cloud-serverless/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-cloud-feature1.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-cloud-feature2.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + var input = new ChangelogBundleInput { Directory = changelogDir, - ProductVersion = "invalid-format", + Products = [ + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-02" }, + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-06" } + ], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1303,9 +1330,15 @@ public async Task BundleChangelogs_WithInvalidProductVersionFormat_ReturnsError( var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); // Assert - result.Should().BeFalse(); - _collector.Errors.Should().BeGreaterThan(0); - _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Product version must be in format")); + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("product: cloud-serverless"); + bundleContent.Should().Contain("target: 2025-12-02"); + bundleContent.Should().Contain("target: 2025-12-06"); + bundleContent.Should().Contain("name: 1755268130-cloud-feature1.yaml"); + bundleContent.Should().Contain("name: 1755268140-cloud-feature2.yaml"); } [Fact] @@ -1394,7 +1427,7 @@ public async Task BundleChangelogs_WithMultipleProducts_IncludesAllProducts() var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); bundleContent.Should().Contain("product: elasticsearch"); bundleContent.Should().Contain("product: kibana"); - bundleContent.Should().Contain("version: 9.2.0"); + bundleContent.Should().Contain("target: 9.2.0"); // Should have 3 entries var entryCount = 
bundleContent.Split("file:", StringSplitOptions.RemoveEmptyEntries).Length - 1; entryCount.Should().Be(3); From fb4828a8e733165a1a0fb00ba6158388ddc49c5f Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 19:23:20 -0800 Subject: [PATCH 09/54] Rename input-products, add output-products --- .../Changelog/ChangelogBundleInput.cs | 3 +- .../ChangelogService.cs | 30 +++++--- .../docs-builder/Commands/ChangelogCommand.cs | 9 ++- .../ChangelogServiceTests.cs | 71 +++++++++++++++++-- 4 files changed, 97 insertions(+), 16 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs index fb87d67cc..de1bcbe99 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -12,7 +12,8 @@ public class ChangelogBundleInput public string Directory { get; set; } = string.Empty; public string? Output { get; set; } public bool All { get; set; } - public List? Products { get; set; } + public List? InputProducts { get; set; } + public List? OutputProducts { get; set; } public string[]? Prs { get; set; } public string? PrsFile { get; set; } public string? Owner { get; set; } diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index f2ff2b49d..ffeab5828 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -531,7 +531,7 @@ Cancel ctx var filterCount = 0; if (input.All) filterCount++; - if (input.Products != null && input.Products.Count > 0) + if (input.InputProducts != null && input.InputProducts.Count > 0) filterCount++; if (input.Prs != null && input.Prs.Length > 0) filterCount++; @@ -540,13 +540,13 @@ Cancel ctx if (filterCount == 0) { - collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --products, --prs, or --prs-file"); + collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --input-products, --prs, or --prs-file"); return false; } if (filterCount > 1) { - collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --products, --prs, or --prs-file"); + collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --input-products, --prs, or --prs-file"); return false; } @@ -589,9 +589,9 @@ Cancel ctx // Build set of product/version combinations to filter by var productsToMatch = new HashSet<(string product, string version)>(); - if (input.Products != null && input.Products.Count > 0) + if (input.InputProducts != null && input.InputProducts.Count > 0) { - foreach (var product in input.Products) + foreach (var product in input.InputProducts) { var version = product.Target ?? 
string.Empty; _ = productsToMatch.Add((product.Product.ToLowerInvariant(), version)); @@ -766,9 +766,22 @@ Cancel ctx // Build bundled data var bundledData = new BundledChangelogData(); - // Extract unique products/versions - // If --products filter was used, only include those specific product-versions - if (productsToMatch.Count > 0) + // Set products array in output + // If --output-products was specified, use those values (override any from changelogs) + if (input.OutputProducts != null && input.OutputProducts.Count > 0) + { + bundledData.Products = input.OutputProducts + .OrderBy(p => p.Product) + .ThenBy(p => p.Target ?? string.Empty) + .Select(p => new BundledProduct + { + Product = p.Product, + Target = p.Target + }) + .ToList(); + } + // If --input-products filter was used, only include those specific product-versions + else if (productsToMatch.Count > 0) { bundledData.Products = productsToMatch .OrderBy(pv => pv.product) @@ -780,6 +793,7 @@ Cancel ctx }) .ToList(); } + // Otherwise, extract unique products/versions from changelog entries else { var productVersions = new HashSet<(string product, string version)>(); diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index b07bf4396..cf3e57afb 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -107,7 +107,8 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st /// Optional: Directory containing changelog YAML files. Defaults to current directory /// Optional: Output file path for the bundled changelog. Defaults to 'changelog-bundle.yaml' in the input directory /// Include all changelogs in the directory - /// Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") + /// Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") + /// Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. /// Filter by pull request URLs or numbers (can specify multiple times) /// Path to a newline-delimited file containing PR URLs or numbers /// Optional: GitHub repository owner (used when PRs are specified as numbers) @@ -118,7 +119,8 @@ public async Task Bundle( string? directory = null, string? output = null, bool all = false, - [ProductInfoParser] List? products = null, + [ProductInfoParser] List? inputProducts = null, + [ProductInfoParser] List? outputProducts = null, string[]? prs = null, string? prsFile = null, string? owner = null, @@ -135,7 +137,8 @@ public async Task Bundle( Directory = directory ?? 
Directory.GetCurrentDirectory(), Output = output, All = all, - Products = products, + InputProducts = inputProducts, + OutputProducts = outputProducts, Prs = prs, PrsFile = prsFile, Owner = owner, diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index e203d945e..1127fa9b7 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -916,7 +916,7 @@ public async Task BundleChangelogs_WithProductsFilter_FiltersCorrectly() var input = new ChangelogBundleInput { Directory = changelogDir, - Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1197,7 +1197,7 @@ public async Task BundleChangelogs_WithNoMatchingFiles_ReturnsError() var input = new ChangelogBundleInput { Directory = changelogDir, - Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1271,7 +1271,7 @@ public async Task BundleChangelogs_WithMultipleFilterOptions_ReturnsError() { Directory = changelogDir, All = true, - Products = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], + InputProducts = [new ProductInfo { Product = "elasticsearch", Target = "9.2.0" }], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1319,7 +1319,7 @@ public async Task BundleChangelogs_WithMultipleProducts_CreatesValidBundle() var input = new ChangelogBundleInput { Directory = changelogDir, - Products = [ + InputProducts = [ new ProductInfo { Product = "cloud-serverless", Target = "2025-12-02" }, new ProductInfo { Product = "cloud-serverless", Target = "2025-12-06" } ], @@ -1366,6 +1366,69 @@ public async Task BundleChangelogs_WithInvalidPrsFile_ReturnsError() _collector.Diagnostics.Should().Contain(d => d.Message.Contains("PRs file does not exist")); } + [Fact] + public async Task BundleChangelogs_WithOutputProducts_OverridesChangelogProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog files with different products + var changelog1 = """ + title: Elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Kibana feature + type: feature + products: + - product: kibana + target: 9.2.0 + pr: https://github.com/elastic/kibana/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-elasticsearch-feature.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir, "1755268140-kibana-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + var input = new 
ChangelogBundleInput + { + Directory = changelogDir, + All = true, + OutputProducts = [ + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-02" }, + new ProductInfo { Product = "cloud-serverless", Target = "2025-12-06" } + ], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + // Output products should override changelog products + bundleContent.Should().Contain("product: cloud-serverless"); + bundleContent.Should().Contain("target: 2025-12-02"); + bundleContent.Should().Contain("target: 2025-12-06"); + // Should not contain products from changelogs + bundleContent.Should().NotContain("product: elasticsearch"); + bundleContent.Should().NotContain("product: kibana"); + // But should still contain the entries + bundleContent.Should().Contain("name: 1755268130-elasticsearch-feature.yaml"); + bundleContent.Should().Contain("name: 1755268140-kibana-feature.yaml"); + } + [Fact] public async Task BundleChangelogs_WithMultipleProducts_IncludesAllProducts() { From 3fbdeb417dc9c089367dc2d760145e01602092b1 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 20:34:44 -0800 Subject: [PATCH 10/54] Add resolve option --- .../Changelog/BundledChangelogData.cs | 14 ++ .../Changelog/ChangelogBundleInput.cs | 1 + .../ChangelogService.cs | 79 ++++++- .../docs-builder/Commands/ChangelogCommand.cs | 3 + .../ChangelogServiceTests.cs | 200 ++++++++++++++++++ 5 files changed, 289 insertions(+), 8 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs index fbae06c19..c69cd2b42 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/BundledChangelogData.cs @@ -22,6 +22,20 @@ public class BundledProduct public class BundledEntry { public BundledFile File { get; set; } = new(); + + // Resolved changelog fields (only populated when --resolve is used) + public string? Type { get; set; } + public string? Title { get; set; } + public List? Products { get; set; } + public string? Description { get; set; } + public string? Impact { get; set; } + public string? Action { get; set; } + public string? FeatureId { get; set; } + public bool? Highlight { get; set; } + public string? Subtype { get; set; } + public List? Areas { get; set; } + public string? Pr { get; set; } + public List? Issues { get; set; } } public class BundledFile diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs index de1bcbe99..7d7f005ac 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -14,6 +14,7 @@ public class ChangelogBundleInput public bool All { get; set; } public List? InputProducts { get; set; } public List? OutputProducts { get; set; } + public bool Resolve { get; set; } public string[]? Prs { get; set; } public string? PrsFile { get; set; } public string? 
Owner { get; set; } diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index ffeab5828..d5c484abe 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -828,17 +828,80 @@ Cancel ctx collector.EmitWarning(string.Empty, $"Product '{productGroup.Key}' has multiple targets in bundle: {string.Join(", ", targets)}"); } - // Build entries - only include file information - bundledData.Entries = changelogEntries - .Select(e => new BundledEntry + // Build entries + if (input.Resolve) + { + // When resolving, include changelog contents and validate required fields + var resolvedEntries = new List(); + foreach (var (data, filePath, fileName, checksum) in changelogEntries) { - File = new BundledFile + // Validate required fields + if (string.IsNullOrWhiteSpace(data.Title)) { - Name = e.fileName, - Checksum = e.checksum + collector.EmitError(filePath, "Changelog file is missing required field: title"); + return false; } - }) - .ToList(); + + if (string.IsNullOrWhiteSpace(data.Type)) + { + collector.EmitError(filePath, "Changelog file is missing required field: type"); + return false; + } + + if (data.Products == null || data.Products.Count == 0) + { + collector.EmitError(filePath, "Changelog file is missing required field: products"); + return false; + } + + // Validate products have required fields + foreach (var product in data.Products) + { + if (string.IsNullOrWhiteSpace(product.Product)) + { + collector.EmitError(filePath, "Changelog file has product entry missing required field: product"); + return false; + } + } + + resolvedEntries.Add(new BundledEntry + { + File = new BundledFile + { + Name = fileName, + Checksum = checksum + }, + Type = data.Type, + Title = data.Title, + Products = data.Products, + Description = data.Description, + Impact = data.Impact, + Action = data.Action, + FeatureId = data.FeatureId, + Highlight = data.Highlight, + Subtype = data.Subtype, + Areas = data.Areas, + Pr = data.Pr, + Issues = data.Issues + }); + } + + bundledData.Entries = resolvedEntries; + } + else + { + // Only include file information + bundledData.Entries = changelogEntries + .Select(e => new BundledEntry + { + File = new BundledFile + { + Name = e.fileName, + Checksum = e.checksum + } + }) + .ToList(); + } // Generate bundled YAML var bundleSerializer = new StaticSerializerBuilder(new ChangelogYamlStaticContext()) diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index cf3e57afb..294b9c32d 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -109,6 +109,7 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st /// Include all changelogs in the directory /// Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") /// Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. 
+ /// Copy the contents of each changelog file into the entries array /// Filter by pull request URLs or numbers (can specify multiple times) /// Path to a newline-delimited file containing PR URLs or numbers /// Optional: GitHub repository owner (used when PRs are specified as numbers) @@ -121,6 +122,7 @@ public async Task Bundle( bool all = false, [ProductInfoParser] List? inputProducts = null, [ProductInfoParser] List? outputProducts = null, + bool resolve = false, string[]? prs = null, string? prsFile = null, string? owner = null, @@ -139,6 +141,7 @@ public async Task Bundle( All = all, InputProducts = inputProducts, OutputProducts = outputProducts, + Resolve = resolve, Prs = prs, PrsFile = prsFile, Owner = owner, diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index 1127fa9b7..a6de682e6 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -1495,5 +1495,205 @@ public async Task BundleChangelogs_WithMultipleProducts_IncludesAllProducts() var entryCount = bundleContent.Split("file:", StringSplitOptions.RemoveEmptyEntries).Length - 1; entryCount.Should().Be(3); } + + [Fact] + public async Task BundleChangelogs_WithResolve_CopiesChangelogContents() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + areas: + - Search + description: This is a test feature + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("file:"); + bundleContent.Should().Contain("name: 1755268130-test-feature.yaml"); + bundleContent.Should().Contain("checksum:"); + bundleContent.Should().Contain("type: feature"); + bundleContent.Should().Contain("title: Test feature"); + bundleContent.Should().Contain("product: elasticsearch"); + bundleContent.Should().Contain("target: 9.2.0"); + bundleContent.Should().Contain("pr: https://github.com/elastic/elasticsearch/pull/100"); + bundleContent.Should().Contain("areas:"); + bundleContent.Should().Contain("- Search"); + bundleContent.Should().Contain("description: This is a test feature"); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndMissingTitle_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = 
fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without title + var changelog1 = """ + type: feature + products: + - product: elasticsearch + target: 9.2.0 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field: title")); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndMissingType_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without type + var changelog1 = """ + title: Test feature + products: + - product: elasticsearch + target: 9.2.0 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field: type")); + } + + [Fact] + public async Task BundleChangelogs_WithResolveAndMissingProducts_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without products + var changelog1 = """ + title: Test feature + type: feature + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field: products")); + } + + [Fact] + public async Task 
BundleChangelogs_WithResolveAndInvalidProduct_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file with invalid product (missing product field) + var changelog1 = """ + title: Test feature + type: feature + products: + - target: 9.2.0 + """; + + var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + + var input = new ChangelogBundleInput + { + Directory = changelogDir, + All = true, + Resolve = true, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("product entry missing required field: product")); + } } From 814c36e26f89cf14d39d673a1ebf8b62961ce1fe Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 11 Dec 2025 21:17:54 -0800 Subject: [PATCH 11/54] Potential fix for pull request finding 'Missed opportunity to use Where' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- .../Elastic.Documentation.Services/ChangelogService.cs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index d5c484abe..0088032ef 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -855,13 +855,10 @@ Cancel ctx } // Validate products have required fields - foreach (var product in data.Products) + if (data.Products.Any(product => string.IsNullOrWhiteSpace(product.Product))) { - if (string.IsNullOrWhiteSpace(product.Product)) - { - collector.EmitError(filePath, "Changelog file has product entry missing required field: product"); - return false; - } + collector.EmitError(filePath, "Changelog file has product entry missing required field: product"); + return false; } resolvedEntries.Add(new BundledEntry From 484bd3f15ced10df29bc64080f8d6ac3867a01ee Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 21:41:34 -0800 Subject: [PATCH 12/54] Add CLI docs --- docs/_docset.yml | 1 + docs/cli/release/index.md | 1 + docs/contribute/index.md | 2 +- .../Changelog/ChangelogBundleInput.cs | 2 +- src/tooling/docs-builder/Commands/ChangelogCommand.cs | 2 +- 5 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/_docset.yml b/docs/_docset.yml index dd9d26547..392649a62 100644 --- a/docs/_docset.yml +++ b/docs/_docset.yml @@ -157,6 +157,7 @@ toc: children: - file: index.md - file: changelog-add.md + - file: changelog-bundle.md - folder: migration children: - file: index.md diff --git a/docs/cli/release/index.md b/docs/cli/release/index.md index 37c29bb46..5a2829c29 100644 --- a/docs/cli/release/index.md +++ b/docs/cli/release/index.md @@ -9,3 +9,4 @@ These commands are associated with product release documentation. 
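The refactor in PATCH 11/54 above replaces a manual loop with a single LINQ query when checking that every product entry in a changelog carries the required `product` field. A minimal, self-contained sketch of that kind of check follows; the `ProductEntry` record is an illustrative stand-in, not the repository's actual product type.

```csharp
using System.Collections.Generic;
using System.Linq;

// Illustrative stand-in for the product entries parsed from a changelog file;
// the real type in this repository carries more fields.
public sealed record ProductEntry(string? Product, string? Target);

public static class ProductValidationSketch
{
    // Mirrors the intent of the refactored check: report a problem as soon as
    // any product entry lacks the required "product" field, without a manual loop.
    public static bool HasMissingProductField(IEnumerable<ProductEntry> products) =>
        products.Any(p => string.IsNullOrWhiteSpace(p.Product));
}
```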
## Changelog commands - [changelog add](changelog-add.md) - Create a changelog file +- [changelog bundle](changelog-bundle.md) - Create a changelog bundle file diff --git a/docs/contribute/index.md b/docs/contribute/index.md index 21240317b..ff508410c 100644 --- a/docs/contribute/index.md +++ b/docs/contribute/index.md @@ -10,7 +10,7 @@ Use these guides for tasks like managing documentation files and folders, config - [Move files and folders](move.md): Move files or folders and automatically update all links in the documentation. - [Manage redirects across doc sets](redirects.md): Set up redirects when moving or deleting pages to prevent broken links. -- [Add changelog entries](changelog.md): Create changelog fragments using the command-line interface. +- [Add changelog entries](changelog.md): Create changelog files using the command-line interface. ## Repository management diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs index 7d7f005ac..00c762f68 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -5,7 +5,7 @@ namespace Elastic.Documentation.Services.Changelog; /// -/// Input data for bundling changelog fragments +/// Input data for bundling changelog files /// public class ChangelogBundleInput { diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index 294b9c32d..93c93dbe0 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -102,7 +102,7 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st } /// - /// Bundle changelog fragments into a single YAML file + /// Bundle changelog files /// /// Optional: Directory containing changelog YAML files. Defaults to current directory /// Optional: Output file path for the bundled changelog. Defaults to 'changelog-bundle.yaml' in the input directory From 6ff29fd409f197d9d94384b74979b030a0637a4f Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 21:51:22 -0800 Subject: [PATCH 13/54] Update changelog how-to --- docs/contribute/changelog.md | 32 ++++++++++++++++++++++++++------ docs/contribute/index.md | 2 +- 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index 693217424..ca1f6355a 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -1,14 +1,16 @@ -# Add changelog entries +# Create and bundle changelogs The `docs-builder changelog add` command creates a new changelog file from command-line input. -By adding a file for each notable change, you can ultimately generate release documention with a consistent layout for all your products. +The `docs-builder changelog bundle` command creates a consolidated list of changelogs. + +By adding a file for each notable change and grouping them into bundles, you can ultimately generate release documention with a consistent layout for all your products. :::{note} This command is associated with an ongoing release docs initiative. Additional workflows are still to come for managing the list of changelogs in each release. 
::: -The command generates a YAML file that uses the following schema: +The changelogs use the following schema: :::{dropdown} Changelog schema ::::{include} /contribute/_snippets/changelog-fields.md @@ -17,7 +19,7 @@ The command generates a YAML file that uses the following schema: ## Command options -The command supports all of the following options, which generally align with fields in the changelog schema: +The `changelog add` command supports all of the following options, which generally align with fields in the changelog schema: ```sh Usage: changelog add [options...] [-h|--help] [--version] @@ -43,6 +45,24 @@ Options: --config Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null] ``` +The `changelog bundle` command supports all of the following options, which provide multiple methods for collecting the changelogs: + +```sh +Bundle changelogs + +Options: + --directory Optional: Directory containing changelog YAML files. Defaults to current directory [Default: null] + --output Optional: Output file path for the bundled changelog. Defaults to 'changelog-bundle.yaml' in the input directory [Default: null] + --all Include all changelogs in the directory + --input-products ?> Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") [Default: null] + --output-products ?> Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. [Default: null] + --resolve Copy the contents of each changelog file into the entries array + --prs Filter by pull request URLs or numbers (can specify multiple times) [Default: null] + --prs-file Path to a newline-delimited file containing PR URLs or numbers [Default: null] + --owner Optional: GitHub repository owner (used when PRs are specified as numbers) [Default: null] + --repo Optional: GitHub repository name (used when PRs are specified as numbers) [Default: null] +``` + ### Product format The `--products` parameter accepts products in the format `"product target lifecycle, ..."` where: @@ -87,7 +107,7 @@ Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/ma ## Examples -### Multiple products +### Create a changelog for multiple products The following command creates a changelog for a bug fix that applies to two products: @@ -120,7 +140,7 @@ areas: - ES|QL ``` -### PR label mappings +### Create a changelog with PR label mappings You can update your changelog configuration file to contain GitHub label mappings, for example: diff --git a/docs/contribute/index.md b/docs/contribute/index.md index ff508410c..f782350dc 100644 --- a/docs/contribute/index.md +++ b/docs/contribute/index.md @@ -10,7 +10,7 @@ Use these guides for tasks like managing documentation files and folders, config - [Move files and folders](move.md): Move files or folders and automatically update all links in the documentation. - [Manage redirects across doc sets](redirects.md): Set up redirects when moving or deleting pages to prevent broken links. -- [Add changelog entries](changelog.md): Create changelog files using the command-line interface. +- [](changelog.md): Create and bundle changelogs using the command-line interface. 
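The `"product target lifecycle, ..."` format documented in the how-to above is straightforward to tokenize. The sketch below shows one way such a value could be split, purely as an illustration of the format; it is not the `[ProductInfoParser]` implementation the command actually uses.

```csharp
using System;
using System.Linq;

public static class ProductSpecSketch
{
    // Splits e.g. "elasticsearch 9.2.0 ga, cloud-serverless 2025-08-05" into
    // (product, target, lifecycle) tuples; only the product token is required.
    public static (string Product, string? Target, string? Lifecycle)[] Parse(string value) =>
        value.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
            .Select(spec =>
            {
                var tokens = spec.Split(' ', StringSplitOptions.RemoveEmptyEntries);
                return (
                    Product: tokens[0],
                    Target: tokens.Length > 1 ? tokens[1] : null,
                    Lifecycle: tokens.Length > 2 ? tokens[2] : null);
            })
            .ToArray();
}
```

Parsing `"elasticsearch 9.2.0 ga"` this way yields `("elasticsearch", "9.2.0", "ga")`, matching the example values shown in the option help above.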
## Repository management From 0b91259d4c5c6cc170f893aea14e2254fdee8d90 Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 22:03:43 -0800 Subject: [PATCH 14/54] Add ignored file --- docs/cli/release/changelog-bundle.md | 50 ++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 docs/cli/release/changelog-bundle.md diff --git a/docs/cli/release/changelog-bundle.md b/docs/cli/release/changelog-bundle.md new file mode 100644 index 000000000..379ae2b11 --- /dev/null +++ b/docs/cli/release/changelog-bundle.md @@ -0,0 +1,50 @@ +# changelog bundle + +Bundle changelog files. + +To create the changelogs, use [](/cli/release/changelog-add.md). + + +## Usage + +```sh +docs-builder changelog bundle [options...] [-h|--help] +``` + +## Options + +`--all` +: Include all changelogs from the directory. + +`--directory ` +: Optional: The directory that contains the changelog YAML files. +: Defaults to the current directory. + +`--input-products ?>` +: Filter by products in format "product target lifecycle, ..." +: For example, `cloud-serverless 2025-12-02, cloud-serverless 2025-12-06`. + +`--output ` +: Optional: The output file path for the bundle. +: Defaults to `changelog-bundle.yaml` in the input directory. + +`--output-products ?>` +: Explicitly set the products array in the output file in format "product target lifecycle, ...". +: This value replaces information that would otherwise by derived from changelogs. + +`--owner ` +: Optional: The GitHub repository owner, which is required when pull requests are specified as numbers. + +`--prs ` +: Filter by pull request URLs or numbers (can specify multiple times). + +`--prs-file ` +: The path to a newline-delimited file containing PR URLs or numbers. + +`--repo ` +: Optional: The GitHub repository name, which is required when PRs are specified as numbers. + +`--resolve` +: Copy the contents of each changelog file into the entries array. From 82c2b8ad2f7686c4f3fac09928df9c04d652b6ad Mon Sep 17 00:00:00 2001 From: lcawl Date: Thu, 11 Dec 2025 23:40:57 -0800 Subject: [PATCH 15/54] Improve docs --- docs/contribute/changelog.md | 196 ++++++++++++++++-- .../Changelog/ChangelogInput.cs | 2 +- .../ChangelogService.cs | 2 +- .../docs-builder/Commands/ChangelogCommand.cs | 8 +- 4 files changed, 190 insertions(+), 18 deletions(-) diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index ca1f6355a..d9bf3f033 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -7,7 +7,7 @@ By adding a file for each notable change and grouping them into bundles, you can :::{note} This command is associated with an ongoing release docs initiative. -Additional workflows are still to come for managing the list of changelogs in each release. +Additional workflows are still to come for updating and generating documentation from changelogs. ::: The changelogs use the following schema: @@ -19,12 +19,12 @@ The changelogs use the following schema: ## Command options -The `changelog add` command supports all of the following options, which generally align with fields in the changelog schema: +The `changelog add` command creates a single YAML changelog file and supports all of the following options: ```sh Usage: changelog add [options...] [-h|--help] [--version] -Add a new changelog fragment from command-line input +Add a new changelog from command-line input Options: --products > Required: Products affected in format "product target lifecycle, ..." 
(e.g., "elasticsearch 9.2.0 ga, cloud-serverless 2025-08-05") [Required] @@ -41,11 +41,11 @@ Options: --action Optional: What users must do to mitigate [Default: null] --feature-id Optional: Feature flag ID [Default: null] --highlight Optional: Include in release highlights [Default: null] - --output Optional: Output directory for the changelog fragment. Defaults to current directory [Default: null] + --output Optional: Output directory for the changelog. Defaults to current directory [Default: null] --config Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null] ``` -The `changelog bundle` command supports all of the following options, which provide multiple methods for collecting the changelogs: +The `changelog bundle` command creates a single YAML bundle file and supports all of the following options: ```sh Bundle changelogs @@ -65,7 +65,9 @@ Options: ### Product format -The `--products` parameter accepts products in the format `"product target lifecycle, ..."` where: +The `docs-builder changelog add` has a `--products` option and the `docs-builder changelog bundle` has `--input-products` and `--output-products` options that all use the same format. + +They accept values with the format `"product target lifecycle, ..."` where: - `product` is the product ID from [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml) (required) - `target` is the target version or date (optional) @@ -79,18 +81,18 @@ Examples: ## Changelog configuration -Some of the fields in the changelog accept only a specific set of values. +Some of the fields in the changelog files accept only a specific set of values. :::{important} -- Product values must exist in [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml). Invalid products will cause the command to fail. -- Type, subtype, and lifecycle values must match the available values defined in [ChangelogConfiguration.cs](https://github.com/elastic/docs-builder/blob/main/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs). Invalid values will cause the command to fail. +- Product values must exist in [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml). Invalid products will cause the `docs-builder changelog add` command to fail. +- Type, subtype, and lifecycle values must match the available values defined in [ChangelogConfiguration.cs](https://github.com/elastic/docs-builder/blob/main/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs). Invalid values will cause the `docs-builder changelog add` command to fail. ::: -If you want to further limit the list of values, you can optionally create a configuration file. +If you want to further limit the list of acceptable values, you can create a changelog configuration file. Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example). -By default, the command checks the following path: `docs/changelog.yml`. +By default, the `docs-builder changelog add` command checks the following path: `docs/changelog.yml`. You can specify a different path with the `--config` command option. 
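As a rough sketch of what such a changelog configuration enables, the snippet below narrows a field like `type` to an allowed list. The shape shown here is an assumption for illustration only; the authoritative field definitions live in `ChangelogConfiguration.cs`.

```csharp
using System;
using System.Collections.Generic;

// Assumed shape only: a config that narrows the allowed values for selected fields.
public sealed class ChangelogConfigSketch
{
    public HashSet<string> AllowedTypes { get; init; } = new(StringComparer.OrdinalIgnoreCase);

    // Returns null when the value is acceptable, otherwise an error message.
    public string? ValidateType(string type) =>
        AllowedTypes.Count == 0 || AllowedTypes.Contains(type)
            ? null
            : $"Invalid type '{type}'. Allowed values: {string.Join(", ", AllowedTypes)}";
}
```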
If a configuration file exists, the command validates all its values before generating the changelog file: @@ -105,6 +107,60 @@ When you run the command with the `--pr` option, it can use these mappings to fi Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example). +## Bundle creation + +You can use the `docs-builder changelog bundle` command to create a YAML file that lists multiple changelogs. +By default, the file contains only the changelog file names and checksums. +For example: + +```yaml +products: +- product: elasticsearch + target: 9.2.2 +entries: +- file: + name: 1765507819-fix-ml-calendar-event-update-scalability-issues.yaml + checksum: 069b59edb14594e0bc3b70365e81626bde730ab7 +``` + +You can optionally use the `--resolve` command option to pull all of the content from each changelog into the bundle. +For example: + +```yaml +products: +- product: elasticsearch + target: 9.2.2 +entries: +- file: + name: 1765507819-fix-ml-calendar-event-update-scalability-issues.yaml + checksum: 069b59edb14594e0bc3b70365e81626bde730ab7 + type: bug-fix + title: Fix ML calendar event update scalability issues + products: + - product: elasticsearch + areas: + - Machine Learning + pr: https://github.com/elastic/elasticsearch/pull/136886 +``` + +When you run the `docs-builder changelog bundle` command, you can specify only one of the following filter options: + +`--all` +: Include all changelogs from the directory. + +`--input-products` +: Include changelogs for the specified products. +: The format aligns with [](#product-format). +: For example, `"cloud-serverless 2025-12-02, cloud-serverless 2025-12-06"`. + +`--prs` +: Include changelogs for the specified pull request URLs or numbers. +: Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). + +`--prs-file` +: Include changelogs for the pull request URLs or numbers specified in a newline-delimited file. +: Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). + ## Examples ### Create a changelog for multiple products @@ -173,7 +229,9 @@ label_to_areas: When you use the `--pr` option to derive information from a pull request, it can make use of those mappings: ```sh -docs-builder changelog add --pr https://github.com/elastic/elasticsearch/pull/139272 --products "elasticsearch 9.3.0" --config test/changelog.yml +docs-builder changelog add \ + --pr https://github.com/elastic/elasticsearch/pull/139272 \ + --products "elasticsearch 9.3.0" --config test/changelog.yml ``` In this case, the changelog file derives the title, type, and areas: @@ -188,3 +246,117 @@ areas: - ES|QL title: '[ES|QL] Take TOP_SNIPPETS out of snapshot' ``` + +### Create a changelog bundle by product + +You can use the `--input-products` option to create a bundle of changelogs that match the product details: + +```sh +docs-builder changelog bundle \ + --input-products "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06" <1> +``` + +1. Include all changelogs that have the `cloud-serverless` product identifier and target dates of either December 2 2025 or December 12 2025. For more information about product values, refer to [](#product-format). 
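The bundle filters described above accept pull requests as full URLs, as `owner/repo#123` shorthand, or as bare numbers combined with `--owner` and `--repo`. A hedged sketch of how those three notations can be normalized to one URL form follows; the `Normalize` helper is hypothetical and is not the command's actual parser.

```csharp
using System;

public static class PrReferenceSketch
{
    // Normalizes "https://github.com/owner/repo/pull/123", "owner/repo#123",
    // or "123" (plus --owner/--repo) to a canonical pull-request URL.
    public static string Normalize(string value, string? owner = null, string? repo = null)
    {
        if (value.StartsWith("https://github.com/", StringComparison.OrdinalIgnoreCase))
            return value;

        if (value.Contains('#'))
        {
            var parts = value.Split('#', 2);
            return $"https://github.com/{parts[0]}/pull/{parts[1]}";
        }

        if (owner is null || repo is null)
            throw new ArgumentException("Bare PR numbers need --owner and --repo.");

        return $"https://github.com/{owner}/{repo}/pull/{value}";
    }
}
```

For example, `Normalize("108875", "elastic", "elasticsearch")` produces the same URL form used in the bundle examples above.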
+ +If you have changelog files that reference those product details, the command creates a file like this: + +```yaml +products: <1> +- product: cloud-serverless + target: 2025-12-02 +- product: cloud-serverless + target: 2025-12-06 +entries: +- file: + name: 1765495972-fixes-enrich-and-lookup-join-resolution-based-on-m.yaml + checksum: 6c3243f56279b1797b5dfff6c02ebf90b9658464 +- file: + name: 1765507778-break-on-fielddata-when-building-global-ordinals.yaml + checksum: 70d197d96752c05b6595edffe6fe3ba3d055c845 +``` + +1. By default these values match your `--input-products` (even if the changelogs have more products). To specify different product metadata, use the `--output-products` option. + +If you add the `--resolve` option, the contents of each changelog will be included in the output file. +Refer to [](#bundle-creation). + +### Create a changelog bundle by PR list + +You can use the `--prs` option (with the `--repo` and `--owner` options if you provide only the PR numbers) to create a bundle of the changelogs that relate to those pull requests: + +```sh +docs-builder changelog bundle --prs 108875,135873,136886 \ <1> + --repo elasticsearch \ <2> + --owner elastic \ <3> + --output-products "elasticsearch 9.2.2" <4> +``` + +1. The list of pull request numbers to seek. +2. The repository in the pull request URLs. This option is not required if you specify the short or full PR URLs in the `--prs` option. +3. The owner in the pull request URLs. This option is not required if you specify the short or full PR URLs in the `--prs` option. +4. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. + +If you have changelog files that reference those pull requests, the command creates a file like this: + +```yaml +products: +- product: elasticsearch + target: 9.2.2 +entries: +- file: + name: 1765507819-fix-ml-calendar-event-update-scalability-issues.yaml + checksum: 069b59edb14594e0bc3b70365e81626bde730ab7 +- file: + name: 1765507798-convert-bytestransportresponse-when-proxying-respo.yaml + checksum: c6dbd4730bf34dbbc877c16c042e6578dd108b62 +- file: + name: 1765507839-use-ivf_pq-for-gpu-index-build-for-large-datasets.yaml + checksum: 451d60283fe5df426f023e824339f82c2900311e +``` + +If you add the `--resolve` option, the contents of each changelog will be included in the output file. +Refer to [](#bundle-creation). + + +### Create a changelog bundle by PR file + +If you have a file that lists pull requests (such as PRs associated with a GitHub release): + +```txt +https://github.com/elastic/elasticsearch/pull/108875 +https://github.com/elastic/elasticsearch/pull/135873 +https://github.com/elastic/elasticsearch/pull/136886 +https://github.com/elastic/elasticsearch/pull/137126 +``` + +You can use the `--prs-file` option to create a bundle of the changelogs that relate to those pull requests: + +```sh +./docs-builder changelog bundle --prs-file test/9.2.2.txt \ <1> + --output-products "elasticsearch 9.2.2" <3> + --resolve <3> +``` + +1. The path for the file that lists the pull requests. If the file contains only PR numbers, you must add `--repo` and `--owner` command options. +2. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. +3. Optionally include the contents of each changelog in the output file. 
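The `checksum` values in the bundle examples above are 40-character hex digests, which is consistent with a SHA-1 hash of the changelog file. The sketch below computes such a digest under that assumption; the actual hashing scheme is whatever `ChangelogService` implements.

```csharp
using System;
using System.IO;
using System.Security.Cryptography;

public static class ChecksumSketch
{
    // Produces a lowercase 40-character hex digest like the ones shown in the bundle examples.
    public static string Sha1Hex(string filePath)
    {
        using var stream = File.OpenRead(filePath);
        using var sha1 = SHA1.Create();
        var hash = sha1.ComputeHash(stream);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
```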
+ +If you have changelog files that reference those pull requests, the command creates a file like this: + +```yaml +products: +- product: elasticsearch + target: 9.2.2 +entries: +- file: + name: 1765507778-break-on-fielddata-when-building-global-ordinals.yaml + checksum: 70d197d96752c05b6595edffe6fe3ba3d055c845 + type: bug-fix + title: Break on FieldData when building global ordinals + products: + - product: elasticsearch + areas: + - Aggregations + pr: https://github.com/elastic/elasticsearch/pull/108875 +... +``` diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs index 86d4dce98..979b14b8f 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogInput.cs @@ -5,7 +5,7 @@ namespace Elastic.Documentation.Services.Changelog; /// -/// Input data for creating a changelog fragment +/// Input data for creating a changelog /// public class ChangelogInput { diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 0088032ef..2fce9000d 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -203,7 +203,7 @@ Cancel ctx // Write file await _fileSystem.File.WriteAllTextAsync(filePath, yamlContent, ctx); - _logger.LogInformation("Created changelog fragment: {FilePath}", filePath); + _logger.LogInformation("Created changelog: {FilePath}", filePath); return true; } diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index 93c93dbe0..ff73e51a0 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -19,17 +19,17 @@ IConfigurationContext configurationContext ) { /// - /// Changelog commands. Use 'changelog add' to create a new changelog fragment. + /// Changelog commands. Use 'changelog add' to create a new changelog or 'changelog bundle' to create a consolidated list of changelogs. /// [Command("")] public Task Default() { - collector.EmitError(string.Empty, "Please specify a subcommand. Use 'changelog add' to create a new changelog fragment. Run 'changelog add --help' for usage information."); + collector.EmitError(string.Empty, "Please specify a subcommand. Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n\nRun 'changelog add --help' or 'changelog bundle --help' for usage information."); return Task.FromResult(1); } /// - /// Add a new changelog fragment from command-line input + /// Add a new changelog from command-line input /// /// Optional: A short, user-facing title (max 80 characters). Required if --pr is not specified. If --pr and --title are specified, the latter value is used instead of what exists in the PR. /// Optional: Type of change (feature, enhancement, bug-fix, breaking-change, etc.). Required if --pr is not specified. If mappings are configured, type can be derived from the PR. @@ -45,7 +45,7 @@ public Task Default() /// Optional: What users must do to mitigate /// Optional: Feature flag ID /// Optional: Include in release highlights - /// Optional: Output directory for the changelog fragment. 
Defaults to current directory + /// Optional: Output directory for the changelog. Defaults to current directory /// Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' /// [Command("add")] From b4a67cb2c3f68b8e3bb440d65c06b867302d570a Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 12 Dec 2025 17:41:22 -0800 Subject: [PATCH 16/54] Edit docs --- docs/contribute/changelog.md | 255 +++++++++++++++-------------------- 1 file changed, 112 insertions(+), 143 deletions(-) diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index d9bf3f033..29803e18d 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -1,10 +1,12 @@ # Create and bundle changelogs -The `docs-builder changelog add` command creates a new changelog file from command-line input. -The `docs-builder changelog bundle` command creates a consolidated list of changelogs. - By adding a file for each notable change and grouping them into bundles, you can ultimately generate release documention with a consistent layout for all your products. +1. Create changelogs with the `docs-builder changelog add` command. +2. [Create changelog bundles](#changelog-bundle) with the `docs-builder changelog bundle` command. For example, create a bundle for the pull requests that are included in a product release. + +For more information about running `docs-builder`, go to [Contribute locally](https://www.elastic.co/docs/contribute-docs/locally). + :::{note} This command is associated with an ongoing release docs initiative. Additional workflows are still to come for updating and generating documentation from changelogs. @@ -45,24 +47,6 @@ Options: --config Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null] ``` -The `changelog bundle` command creates a single YAML bundle file and supports all of the following options: - -```sh -Bundle changelogs - -Options: - --directory Optional: Directory containing changelog YAML files. Defaults to current directory [Default: null] - --output Optional: Output file path for the bundled changelog. Defaults to 'changelog-bundle.yaml' in the input directory [Default: null] - --all Include all changelogs in the directory - --input-products ?> Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") [Default: null] - --output-products ?> Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. [Default: null] - --resolve Copy the contents of each changelog file into the entries array - --prs Filter by pull request URLs or numbers (can specify multiple times) [Default: null] - --prs-file Path to a newline-delimited file containing PR URLs or numbers [Default: null] - --owner Optional: GitHub repository owner (used when PRs are specified as numbers) [Default: null] - --repo Optional: GitHub repository name (used when PRs are specified as numbers) [Default: null] -``` - ### Product format The `docs-builder changelog add` has a `--products` option and the `docs-builder changelog bundle` has `--input-products` and `--output-products` options that all use the same format. @@ -107,43 +91,28 @@ When you run the command with the `--pr` option, it can use these mappings to fi Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example). 
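+
+Putting the `changelog add` options together, a minimal invocation that does not rely on a pull request needs at least a title, a type, and a product; the values below are purely illustrative:
+
+```sh
+docs-builder changelog add \
+  --title "Add an example setting" \
+  --type enhancement \
+  --products "elasticsearch 9.3.0" \
+  --output ./changelogs
+```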
-## Bundle creation +## Create bundles [changelog-bundle] You can use the `docs-builder changelog bundle` command to create a YAML file that lists multiple changelogs. -By default, the file contains only the changelog file names and checksums. -For example: - -```yaml -products: -- product: elasticsearch - target: 9.2.2 -entries: -- file: - name: 1765507819-fix-ml-calendar-event-update-scalability-issues.yaml - checksum: 069b59edb14594e0bc3b70365e81626bde730ab7 -``` +For up-to-date details, use the `-h` option: -You can optionally use the `--resolve` command option to pull all of the content from each changelog into the bundle. -For example: +```sh +Bundle changelogs -```yaml -products: -- product: elasticsearch - target: 9.2.2 -entries: -- file: - name: 1765507819-fix-ml-calendar-event-update-scalability-issues.yaml - checksum: 069b59edb14594e0bc3b70365e81626bde730ab7 - type: bug-fix - title: Fix ML calendar event update scalability issues - products: - - product: elasticsearch - areas: - - Machine Learning - pr: https://github.com/elastic/elasticsearch/pull/136886 +Options: + --directory Optional: Directory containing changelog YAML files. Defaults to current directory [Default: null] + --output Optional: Output file path for the bundled changelog. Defaults to 'changelog-bundle.yaml' in the input directory [Default: null] + --all Include all changelogs in the directory + --input-products ?> Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") [Default: null] + --output-products ?> Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. [Default: null] + --resolve Copy the contents of each changelog file into the entries array + --prs Filter by pull request URLs or numbers (can specify multiple times) [Default: null] + --prs-file Path to a newline-delimited file containing PR URLs or numbers [Default: null] + --owner Optional: GitHub repository owner (used when PRs are specified as numbers) [Default: null] + --repo Optional: GitHub repository name (used when PRs are specified as numbers) [Default: null] ``` -When you run the `docs-builder changelog bundle` command, you can specify only one of the following filter options: +You can specify only one of the following filter options: `--all` : Include all changelogs from the directory. @@ -161,93 +130,10 @@ When you run the `docs-builder changelog bundle` command, you can specify only o : Include changelogs for the pull request URLs or numbers specified in a newline-delimited file. : Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). -## Examples - -### Create a changelog for multiple products - -The following command creates a changelog for a bug fix that applies to two products: - -```sh -docs-builder changelog add \ - --title "Fixes enrich and lookup join resolution based on minimum transport version" \ <1> - --type bug-fix \ <2> - --products "elasticsearch 9.2.3, cloud-serverless 2025-12-02" \ <3> - --areas "ES|QL" - --pr "https://github.com/elastic/elasticsearch/pull/137431" <4> -``` - -1. This option is required only if you want to override what's derived from the PR title. -2. 
The type values are defined in [ChangelogConfiguration.cs](https://github.com/elastic/docs-builder/blob/main/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs). -3. The product values are defined in [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml). -4. The `--pr` value can be a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). - -The output file has the following format: - -```yaml -pr: https://github.com/elastic/elasticsearch/pull/137431 -type: bug-fix -products: -- product: elasticsearch - target: 9.2.3 -- product: cloud-serverless - target: 2025-12-02 -title: Fixes enrich and lookup join resolution based on minimum transport version -areas: -- ES|QL -``` - -### Create a changelog with PR label mappings - -You can update your changelog configuration file to contain GitHub label mappings, for example: - -```yaml -# Available areas (optional - if not specified, all areas are allowed) -available_areas: - - search - - security - - machine-learning - - observability - - index-management - - ES|QL - # Add more areas as needed - -# GitHub label mappings (optional - used when --pr option is specified) -# Maps GitHub PR labels to changelog type values -# When a PR has a label that matches a key, the corresponding type value is used -label_to_type: - # Example mappings - customize based on your label naming conventions - ">enhancement": enhancement - ">breaking": breaking-change - -# Maps GitHub PR labels to changelog area values -# Multiple labels can map to the same area, and a single label can map to multiple areas (comma-separated) -label_to_areas: - # Example mappings - customize based on your label naming conventions - ":Search Relevance/ES|QL": "ES|QL" -``` - -When you use the `--pr` option to derive information from a pull request, it can make use of those mappings: - -```sh -docs-builder changelog add \ - --pr https://github.com/elastic/elasticsearch/pull/139272 \ - --products "elasticsearch 9.3.0" --config test/changelog.yml -``` - -In this case, the changelog file derives the title, type, and areas: - -```yaml -pr: https://github.com/elastic/elasticsearch/pull/139272 -type: enhancement -products: -- product: elasticsearch - target: 9.3.0 -areas: -- ES|QL -title: '[ES|QL] Take TOP_SNIPPETS out of snapshot' -``` +By default, the output file contains only the changelog file names and checksums. +You can optionally use the `--resolve` command option to pull all of the content from each changelog into the bundle. -### Create a changelog bundle by product +### Filter by product [changelog-bundle-product] You can use the `--input-products` option to create a bundle of changelogs that match the product details: @@ -278,9 +164,8 @@ entries: 1. By default these values match your `--input-products` (even if the changelogs have more products). To specify different product metadata, use the `--output-products` option. If you add the `--resolve` option, the contents of each changelog will be included in the output file. -Refer to [](#bundle-creation). 
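+
+For instance, a fully resolved bundle with explicit product metadata could be produced like this; the output path is illustrative:
+
+```sh
+docs-builder changelog bundle \
+  --input-products "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06" \
+  --output-products "cloud-serverless 2025-12-06" \
+  --resolve \
+  --output ./cloud-serverless-resolved.yaml
+```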
-### Create a changelog bundle by PR list +### Filter by pull requests [changelog-bundle-pr] You can use the `--prs` option (with the `--repo` and `--owner` options if you provide only the PR numbers) to create a bundle of the changelogs that relate to those pull requests: @@ -315,10 +200,8 @@ entries: ``` If you add the `--resolve` option, the contents of each changelog will be included in the output file. -Refer to [](#bundle-creation). - -### Create a changelog bundle by PR file +### Filter by pull request file [changelog-bundle-file] If you have a file that lists pull requests (such as PRs associated with a GitHub release): @@ -360,3 +243,89 @@ entries: pr: https://github.com/elastic/elasticsearch/pull/108875 ... ``` + +## Examples + +### Create a changelog for multiple products + +The following command creates a changelog for a bug fix that applies to two products: + +```sh +docs-builder changelog add \ + --title "Fixes enrich and lookup join resolution based on minimum transport version" \ <1> + --type bug-fix \ <2> + --products "elasticsearch 9.2.3, cloud-serverless 2025-12-02" \ <3> + --areas "ES|QL" + --pr "https://github.com/elastic/elasticsearch/pull/137431" <4> +``` + +1. This option is required only if you want to override what's derived from the PR title. +2. The type values are defined in [ChangelogConfiguration.cs](https://github.com/elastic/docs-builder/blob/main/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs). +3. The product values are defined in [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml). +4. The `--pr` value can be a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). 
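+
+As the last callout notes, the pull request can instead be given as a bare number together with `--owner` and `--repo`. The following sketch of the same example uses that form:
+
+```sh
+docs-builder changelog add \
+  --title "Fixes enrich and lookup join resolution based on minimum transport version" \
+  --type bug-fix \
+  --products "elasticsearch 9.2.3, cloud-serverless 2025-12-02" \
+  --areas "ES|QL" \
+  --pr 137431 --owner elastic --repo elasticsearch
+```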
+ +The output file has the following format: + +```yaml +pr: https://github.com/elastic/elasticsearch/pull/137431 +type: bug-fix +products: +- product: elasticsearch + target: 9.2.3 +- product: cloud-serverless + target: 2025-12-02 +title: Fixes enrich and lookup join resolution based on minimum transport version +areas: +- ES|QL +``` + +### Create a changelog with PR label mappings + +You can update your changelog configuration file to contain GitHub label mappings, for example: + +```yaml +# Available areas (optional - if not specified, all areas are allowed) +available_areas: + - search + - security + - machine-learning + - observability + - index-management + - ES|QL + # Add more areas as needed + +# GitHub label mappings (optional - used when --pr option is specified) +# Maps GitHub PR labels to changelog type values +# When a PR has a label that matches a key, the corresponding type value is used +label_to_type: + # Example mappings - customize based on your label naming conventions + ">enhancement": enhancement + ">breaking": breaking-change + +# Maps GitHub PR labels to changelog area values +# Multiple labels can map to the same area, and a single label can map to multiple areas (comma-separated) +label_to_areas: + # Example mappings - customize based on your label naming conventions + ":Search Relevance/ES|QL": "ES|QL" +``` + +When you use the `--pr` option to derive information from a pull request, it can make use of those mappings: + +```sh +docs-builder changelog add \ + --pr https://github.com/elastic/elasticsearch/pull/139272 \ + --products "elasticsearch 9.3.0" --config test/changelog.yml +``` + +In this case, the changelog file derives the title, type, and areas: + +```yaml +pr: https://github.com/elastic/elasticsearch/pull/139272 +type: enhancement +products: +- product: elasticsearch + target: 9.3.0 +areas: +- ES|QL +title: '[ES|QL] Take TOP_SNIPPETS out of snapshot' +``` From ebdfcaefceeef5e9da1a356a90ee47de25778b66 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 29 Dec 2025 10:34:20 -0800 Subject: [PATCH 17/54] Add changelog render command (#2352) --- docs/contribute/changelog.md | 115 ++- .../Changelog/BundleInput.cs | 16 + .../Changelog/ChangelogRenderInput.cs | 17 + .../ChangelogService.cs | 778 ++++++++++++++++++ .../Arguments/BundleInputParser.cs | 51 ++ .../docs-builder/Commands/ChangelogCommand.cs | 38 +- .../ChangelogServiceTests.cs | 686 +++++++++++++++ 7 files changed, 1633 insertions(+), 68 deletions(-) create mode 100644 src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs create mode 100644 src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs create mode 100644 src/tooling/docs-builder/Arguments/BundleInputParser.cs diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index 29803e18d..4effb6a3b 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -4,6 +4,7 @@ By adding a file for each notable change and grouping them into bundles, you can 1. Create changelogs with the `docs-builder changelog add` command. 2. [Create changelog bundles](#changelog-bundle) with the `docs-builder changelog bundle` command. For example, create a bundle for the pull requests that are included in a product release. +3. [Create documentation](#render-changelogs) with the `docs-builder changelog render` command. For more information about running `docs-builder`, go to [Contribute locally](https://www.elastic.co/docs/contribute-docs/locally). @@ -244,88 +245,68 @@ entries: ... 
``` -## Examples +## Create documentation [render-changelogs] -### Create a changelog for multiple products - -The following command creates a changelog for a bug fix that applies to two products: +The `docs-builder changelog render` command creates markdown files from changelog bundles for documentation purposes. +For up-to-date details, use the `-h` command option: ```sh -docs-builder changelog add \ - --title "Fixes enrich and lookup join resolution based on minimum transport version" \ <1> - --type bug-fix \ <2> - --products "elasticsearch 9.2.3, cloud-serverless 2025-12-02" \ <3> - --areas "ES|QL" - --pr "https://github.com/elastic/elasticsearch/pull/137431" <4> -``` +Render bundled changelog(s) to markdown files -1. This option is required only if you want to override what's derived from the PR title. -2. The type values are defined in [ChangelogConfiguration.cs](https://github.com/elastic/docs-builder/blob/main/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs). -3. The product values are defined in [products.yml](https://github.com/elastic/docs-builder/blob/main/config/products.yml). -4. The `--pr` value can be a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). +Options: + --input > Required: Bundle input(s) in format "bundle-file-path, changelog-file-path, repo". Can be specified multiple times. Only bundle-file-path is required. [Required] + --output Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null] + --title Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null] + --subsections Optional: Group entries by area/component in subsections. Defaults to false +``` -The output file has the following format: +Before you can use this command you must create changelog files and collect them into bundles. 
+For example, the `docs-builder changelog bundle` command creates a file like this: ```yaml -pr: https://github.com/elastic/elasticsearch/pull/137431 -type: bug-fix products: - product: elasticsearch - target: 9.2.3 -- product: cloud-serverless - target: 2025-12-02 -title: Fixes enrich and lookup join resolution based on minimum transport version -areas: -- ES|QL + target: 9.2.2 +entries: +- file: + name: 1765581721-convert-bytestransportresponse-when-proxying-respo.yaml + checksum: d7e74edff1bdd3e23ba4f2f88b92cf61cc7d490a +- file: + name: 1765581721-fix-ml-calendar-event-update-scalability-issues.yaml + checksum: dfafce50c9fd61c3d8db286398f9553e67737f07 +- file: + name: 1765581651-break-on-fielddata-when-building-global-ordinals.yaml + checksum: 704b25348d6daff396259216201053334b5b3c1d ``` -### Create a changelog with PR label mappings +To create markdown files from this bundle, run the `docs-builder changelog render` command: -You can update your changelog configuration file to contain GitHub label mappings, for example: - -```yaml -# Available areas (optional - if not specified, all areas are allowed) -available_areas: - - search - - security - - machine-learning - - observability - - index-management - - ES|QL - # Add more areas as needed - -# GitHub label mappings (optional - used when --pr option is specified) -# Maps GitHub PR labels to changelog type values -# When a PR has a label that matches a key, the corresponding type value is used -label_to_type: - # Example mappings - customize based on your label naming conventions - ">enhancement": enhancement - ">breaking": breaking-change - -# Maps GitHub PR labels to changelog area values -# Multiple labels can map to the same area, and a single label can map to multiple areas (comma-separated) -label_to_areas: - # Example mappings - customize based on your label naming conventions - ":Search Relevance/ES|QL": "ES|QL" +```sh +docs-builder changelog render \ + --input "./changelog-bundle.yaml,./changelogs,elasticsearch" \ <1> + --title 9.2.2 \ <2> + --output ./release-notes \ <3> + --subsections \ <4> ``` -When you use the `--pr` option to derive information from a pull request, it can make use of those mappings: +1. Provide information about the changelog bundle. The format is `", , "`. Only the `` is required. The `` is useful if the changelogs are not in the default directory and are not resolved within the bundle. The `` is useful for PR or issue link checks. You can specify `--input` multiple times to merge multiple bundles. +2. The `--title` value is used for an output folder name and for section titles in the markdown files. If you omit `--title` and the first bundle contains a product `target` value, that value is used. Otherwise, if none of the bundles have product `target` fields, the title defaults to "unknown". +3. By default the command creates the output files in the current directory. +4. By default the changelog areas are not displayed in the output. Add `--subsections` to group changelog details by their `areas`. 
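+
+Because `--input` can be repeated, several bundles can be merged into a single set of rendered files. A hypothetical two-bundle run might look like this; the bundle file names and the second repository name are only illustrative:
+
+```sh
+docs-builder changelog render \
+  --input "./bundles/elasticsearch-9.2.2.yaml,./changelogs,elasticsearch" \
+  --input "./bundles/logstash-9.2.2.yaml,./changelogs,logstash" \
+  --title 9.2.2 \
+  --output ./release-notes
+```
+
+When several bundles are given, their entries are validated first and then merged into one set of output files.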
-```sh -docs-builder changelog add \ - --pr https://github.com/elastic/elasticsearch/pull/139272 \ - --products "elasticsearch 9.3.0" --config test/changelog.yml -``` +For example, the `index.md` output file contains information derived from the changelogs: -In this case, the changelog file derives the title, type, and areas: +```md +## 9.2.2 [elastic-release-notes-9.2.2] -```yaml -pr: https://github.com/elastic/elasticsearch/pull/139272 -type: enhancement -products: -- product: elasticsearch - target: 9.3.0 -areas: -- ES|QL -title: '[ES|QL] Take TOP_SNIPPETS out of snapshot' +### Fixes [elastic-9.2.2-fixes] + +**Network** +* Convert BytesTransportResponse when proxying response from/to local node. [#135873](https://github.com/elastic/elastic/pull/135873) + +**Machine Learning** +* Fix ML calendar event update scalability issues. [#136886](https://github.com/elastic/elastic/pull/136886) + +**Aggregations** +* Break on FieldData when building global ordinals. [#108875](https://github.com/elastic/elastic/pull/108875) ``` diff --git a/src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs new file mode 100644 index 000000000..60387a5dd --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/BundleInput.cs @@ -0,0 +1,16 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Input for a single bundle file with optional directory and repo +/// +public class BundleInput +{ + public string BundleFile { get; set; } = string.Empty; + public string? Directory { get; set; } + public string? Repo { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs new file mode 100644 index 000000000..e1b134295 --- /dev/null +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs @@ -0,0 +1,17 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +namespace Elastic.Documentation.Services.Changelog; + +/// +/// Input data for rendering changelog bundle to markdown +/// +public class ChangelogRenderInput +{ + public List Bundles { get; set; } = []; + public string? Output { get; set; } + public string? Title { get; set; } + public bool Subsections { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 2fce9000d..bd35b04de 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1004,5 +1004,783 @@ private static string NormalizePrForComparison(string pr, string? defaultOwner, // Return as-is for comparison (fallback) return pr.ToLowerInvariant(); } + + public async Task RenderChangelogs( + IDiagnosticsCollector collector, + ChangelogRenderInput input, + Cancel ctx + ) + { + try + { + // Validate input + if (input.Bundles == null || input.Bundles.Count == 0) + { + collector.EmitError(string.Empty, "At least one bundle file is required. 
Use --input to specify bundle files."); + return false; + } + + var deserializer = new StaticDeserializerBuilder(new ChangelogYamlStaticContext()) + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .Build(); + + // Validation phase: Load and validate all bundles before merging + var bundleDataList = new List<(BundledChangelogData data, BundleInput input, string directory)>(); + var seenFileNames = new Dictionary>(StringComparer.OrdinalIgnoreCase); // filename -> list of bundle files + var seenPrs = new Dictionary>(); // PR -> list of bundle files + var defaultRepo = "elastic"; + + foreach (var bundleInput in input.Bundles) + { + if (string.IsNullOrWhiteSpace(bundleInput.BundleFile)) + { + collector.EmitError(string.Empty, "Bundle file path is required for each --input"); + return false; + } + + if (!_fileSystem.File.Exists(bundleInput.BundleFile)) + { + collector.EmitError(bundleInput.BundleFile, "Bundle file does not exist"); + return false; + } + + // Load bundle file + var bundleContent = await _fileSystem.File.ReadAllTextAsync(bundleInput.BundleFile, ctx); + + // Validate bundle structure - check for unexpected fields by deserializing + BundledChangelogData? bundledData; + try + { + bundledData = deserializer.Deserialize(bundleContent); + } + catch (YamlException yamlEx) + { + collector.EmitError(bundleInput.BundleFile, $"Failed to deserialize bundle file: {yamlEx.Message}", yamlEx); + return false; + } + + if (bundledData == null) + { + collector.EmitError(bundleInput.BundleFile, "Failed to deserialize bundle file"); + return false; + } + + // Validate bundle has required structure + if (bundledData.Products == null) + { + collector.EmitError(bundleInput.BundleFile, "Bundle file is missing required field: products"); + return false; + } + + if (bundledData.Entries == null) + { + collector.EmitError(bundleInput.BundleFile, "Bundle file is missing required field: entries"); + return false; + } + + // Determine directory for resolving file references + var bundleDirectory = bundleInput.Directory ?? _fileSystem.Path.GetDirectoryName(bundleInput.BundleFile) ?? 
Directory.GetCurrentDirectory(); + + // Validate all referenced files exist and check for duplicates + var fileNamesInThisBundle = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var entry in bundledData.Entries) + { + // Track file names for duplicate detection + if (!string.IsNullOrWhiteSpace(entry.File?.Name)) + { + var fileName = entry.File.Name; + + // Check for duplicates within the same bundle + if (!fileNamesInThisBundle.Add(fileName)) + { + collector.EmitWarning(bundleInput.BundleFile, $"Changelog file '{fileName}' appears multiple times in the same bundle"); + } + + // Track across bundles + if (!seenFileNames.TryGetValue(fileName, out var bundleList)) + { + bundleList = []; + seenFileNames[fileName] = bundleList; + } + bundleList.Add(bundleInput.BundleFile); + } + + // If entry has resolved data, validate it + if (!string.IsNullOrWhiteSpace(entry.Title) && !string.IsNullOrWhiteSpace(entry.Type)) + { + // Validate required fields in resolved entry + if (string.IsNullOrWhiteSpace(entry.Title)) + { + collector.EmitError(bundleInput.BundleFile, $"Entry in bundle is missing required field: title"); + return false; + } + + if (string.IsNullOrWhiteSpace(entry.Type)) + { + collector.EmitError(bundleInput.BundleFile, $"Entry in bundle is missing required field: type"); + return false; + } + + if (entry.Products == null || entry.Products.Count == 0) + { + collector.EmitError(bundleInput.BundleFile, $"Entry '{entry.Title}' in bundle is missing required field: products"); + return false; + } + + // Track PRs for duplicate detection + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + var normalizedPr = NormalizePrForComparison(entry.Pr, null, null); + if (!seenPrs.TryGetValue(normalizedPr, out var prBundleList)) + { + prBundleList = []; + seenPrs[normalizedPr] = prBundleList; + } + prBundleList.Add(bundleInput.BundleFile); + } + } + else + { + // Entry only has file reference - validate file exists + if (string.IsNullOrWhiteSpace(entry.File?.Name)) + { + collector.EmitError(bundleInput.BundleFile, "Entry in bundle is missing required field: file.name"); + return false; + } + + if (string.IsNullOrWhiteSpace(entry.File.Checksum)) + { + collector.EmitError(bundleInput.BundleFile, $"Entry for file '{entry.File.Name}' in bundle is missing required field: file.checksum"); + return false; + } + + var filePath = _fileSystem.Path.Combine(bundleDirectory, entry.File.Name); + if (!_fileSystem.File.Exists(filePath)) + { + collector.EmitError(bundleInput.BundleFile, $"Referenced changelog file '{entry.File.Name}' does not exist at path: {filePath}"); + return false; + } + + // Validate the changelog file can be deserialized + try + { + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + var checksum = ComputeSha1(fileContent); + if (checksum != entry.File.Checksum) + { + collector.EmitWarning(bundleInput.BundleFile, $"Checksum mismatch for file {entry.File.Name}. 
Expected {entry.File.Checksum}, got {checksum}"); + } + + // Deserialize YAML (skip comment lines) to validate structure + var yamlLines = fileContent.Split('\n'); + var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); + + // Normalize "version:" to "target:" in products section + var normalizedYaml = VersionToTargetRegex().Replace(yamlWithoutComments, "$1target:"); + + var entryData = deserializer.Deserialize(normalizedYaml); + if (entryData == null) + { + collector.EmitError(bundleInput.BundleFile, $"Failed to deserialize changelog file '{entry.File.Name}'"); + return false; + } + + // Validate required fields in changelog file + if (string.IsNullOrWhiteSpace(entryData.Title)) + { + collector.EmitError(filePath, "Changelog file is missing required field: title"); + return false; + } + + if (string.IsNullOrWhiteSpace(entryData.Type)) + { + collector.EmitError(filePath, "Changelog file is missing required field: type"); + return false; + } + + if (entryData.Products == null || entryData.Products.Count == 0) + { + collector.EmitError(filePath, "Changelog file is missing required field: products"); + return false; + } + + // Track PRs for duplicate detection + if (!string.IsNullOrWhiteSpace(entryData.Pr)) + { + var normalizedPr = NormalizePrForComparison(entryData.Pr, null, null); + if (!seenPrs.TryGetValue(normalizedPr, out var prBundleList2)) + { + prBundleList2 = []; + seenPrs[normalizedPr] = prBundleList2; + } + prBundleList2.Add(bundleInput.BundleFile); + } + } + catch (YamlException yamlEx) + { + collector.EmitError(filePath, $"Failed to parse changelog file: {yamlEx.Message}", yamlEx); + return false; + } + } + } + + bundleDataList.Add((bundledData, bundleInput, bundleDirectory)); + } + + // Check for duplicate file names across bundles + foreach (var (fileName, bundleFiles) in seenFileNames.Where(kvp => kvp.Value.Count > 1)) + { + var uniqueBundles = bundleFiles.Distinct().ToList(); + if (uniqueBundles.Count > 1) + { + collector.EmitWarning(string.Empty, $"Changelog file '{fileName}' appears in multiple bundles: {string.Join(", ", uniqueBundles)}"); + } + } + + // Check for duplicate PRs + foreach (var (pr, bundleFiles) in seenPrs.Where(kvp => kvp.Value.Count > 1)) + { + var uniqueBundles = bundleFiles.Distinct().ToList(); + if (uniqueBundles.Count > 1) + { + collector.EmitWarning(string.Empty, $"PR '{pr}' appears in multiple bundles: {string.Join(", ", uniqueBundles)}"); + } + } + + // If validation found errors, stop before merging + if (collector.Errors > 0) + { + return false; + } + + // Merge phase: Now that validation passed, load and merge all bundles + var allResolvedEntries = new List<(ChangelogData entry, string repo)>(); + var allProducts = new HashSet<(string product, string target)>(); + + foreach (var (bundledData, bundleInput, bundleDirectory) in bundleDataList) + { + // Collect products from this bundle + foreach (var product in bundledData.Products) + { + var target = product.Target ?? string.Empty; + _ = allProducts.Add((product.Product, target)); + } + + var repo = bundleInput.Repo ?? defaultRepo; + + // Resolve entries + foreach (var entry in bundledData.Entries) + { + ChangelogData? 
entryData = null; + + // If entry has resolved data, use it + if (!string.IsNullOrWhiteSpace(entry.Title) && !string.IsNullOrWhiteSpace(entry.Type)) + { + entryData = new ChangelogData + { + Title = entry.Title, + Type = entry.Type, + Subtype = entry.Subtype, + Description = entry.Description, + Impact = entry.Impact, + Action = entry.Action, + FeatureId = entry.FeatureId, + Highlight = entry.Highlight, + Pr = entry.Pr, + Products = entry.Products ?? [], + Areas = entry.Areas, + Issues = entry.Issues + }; + } + else + { + // Load from file (already validated to exist) + var filePath = _fileSystem.Path.Combine(bundleDirectory, entry.File.Name); + var fileContent = await _fileSystem.File.ReadAllTextAsync(filePath, ctx); + + // Deserialize YAML (skip comment lines) + var yamlLines = fileContent.Split('\n'); + var yamlWithoutComments = string.Join('\n', yamlLines.Where(line => !line.TrimStart().StartsWith('#'))); + + // Normalize "version:" to "target:" in products section + var normalizedYaml = VersionToTargetRegex().Replace(yamlWithoutComments, "$1target:"); + + entryData = deserializer.Deserialize(normalizedYaml); + } + + if (entryData != null) + { + allResolvedEntries.Add((entryData, repo)); + } + } + } + + if (allResolvedEntries.Count == 0) + { + collector.EmitError(string.Empty, "No changelog entries to render"); + return false; + } + + // Determine output directory + var outputDir = input.Output ?? Directory.GetCurrentDirectory(); + if (!_fileSystem.Directory.Exists(outputDir)) + { + _ = _fileSystem.Directory.CreateDirectory(outputDir); + } + + // Extract version from products (use first product's target if available, or "unknown") + var version = allProducts.Count > 0 + ? allProducts.OrderBy(p => p.product).ThenBy(p => p.target).First().target + : "unknown"; + + if (string.IsNullOrWhiteSpace(version)) + { + version = "unknown"; + } + + // Warn if --title was not provided and version defaults to "unknown" + if (string.IsNullOrWhiteSpace(input.Title) && version == "unknown") + { + collector.EmitWarning(string.Empty, "No --title option provided and bundle files do not contain 'target' values. Output folder and markdown titles will default to 'unknown'. Consider using --title to specify a custom title."); + } + + // Group entries by type (kind) + var entriesByType = allResolvedEntries.Select(e => e.entry).GroupBy(e => e.Type).ToDictionary(g => g.Key, g => g.ToList()); + + // Use title from input or default to version + var title = input.Title ?? version; + + // Render markdown files (use first repo found, or default) + var repoForRendering = allResolvedEntries.Count > 0 ? 
allResolvedEntries[0].repo : defaultRepo; + + // Render index.md (features, enhancements, bug fixes, security) + await RenderIndexMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, ctx); + + // Render breaking-changes.md + await RenderBreakingChangesMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, ctx); + + // Render deprecations.md + await RenderDeprecationsMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, ctx); + + _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); + + return true; + } + catch (OperationCanceledException) + { + throw; + } + catch (IOException ioEx) + { + collector.EmitError(string.Empty, $"IO error rendering changelogs: {ioEx.Message}", ioEx); + return false; + } + catch (UnauthorizedAccessException uaEx) + { + collector.EmitError(string.Empty, $"Access denied rendering changelogs: {uaEx.Message}", uaEx); + return false; + } + catch (YamlException yamlEx) + { + collector.EmitError(string.Empty, $"YAML parsing error: {yamlEx.Message}", yamlEx); + return false; + } + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderIndexMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + Cancel ctx + ) + { + var features = entriesByType.GetValueOrDefault("feature", []); + var enhancements = entriesByType.GetValueOrDefault("enhancement", []); + var security = entriesByType.GetValueOrDefault("security", []); + var bugFixes = entriesByType.GetValueOrDefault("bug-fix", []); + + if (features.Count == 0 && enhancements.Count == 0 && security.Count == 0 && bugFixes.Count == 0) + { + // Still create file with "no changes" message + } + + var hasBreakingChanges = entriesByType.ContainsKey("breaking-change"); + var hasDeprecations = entriesByType.ContainsKey("deprecation"); + var hasKnownIssues = entriesByType.ContainsKey("known-issue"); + + var otherLinks = new List(); + if (hasKnownIssues) + { + otherLinks.Add("[Known issues](/release-notes/known-issues.md)"); + } + if (hasBreakingChanges) + { + otherLinks.Add($"[Breaking changes](/release-notes/breaking-changes.md#{repo}-{title}-breaking-changes)"); + } + if (hasDeprecations) + { + otherLinks.Add($"[Deprecations](/release-notes/deprecations.md#{repo}-{title}-deprecations)"); + } + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-release-notes-{title}]"); + + if (otherLinks.Count > 0) + { + var linksText = string.Join(" and ", otherLinks); + sb.AppendLine(CultureInfo.InvariantCulture, $"_{linksText}._"); + sb.AppendLine(); + } + + if (features.Count > 0 || enhancements.Count > 0 || security.Count > 0 || bugFixes.Count > 0) + { + if (features.Count > 0 || enhancements.Count > 0) + { + sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{title}-features-enhancements]"); + var combined = features.Concat(enhancements).ToList(); + RenderEntriesByArea(sb, combined, 
repo, subsections); + } + + if (security.Count > 0 || bugFixes.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{title}-fixes]"); + var combined = security.Concat(bugFixes).ToList(); + RenderEntriesByArea(sb, combined, repo, subsections); + } + } + else + { + sb.AppendLine("_No new features, enhancements, or fixes._"); + } + + var indexPath = _fileSystem.Path.Combine(outputDir, title, "index.md"); + var indexDir = _fileSystem.Path.GetDirectoryName(indexPath); + if (!string.IsNullOrWhiteSpace(indexDir) && !_fileSystem.Directory.Exists(indexDir)) + { + _ = _fileSystem.Directory.CreateDirectory(indexDir); + } + + await _fileSystem.File.WriteAllTextAsync(indexPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderBreakingChangesMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + Cancel ctx + ) + { + var breakingChanges = entriesByType.GetValueOrDefault("breaking-change", []); + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{title}-breaking-changes]"); + + if (breakingChanges.Count > 0) + { + var groupedByArea = breakingChanges.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"::::{{dropdown}} {Beautify(entry.Title)}"); + sb.AppendLine(entry.Description ?? "% Describe the functionality that changed"); + sb.AppendLine(); + sb.Append("For more information, check "); + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.Append(FormatPrLink(entry.Pr, repo)); + } + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(' '); + sb.Append(FormatIssueLink(issue, repo)); + } + } + sb.AppendLine("."); + sb.AppendLine(); + + if (!string.IsNullOrWhiteSpace(entry.Impact)) + { + sb.AppendLine("**Impact**
" + entry.Impact); + } + else + { + sb.AppendLine("% **Impact**
_Add a description of the impact_"); + } + + sb.AppendLine(); + + if (!string.IsNullOrWhiteSpace(entry.Action)) + { + sb.AppendLine("**Action**
" + entry.Action); + } + else + { + sb.AppendLine("% **Action**
_Add a description of the what action to take_"); + } + + sb.AppendLine("::::"); + } + } + } + else + { + sb.AppendLine("_No breaking changes._"); + } + + var breakingPath = _fileSystem.Path.Combine(outputDir, title, "breaking-changes.md"); + var breakingDir = _fileSystem.Path.GetDirectoryName(breakingPath); + if (!string.IsNullOrWhiteSpace(breakingDir) && !_fileSystem.Directory.Exists(breakingDir)) + { + _ = _fileSystem.Directory.CreateDirectory(breakingDir); + } + + await _fileSystem.File.WriteAllTextAsync(breakingPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderDeprecationsMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + Cancel ctx + ) + { + var deprecations = entriesByType.GetValueOrDefault("deprecation", []); + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{title}-deprecations]"); + + if (deprecations.Count > 0) + { + var groupedByArea = deprecations.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"::::{{dropdown}} {Beautify(entry.Title)}"); + sb.AppendLine(entry.Description ?? "% Describe the functionality that was deprecated"); + sb.AppendLine(); + sb.Append("For more information, check "); + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.Append(FormatPrLink(entry.Pr, repo)); + } + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(' '); + sb.Append(FormatIssueLink(issue, repo)); + } + } + sb.AppendLine("."); + sb.AppendLine(); + + if (!string.IsNullOrWhiteSpace(entry.Impact)) + { + sb.AppendLine("**Impact**
" + entry.Impact); + } + else + { + sb.AppendLine("% **Impact**
_Add a description of the impact_"); + } + + sb.AppendLine(); + + if (!string.IsNullOrWhiteSpace(entry.Action)) + { + sb.AppendLine("**Action**
" + entry.Action); + } + else + { + sb.AppendLine("% **Action**
_Add a description of the what action to take_"); + } + + sb.AppendLine("::::"); + } + } + } + else + { + sb.AppendLine("_No deprecations._"); + } + + var deprecationsPath = _fileSystem.Path.Combine(outputDir, title, "deprecations.md"); + var deprecationsDir = _fileSystem.Path.GetDirectoryName(deprecationsPath); + if (!string.IsNullOrWhiteSpace(deprecationsDir) && !_fileSystem.Directory.Exists(deprecationsDir)) + { + _ = _fileSystem.Directory.CreateDirectory(deprecationsDir); + } + + await _fileSystem.File.WriteAllTextAsync(deprecationsPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections) + { + var groupedByArea = entries.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + sb.Append("* "); + sb.Append(Beautify(entry.Title)); + sb.Append(' '); + + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.Append(FormatPrLink(entry.Pr, repo)); + sb.Append(' '); + } + + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(FormatIssueLink(issue, repo)); + sb.Append(' '); + } + } + + if (!string.IsNullOrWhiteSpace(entry.Description)) + { + sb.AppendLine(); + var indented = Indent(entry.Description); + sb.AppendLine(indented); + } + else + { + sb.AppendLine(); + } + } + } + } + + private static string GetComponent(ChangelogData entry) + { + // Map areas (list) to component (string) - use first area or empty string + if (entry.Areas != null && entry.Areas.Count > 0) + { + return entry.Areas[0]; + } + return string.Empty; + } + + private static string FormatAreaHeader(string area) + { + // Capitalize first letter and replace hyphens with spaces + if (string.IsNullOrWhiteSpace(area)) + return string.Empty; + + var result = char.ToUpperInvariant(area[0]) + area.Substring(1); + return result.Replace("-", " "); + } + + private static string Beautify(string text) + { + if (string.IsNullOrWhiteSpace(text)) + return string.Empty; + + // Capitalize first letter and ensure ends with period + var result = char.ToUpperInvariant(text[0]) + text.Substring(1); + if (!result.EndsWith('.')) + { + result += "."; + } + return result; + } + + private static string Indent(string text) + { + // Indent each line with two spaces + var lines = text.Split('\n'); + return string.Join("\n", lines.Select(line => " " + line)); + } + + [GeneratedRegex(@"\d+$", RegexOptions.None)] + private static partial Regex PrNumberRegex(); + + [GeneratedRegex(@"\d+$", RegexOptions.None)] + private static partial Regex IssueNumberRegex(); + + private static string FormatPrLink(string pr, string repo) + { + // Extract PR number + var match = PrNumberRegex().Match(pr); + var prNumber = match.Success ? 
match.Value : pr; + + // Format as markdown link + if (pr.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + return $"[#{prNumber}]({pr})"; + } + else + { + var url = $"https://github.com/elastic/{repo}/pull/{prNumber}"; + return $"[#{prNumber}]({url})"; + } + } + + private static string FormatIssueLink(string issue, string repo) + { + // Extract issue number + var match = IssueNumberRegex().Match(issue); + var issueNumber = match.Success ? match.Value : issue; + + // Format as markdown link + if (issue.StartsWith("http", StringComparison.OrdinalIgnoreCase)) + { + return $"[#{issueNumber}]({issue})"; + } + else + { + var url = $"https://github.com/elastic/{repo}/issues/{issueNumber}"; + return $"[#{issueNumber}]({url})"; + } + } } diff --git a/src/tooling/docs-builder/Arguments/BundleInputParser.cs b/src/tooling/docs-builder/Arguments/BundleInputParser.cs new file mode 100644 index 000000000..f97e7011b --- /dev/null +++ b/src/tooling/docs-builder/Arguments/BundleInputParser.cs @@ -0,0 +1,51 @@ +// Licensed to Elasticsearch B.V under one or more agreements. +// Elasticsearch B.V licenses this file to you under the Apache 2.0 License. +// See the LICENSE file in the project root for more information + +using ConsoleAppFramework; +using Elastic.Documentation.Services.Changelog; + +namespace Documentation.Builder.Arguments; + +/// +/// Parser for bundle input format: "bundle-file-path, changelog-file-path, repo" +/// Only bundle-file-path is required. +/// Can be specified multiple times. +/// +[AttributeUsage(AttributeTargets.Parameter)] +public class BundleInputParserAttribute : Attribute, IArgumentParser> +{ + public static bool TryParse(ReadOnlySpan s, out List result) + { + result = []; + + // Split by comma to get parts + var parts = s.ToString().Split(',', StringSplitOptions.TrimEntries); + + if (parts.Length == 0 || string.IsNullOrWhiteSpace(parts[0])) + { + return false; + } + + var bundleInput = new BundleInput + { + BundleFile = parts[0] + }; + + // Directory is optional (second part) + if (parts.Length > 1 && !string.IsNullOrWhiteSpace(parts[1])) + { + bundleInput.Directory = parts[1]; + } + + // Repo is optional (third part) + if (parts.Length > 2 && !string.IsNullOrWhiteSpace(parts[2])) + { + bundleInput.Repo = parts[2]; + } + + result.Add(bundleInput); + return true; + } +} + diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index ff73e51a0..ba2d263d7 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -24,7 +24,7 @@ IConfigurationContext configurationContext [Command("")] public Task Default() { - collector.EmitError(string.Empty, "Please specify a subcommand. Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n\nRun 'changelog add --help' or 'changelog bundle --help' for usage information."); + collector.EmitError(string.Empty, "Please specify a subcommand. 
Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n - 'changelog render': Render a bundled changelog to markdown files\n\nRun 'changelog add --help', 'changelog bundle --help', or 'changelog render --help' for usage information."); return Task.FromResult(1); } @@ -154,5 +154,41 @@ async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, s return await serviceInvoker.InvokeAsync(ctx); } + + /// + /// Render bundled changelog(s) to markdown files + /// + /// Required: Bundle input(s) in format "bundle-file-path, changelog-file-path, repo". Can be specified multiple times. Only bundle-file-path is required. + /// Optional: Output directory for rendered markdown files. Defaults to current directory + /// Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle + /// Optional: Group entries by area/component in subsections. Defaults to false + /// + [Command("render")] + public async Task Render( + [BundleInputParser] List input, + string? output = null, + string? title = null, + bool subsections = false, + Cancel ctx = default + ) + { + await using var serviceInvoker = new ServiceInvoker(collector); + + var service = new ChangelogService(logFactory, configurationContext, null); + + var renderInput = new ChangelogRenderInput + { + Bundles = input ?? [], + Output = output, + Title = title, + Subsections = subsections + }; + + serviceInvoker.AddCommand(service, renderInput, + async static (s, collector, state, ctx) => await s.RenderChangelogs(collector, state, ctx) + ); + + return await serviceInvoker.InvokeAsync(ctx); + } } diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index a6de682e6..660dc826c 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -1695,5 +1695,691 @@ public async Task BundleChangelogs_WithResolveAndInvalidProduct_ReturnsError() _collector.Errors.Should().BeGreaterThan(0); _collector.Diagnostics.Should().Contain(d => d.Message.Contains("product entry missing required field: product")); } + + [Fact] + public async Task RenderChangelogs_WithValidBundle_CreatesMarkdownFiles() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This is a test feature + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog1, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml"); + fileSystem.Directory.CreateDirectory(fileSystem.Path.GetDirectoryName(bundleFile)!); + + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-test-feature.yaml + checksum: 
{ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("## 9.2.0"); + indexContent.Should().Contain("Test feature"); + } + + [Fact] + public async Task RenderChangelogs_WithMultipleBundles_MergesAndRenders() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir1 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var changelogDir2 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir1); + fileSystem.Directory.CreateDirectory(changelogDir2); + + // Create test changelog files + var changelog1 = """ + title: First feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second feature + type: enhancement + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/200 + """; + + var file1 = fileSystem.Path.Combine(changelogDir1, "1755268130-first.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir2, "1755268140-second.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + // Create bundle files + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundle1 = fileSystem.Path.Combine(bundleDir, "bundle1.yaml"); + var bundleContent1 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-first.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundle1, bundleContent1, TestContext.Current.CancellationToken); + + var bundle2 = fileSystem.Path.Combine(bundleDir, "bundle2.yaml"); + var bundleContent2 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268140-second.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundle2, bundleContent2, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundle1, Directory = changelogDir1 }, + new BundleInput { BundleFile = bundle2, Directory = changelogDir2 } + ], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await 
service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("First feature"); + indexContent.Should().Contain("Second feature"); + } + + [Fact] + public async Task RenderChangelogs_WithMissingBundleFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var missingBundle = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.yaml"); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = missingBundle }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("Bundle file does not exist")); + } + + [Fact] + public async Task RenderChangelogs_WithMissingChangelogFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = """ + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: nonexistent.yaml + checksum: abc123 + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = bundleDir }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("does not exist")); + } + + [Fact] + public async Task RenderChangelogs_WithInvalidBundleStructure_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = """ + invalid_field: value + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + 
result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field") || d.Message.Contains("Failed to deserialize")); + } + + [Fact] + public async Task RenderChangelogs_WithDuplicateFileName_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir1 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var changelogDir2 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir1); + fileSystem.Directory.CreateDirectory(changelogDir2); + + // Create same changelog file in both directories + var changelog = """ + title: Duplicate feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var fileName = "1755268130-duplicate.yaml"; + var file1 = fileSystem.Path.Combine(changelogDir1, fileName); + var file2 = fileSystem.Path.Combine(changelogDir2, fileName); + await fileSystem.File.WriteAllTextAsync(file1, changelog, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog, TestContext.Current.CancellationToken); + + // Create bundle files + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundle1 = fileSystem.Path.Combine(bundleDir, "bundle1.yaml"); + var bundleContent1 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundle1, bundleContent1, TestContext.Current.CancellationToken); + + var bundle2 = fileSystem.Path.Combine(bundleDir, "bundle2.yaml"); + var bundleContent2 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundle2, bundleContent2, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundle1, Directory = changelogDir1 }, + new BundleInput { BundleFile = bundle2, Directory = changelogDir2 } + ], + Output = outputDir + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("appears in multiple bundles")); + } + + [Fact] + public async Task RenderChangelogs_WithDuplicateFileNameInSameBundle_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog file + var changelog = """ + title: Test feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + 
"""; + + var fileName = "1755268130-test-feature.yaml"; + var changelogFile = fileSystem.Path.Combine(changelogDir, fileName); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // Create bundle file with the same file referenced twice + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + - file: + name: {fileName} + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundleFile, Directory = changelogDir } + ], + Output = outputDir + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("appears multiple times in the same bundle") && + d.File == bundleFile); + } + + [Fact] + public async Task RenderChangelogs_WithDuplicatePr_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir1 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var changelogDir2 = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir1); + fileSystem.Directory.CreateDirectory(changelogDir2); + + // Create changelog files with same PR + var changelog1 = """ + title: First feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + var changelog2 = """ + title: Second feature + type: enhancement + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var file1 = fileSystem.Path.Combine(changelogDir1, "1755268130-first.yaml"); + var file2 = fileSystem.Path.Combine(changelogDir2, "1755268140-second.yaml"); + await fileSystem.File.WriteAllTextAsync(file1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(file2, changelog2, TestContext.Current.CancellationToken); + + // Create bundle files + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundle1 = fileSystem.Path.Combine(bundleDir, "bundle1.yaml"); + var bundleContent1 = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-first.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundle1, bundleContent1, TestContext.Current.CancellationToken); + + var bundle2 = fileSystem.Path.Combine(bundleDir, "bundle2.yaml"); + var bundleContent2 = $""" + products: + - product: elasticsearch + target: 9.2.0 + 
entries: + - file: + name: 1755268140-second.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundle2, bundleContent2, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [ + new BundleInput { BundleFile = bundle1, Directory = changelogDir1 }, + new BundleInput { BundleFile = bundle2, Directory = changelogDir2 } + ], + Output = outputDir + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("appears in multiple bundles")); + } + + [Fact] + public async Task RenderChangelogs_WithInvalidChangelogFile_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create invalid changelog file (missing required fields) + var invalidChangelog = """ + title: Invalid feature + # Missing type and products + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-invalid.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, invalidChangelog, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-invalid.yaml + checksum: {ComputeSha1(invalidChangelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()) + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeFalse(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("missing required field")); + } + + [Fact] + public async Task RenderChangelogs_WithResolvedEntry_ValidatesAndRenders() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = """ + products: + - product: elasticsearch + target: 9.2.0 + entries: + - type: feature + title: Resolved feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + 
var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("Resolved feature"); + } + + [Fact] + public async Task RenderChangelogs_WithoutTitleAndNoTargets_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without target + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog1, TestContext.Current.CancellationToken); + + // Create bundle file without target + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + entries: + - file: + name: 1755268130-test-feature.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir + // Note: Title is not set + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No --title option provided") && + d.Message.Contains("default to 'unknown'")); + } + + [Fact] + public async Task RenderChangelogs_WithTitleAndNoTargets_NoWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create test changelog file without target + var changelog1 = """ + title: Test feature + type: feature + products: + - product: elasticsearch + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-feature.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, 
changelog1, TestContext.Current.CancellationToken); + + // Create bundle file without target + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + entries: + - file: + name: 1755268130-test-feature.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" // Title is provided + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + // Should not have warning about missing title + _collector.Diagnostics.Should().NotContain(d => + d.Severity == Severity.Warning && + d.Message.Contains("No --title option provided")); + } + + private static string ComputeSha1(string content) + { + var bytes = System.Text.Encoding.UTF8.GetBytes(content); + var hash = System.Security.Cryptography.SHA1.HashData(bytes); + return System.Convert.ToHexString(hash).ToLowerInvariant(); + } } From f4b7250489733174172b9e46a0aae264f4bc242f Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 31 Dec 2025 13:33:17 -0800 Subject: [PATCH 18/54] Add option to hide private links when rendering changelogs (#2408) --- docs/contribute/changelog.md | 1 + .../Changelog/ChangelogRenderInput.cs | 1 + .../ChangelogService.cs | 174 ++++++++++++++---- .../docs-builder/Commands/ChangelogCommand.cs | 5 +- 4 files changed, 143 insertions(+), 38 deletions(-) diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index 4effb6a3b..f85ce839e 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -258,6 +258,7 @@ Options: --output Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null] --title Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null] --subsections Optional: Group entries by area/component in subsections. Defaults to false + --hide-private-links Optional: Hide private links by commenting them out in markdown output. Defaults to false ``` Before you can use this command you must create changelog files and collect them into bundles. diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs index e1b134295..b0c596464 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs @@ -13,5 +13,6 @@ public class ChangelogRenderInput public string? Output { get; set; } public string? 
Title { get; set; } public bool Subsections { get; set; } + public bool HidePrivateLinks { get; set; } } diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index bd35b04de..eeb5374ff 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1354,13 +1354,13 @@ Cancel ctx var repoForRendering = allResolvedEntries.Count > 0 ? allResolvedEntries[0].repo : defaultRepo; // Render index.md (features, enhancements, bug fixes, security) - await RenderIndexMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, ctx); + await RenderIndexMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); // Render breaking-changes.md - await RenderBreakingChangesMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, ctx); + await RenderBreakingChangesMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); // Render deprecations.md - await RenderDeprecationsMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, ctx); + await RenderDeprecationsMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); @@ -1397,6 +1397,7 @@ private async Task RenderIndexMarkdown( List entries, Dictionary> entriesByType, bool subsections, + bool hidePrivateLinks, Cancel ctx ) { @@ -1444,7 +1445,7 @@ Cancel ctx { sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{title}-features-enhancements]"); var combined = features.Concat(enhancements).ToList(); - RenderEntriesByArea(sb, combined, repo, subsections); + RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks); } if (security.Count > 0 || bugFixes.Count > 0) @@ -1452,7 +1453,7 @@ Cancel ctx sb.AppendLine(); sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{title}-fixes]"); var combined = security.Concat(bugFixes).ToList(); - RenderEntriesByArea(sb, combined, repo, subsections); + RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks); } } else @@ -1480,6 +1481,7 @@ private async Task RenderBreakingChangesMarkdown( List entries, Dictionary> entriesByType, bool subsections, + bool hidePrivateLinks, Cancel ctx ) { @@ -1506,20 +1508,39 @@ Cancel ctx sb.AppendLine(CultureInfo.InvariantCulture, $"::::{{dropdown}} {Beautify(entry.Title)}"); sb.AppendLine(entry.Description ?? 
"% Describe the functionality that changed"); sb.AppendLine(); - sb.Append("For more information, check "); - if (!string.IsNullOrWhiteSpace(entry.Pr)) + if (hidePrivateLinks) { - sb.Append(FormatPrLink(entry.Pr, repo)); + // When hiding private links, put them on separate lines as comments + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.AppendLine(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); + } + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.AppendLine(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("For more information, check the pull request or issue above."); } - if (entry.Issues != null && entry.Issues.Count > 0) + else { - foreach (var issue in entry.Issues) + sb.Append("For more information, check "); + if (!string.IsNullOrWhiteSpace(entry.Pr)) { - sb.Append(' '); - sb.Append(FormatIssueLink(issue, repo)); + sb.Append(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); } + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(' '); + sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("."); } - sb.AppendLine("."); sb.AppendLine(); if (!string.IsNullOrWhiteSpace(entry.Impact)) @@ -1571,6 +1592,7 @@ private async Task RenderDeprecationsMarkdown( List entries, Dictionary> entriesByType, bool subsections, + bool hidePrivateLinks, Cancel ctx ) { @@ -1597,20 +1619,39 @@ Cancel ctx sb.AppendLine(CultureInfo.InvariantCulture, $"::::{{dropdown}} {Beautify(entry.Title)}"); sb.AppendLine(entry.Description ?? "% Describe the functionality that was deprecated"); sb.AppendLine(); - sb.Append("For more information, check "); - if (!string.IsNullOrWhiteSpace(entry.Pr)) + if (hidePrivateLinks) { - sb.Append(FormatPrLink(entry.Pr, repo)); + // When hiding private links, put them on separate lines as comments + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.AppendLine(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); + } + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.AppendLine(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("For more information, check the pull request or issue above."); } - if (entry.Issues != null && entry.Issues.Count > 0) + else { - foreach (var issue in entry.Issues) + sb.Append("For more information, check "); + if (!string.IsNullOrWhiteSpace(entry.Pr)) { - sb.Append(' '); - sb.Append(FormatIssueLink(issue, repo)); + sb.Append(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); } + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(' '); + sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("."); } - sb.AppendLine("."); sb.AppendLine(); if (!string.IsNullOrWhiteSpace(entry.Impact)) @@ -1653,7 +1694,7 @@ Cancel ctx } [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] - private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections) + private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections, bool hidePrivateLinks) { var groupedByArea = entries.GroupBy(e => GetComponent(e)).ToList(); foreach (var areaGroup in groupedByArea) @@ -1669,26 +1710,67 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, { sb.Append("* "); 
sb.Append(Beautify(entry.Title)); - sb.Append(' '); - if (!string.IsNullOrWhiteSpace(entry.Pr)) + var hasCommentedLinks = false; + if (hidePrivateLinks) { - sb.Append(FormatPrLink(entry.Pr, repo)); - sb.Append(' '); - } + // When hiding private links, put them on separate lines as comments with proper indentation + if (!string.IsNullOrWhiteSpace(entry.Pr)) + { + sb.AppendLine(); + sb.Append(" "); + sb.Append(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); + hasCommentedLinks = true; + } + + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.AppendLine(); + sb.Append(" "); + sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + hasCommentedLinks = true; + } + } - if (entry.Issues != null && entry.Issues.Count > 0) + // Add newline after the last link if there are commented links + if (hasCommentedLinks) + { + sb.AppendLine(); + } + } + else { - foreach (var issue in entry.Issues) + sb.Append(' '); + if (!string.IsNullOrWhiteSpace(entry.Pr)) { - sb.Append(FormatIssueLink(issue, repo)); + sb.Append(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); sb.Append(' '); } + + if (entry.Issues != null && entry.Issues.Count > 0) + { + foreach (var issue in entry.Issues) + { + sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + sb.Append(' '); + } + } } if (!string.IsNullOrWhiteSpace(entry.Description)) { - sb.AppendLine(); + // Add blank line before description + // When hidePrivateLinks is true and links exist, add an indented blank line + if (hidePrivateLinks && hasCommentedLinks) + { + sb.AppendLine(" "); + } + else + { + sb.AppendLine(); + } var indented = Indent(entry.Description); sb.AppendLine(indented); } @@ -1747,40 +1829,58 @@ private static string Indent(string text) [GeneratedRegex(@"\d+$", RegexOptions.None)] private static partial Regex IssueNumberRegex(); - private static string FormatPrLink(string pr, string repo) + private static string FormatPrLink(string pr, string repo, bool hidePrivateLinks) { // Extract PR number var match = PrNumberRegex().Match(pr); var prNumber = match.Success ? match.Value : pr; // Format as markdown link + string link; if (pr.StartsWith("http", StringComparison.OrdinalIgnoreCase)) { - return $"[#{prNumber}]({pr})"; + link = $"[#{prNumber}]({pr})"; } else { var url = $"https://github.com/elastic/{repo}/pull/{prNumber}"; - return $"[#{prNumber}]({url})"; + link = $"[#{prNumber}]({url})"; } + + // Comment out link if hiding private links + if (hidePrivateLinks) + { + return $"% {link}"; + } + + return link; } - private static string FormatIssueLink(string issue, string repo) + private static string FormatIssueLink(string issue, string repo, bool hidePrivateLinks) { // Extract issue number var match = IssueNumberRegex().Match(issue); var issueNumber = match.Success ? 
match.Value : issue; // Format as markdown link + string link; if (issue.StartsWith("http", StringComparison.OrdinalIgnoreCase)) { - return $"[#{issueNumber}]({issue})"; + link = $"[#{issueNumber}]({issue})"; } else { var url = $"https://github.com/elastic/{repo}/issues/{issueNumber}"; - return $"[#{issueNumber}]({url})"; + link = $"[#{issueNumber}]({url})"; } + + // Comment out link if hiding private links + if (hidePrivateLinks) + { + return $"% {link}"; + } + + return link; } } diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index ba2d263d7..e1f2b5591 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -162,6 +162,7 @@ async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, s /// Optional: Output directory for rendered markdown files. Defaults to current directory /// Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle /// Optional: Group entries by area/component in subsections. Defaults to false + /// Optional: Hide private links by commenting them out in markdown output. Defaults to false /// [Command("render")] public async Task Render( @@ -169,6 +170,7 @@ public async Task Render( string? output = null, string? title = null, bool subsections = false, + bool hidePrivateLinks = false, Cancel ctx = default ) { @@ -181,7 +183,8 @@ public async Task Render( Bundles = input ?? [], Output = output, Title = title, - Subsections = subsections + Subsections = subsections, + HidePrivateLinks = hidePrivateLinks }; serviceInvoker.AddCommand(service, renderInput, From 0645a4c98b26baf584f76370394ccd79e23ac6cb Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 31 Dec 2025 14:01:18 -0800 Subject: [PATCH 19/54] Fix changelog render when title has spaces (#2409) --- .../ChangelogService.cs | 40 +++++++++++++------ 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index eeb5374ff..b4357b33b 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1349,18 +1349,20 @@ Cancel ctx // Use title from input or default to version var title = input.Title ?? version; + // Convert title to slug format for folder names and anchors (lowercase, dashes instead of spaces) + var titleSlug = TitleToSlug(title); // Render markdown files (use first repo found, or default) var repoForRendering = allResolvedEntries.Count > 0 ? 
allResolvedEntries[0].repo : defaultRepo; // Render index.md (features, enhancements, bug fixes, security) - await RenderIndexMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); + await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); // Render breaking-changes.md - await RenderBreakingChangesMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); + await RenderBreakingChangesMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); // Render deprecations.md - await RenderDeprecationsMarkdown(collector, outputDir, title, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); + await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); @@ -1393,6 +1395,7 @@ private async Task RenderIndexMarkdown( IDiagnosticsCollector collector, string outputDir, string title, + string titleSlug, string repo, List entries, Dictionary> entriesByType, @@ -1422,15 +1425,15 @@ Cancel ctx } if (hasBreakingChanges) { - otherLinks.Add($"[Breaking changes](/release-notes/breaking-changes.md#{repo}-{title}-breaking-changes)"); + otherLinks.Add($"[Breaking changes](/release-notes/breaking-changes.md#{repo}-{titleSlug}-breaking-changes)"); } if (hasDeprecations) { - otherLinks.Add($"[Deprecations](/release-notes/deprecations.md#{repo}-{title}-deprecations)"); + otherLinks.Add($"[Deprecations](/release-notes/deprecations.md#{repo}-{titleSlug}-deprecations)"); } var sb = new StringBuilder(); - sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-release-notes-{title}]"); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-release-notes-{titleSlug}]"); if (otherLinks.Count > 0) { @@ -1443,7 +1446,7 @@ Cancel ctx { if (features.Count > 0 || enhancements.Count > 0) { - sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{title}-features-enhancements]"); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{titleSlug}-features-enhancements]"); var combined = features.Concat(enhancements).ToList(); RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks); } @@ -1451,7 +1454,7 @@ Cancel ctx if (security.Count > 0 || bugFixes.Count > 0) { sb.AppendLine(); - sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{title}-fixes]"); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{titleSlug}-fixes]"); var combined = security.Concat(bugFixes).ToList(); RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks); } @@ -1461,7 +1464,7 @@ Cancel ctx sb.AppendLine("_No new features, enhancements, or fixes._"); } - var indexPath = _fileSystem.Path.Combine(outputDir, title, "index.md"); + var indexPath = _fileSystem.Path.Combine(outputDir, titleSlug, "index.md"); var indexDir = 
_fileSystem.Path.GetDirectoryName(indexPath); if (!string.IsNullOrWhiteSpace(indexDir) && !_fileSystem.Directory.Exists(indexDir)) { @@ -1477,6 +1480,7 @@ private async Task RenderBreakingChangesMarkdown( IDiagnosticsCollector collector, string outputDir, string title, + string titleSlug, string repo, List entries, Dictionary> entriesByType, @@ -1488,7 +1492,7 @@ Cancel ctx var breakingChanges = entriesByType.GetValueOrDefault("breaking-change", []); var sb = new StringBuilder(); - sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{title}-breaking-changes]"); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-breaking-changes]"); if (breakingChanges.Count > 0) { @@ -1572,7 +1576,7 @@ Cancel ctx sb.AppendLine("_No breaking changes._"); } - var breakingPath = _fileSystem.Path.Combine(outputDir, title, "breaking-changes.md"); + var breakingPath = _fileSystem.Path.Combine(outputDir, titleSlug, "breaking-changes.md"); var breakingDir = _fileSystem.Path.GetDirectoryName(breakingPath); if (!string.IsNullOrWhiteSpace(breakingDir) && !_fileSystem.Directory.Exists(breakingDir)) { @@ -1588,6 +1592,7 @@ private async Task RenderDeprecationsMarkdown( IDiagnosticsCollector collector, string outputDir, string title, + string titleSlug, string repo, List entries, Dictionary> entriesByType, @@ -1599,7 +1604,7 @@ Cancel ctx var deprecations = entriesByType.GetValueOrDefault("deprecation", []); var sb = new StringBuilder(); - sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{title}-deprecations]"); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-deprecations]"); if (deprecations.Count > 0) { @@ -1683,7 +1688,7 @@ Cancel ctx sb.AppendLine("_No deprecations._"); } - var deprecationsPath = _fileSystem.Path.Combine(outputDir, title, "deprecations.md"); + var deprecationsPath = _fileSystem.Path.Combine(outputDir, titleSlug, "deprecations.md"); var deprecationsDir = _fileSystem.Path.GetDirectoryName(deprecationsPath); if (!string.IsNullOrWhiteSpace(deprecationsDir) && !_fileSystem.Directory.Exists(deprecationsDir)) { @@ -1816,6 +1821,15 @@ private static string Beautify(string text) return result; } + private static string TitleToSlug(string title) + { + if (string.IsNullOrWhiteSpace(title)) + return string.Empty; + + // Convert to lowercase and replace spaces with dashes + return title.ToLowerInvariant().Replace(' ', '-'); + } + private static string Indent(string text) { // Indent each line with two spaces From dcaa3a6a590c0114626f6a5b0325bf1f4de6c762 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:30:30 -0800 Subject: [PATCH 20/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- .../ChangelogService.cs | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index b4357b33b..502809e26 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1108,18 +1108,6 @@ Cancel ctx // If entry has resolved data, validate it if (!string.IsNullOrWhiteSpace(entry.Title) && !string.IsNullOrWhiteSpace(entry.Type)) { - // Validate required fields in resolved entry - if (string.IsNullOrWhiteSpace(entry.Title)) - { - collector.EmitError(bundleInput.BundleFile, $"Entry in bundle is missing required field: title"); - return false; - } - - if 
(string.IsNullOrWhiteSpace(entry.Type)) - { - collector.EmitError(bundleInput.BundleFile, $"Entry in bundle is missing required field: type"); - return false; - } if (entry.Products == null || entry.Products.Count == 0) { From 4217f5801bd91bacd02924bbee6d4312cb0e7f05 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:31:00 -0800 Subject: [PATCH 21/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- .../Elastic.Documentation.Services/ChangelogService.cs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 502809e26..00debfcab 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1801,7 +1801,9 @@ private static string Beautify(string text) return string.Empty; // Capitalize first letter and ensure ends with period - var result = char.ToUpperInvariant(text[0]) + text.Substring(1); + var result = text.Length < 2 + ? char.ToUpperInvariant(text[0]).ToString() + : char.ToUpperInvariant(text[0]) + text[1..]; if (!result.EndsWith('.')) { result += "."; From 558ebe07b6987647aa512be7826145488390ea11 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:31:10 -0800 Subject: [PATCH 22/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- .../Elastic.Documentation.Services/ChangelogService.cs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 00debfcab..1e2be8070 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1791,7 +1791,9 @@ private static string FormatAreaHeader(string area) if (string.IsNullOrWhiteSpace(area)) return string.Empty; - var result = char.ToUpperInvariant(area[0]) + area.Substring(1); + var result = area.Length < 2 + ? 
char.ToUpperInvariant(area[0]).ToString() + : char.ToUpperInvariant(area[0]) + area[1..]; return result.Replace("-", " "); } From c9fb894aec5b562aee38138fc104f6893c9451d7 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:31:26 -0800 Subject: [PATCH 23/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- .../Elastic.Documentation.Services/ChangelogService.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 1e2be8070..3330f6433 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -531,9 +531,9 @@ Cancel ctx var filterCount = 0; if (input.All) filterCount++; - if (input.InputProducts != null && input.InputProducts.Count > 0) + if (input.InputProducts is { Count: > 0 }) filterCount++; - if (input.Prs != null && input.Prs.Length > 0) + if (input.Prs is { Length: > 0 }) filterCount++; if (!string.IsNullOrWhiteSpace(input.PrsFile)) filterCount++; From 676e101b19f901691e4fc1ceae20d7459d3fa4b1 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:31:36 -0800 Subject: [PATCH 24/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- src/services/Elastic.Documentation.Services/ChangelogService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 3330f6433..662943ef5 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -566,7 +566,7 @@ Cancel ctx .Where(p => !string.IsNullOrWhiteSpace(p)) .ToArray(); - if (input.Prs != null && input.Prs.Length > 0) + if (input.Prs is { Length: > 0 }) { foreach (var pr in input.Prs) { From b31964358810509dccbb4c41d80a171b7ab9dfe5 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:31:43 -0800 Subject: [PATCH 25/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- src/services/Elastic.Documentation.Services/ChangelogService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 662943ef5..334579d45 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -579,7 +579,7 @@ Cancel ctx _ = prsToMatch.Add(pr); } } - else if (input.Prs != null && input.Prs.Length > 0) + else if (input.Prs is { Length: > 0 }) { foreach (var pr in input.Prs) { From 249db6c46055962b426317db8b9a8f6c0e2cb06e Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:31:51 -0800 Subject: [PATCH 26/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- src/services/Elastic.Documentation.Services/ChangelogService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 334579d45..8edd50412 100644 --- 
a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -589,7 +589,7 @@ Cancel ctx // Build set of product/version combinations to filter by var productsToMatch = new HashSet<(string product, string version)>(); - if (input.InputProducts != null && input.InputProducts.Count > 0) + if (input.InputProducts is { Count: > 0 }) { foreach (var product in input.InputProducts) { From 31fca084830387a3d14607c1c404068ba966914f Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 09:32:03 -0800 Subject: [PATCH 27/54] Update src/services/Elastic.Documentation.Services/ChangelogService.cs Co-authored-by: Felipe Cotti --- src/services/Elastic.Documentation.Services/ChangelogService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 8edd50412..16d483ae0 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -768,7 +768,7 @@ Cancel ctx // Set products array in output // If --output-products was specified, use those values (override any from changelogs) - if (input.OutputProducts != null && input.OutputProducts.Count > 0) + if (input.OutputProducts is { Count: > 0 }) { bundledData.Products = input.OutputProducts .OrderBy(p => p.Product) From 405131926fb4f002257ff5e3267994d1e68d0a54 Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 2 Jan 2026 09:39:44 -0800 Subject: [PATCH 28/54] Consolidate regex patterns --- .../Elastic.Documentation.Services/ChangelogService.cs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 16d483ae0..8328690c1 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1830,15 +1830,12 @@ private static string Indent(string text) } [GeneratedRegex(@"\d+$", RegexOptions.None)] - private static partial Regex PrNumberRegex(); - - [GeneratedRegex(@"\d+$", RegexOptions.None)] - private static partial Regex IssueNumberRegex(); + private static partial Regex TrailingNumberRegex(); private static string FormatPrLink(string pr, string repo, bool hidePrivateLinks) { // Extract PR number - var match = PrNumberRegex().Match(pr); + var match = TrailingNumberRegex().Match(pr); var prNumber = match.Success ? match.Value : pr; // Format as markdown link @@ -1865,7 +1862,7 @@ private static string FormatPrLink(string pr, string repo, bool hidePrivateLinks private static string FormatIssueLink(string issue, string repo, bool hidePrivateLinks) { // Extract issue number - var match = IssueNumberRegex().Match(issue); + var match = TrailingNumberRegex().Match(issue); var issueNumber = match.Success ? 
match.Value : issue; // Format as markdown link From 188348c56b1aaa565cb788f40680364c61063c6d Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 2 Jan 2026 09:43:56 -0800 Subject: [PATCH 29/54] Static class for changelog entry types --- .../ChangelogService.cs | 29 +++++++++++++------ 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 8328690c1..845620090 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -27,6 +27,17 @@ public partial class ChangelogService( private readonly IFileSystem _fileSystem = new FileSystem(); private readonly IGitHubPrService? _githubPrService = githubPrService; + private static class ChangelogEntryTypes + { + public const string Feature = "feature"; + public const string Enhancement = "enhancement"; + public const string Security = "security"; + public const string BugFix = "bug-fix"; + public const string BreakingChange = "breaking-change"; + public const string Deprecation = "deprecation"; + public const string KnownIssue = "known-issue"; + } + public async Task CreateChangelog( IDiagnosticsCollector collector, ChangelogInput input, @@ -1392,19 +1403,19 @@ private async Task RenderIndexMarkdown( Cancel ctx ) { - var features = entriesByType.GetValueOrDefault("feature", []); - var enhancements = entriesByType.GetValueOrDefault("enhancement", []); - var security = entriesByType.GetValueOrDefault("security", []); - var bugFixes = entriesByType.GetValueOrDefault("bug-fix", []); + var features = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Feature, []); + var enhancements = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Enhancement, []); + var security = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Security, []); + var bugFixes = entriesByType.GetValueOrDefault(ChangelogEntryTypes.BugFix, []); if (features.Count == 0 && enhancements.Count == 0 && security.Count == 0 && bugFixes.Count == 0) { // Still create file with "no changes" message } - var hasBreakingChanges = entriesByType.ContainsKey("breaking-change"); - var hasDeprecations = entriesByType.ContainsKey("deprecation"); - var hasKnownIssues = entriesByType.ContainsKey("known-issue"); + var hasBreakingChanges = entriesByType.ContainsKey(ChangelogEntryTypes.BreakingChange); + var hasDeprecations = entriesByType.ContainsKey(ChangelogEntryTypes.Deprecation); + var hasKnownIssues = entriesByType.ContainsKey(ChangelogEntryTypes.KnownIssue); var otherLinks = new List(); if (hasKnownIssues) @@ -1477,7 +1488,7 @@ private async Task RenderBreakingChangesMarkdown( Cancel ctx ) { - var breakingChanges = entriesByType.GetValueOrDefault("breaking-change", []); + var breakingChanges = entriesByType.GetValueOrDefault(ChangelogEntryTypes.BreakingChange, []); var sb = new StringBuilder(); sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-breaking-changes]"); @@ -1589,7 +1600,7 @@ private async Task RenderDeprecationsMarkdown( Cancel ctx ) { - var deprecations = entriesByType.GetValueOrDefault("deprecation", []); + var deprecations = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Deprecation, []); var sb = new StringBuilder(); sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-deprecations]"); From 752db4c3443c4ca92d7f0d9300a5c168bd6be7e0 Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 2 Jan 2026 10:16:29 -0800 
Subject: [PATCH 30/54] Add docs for changelog render --- docs/_docset.yml | 1 + docs/cli/release/index.md | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/_docset.yml b/docs/_docset.yml index 6eab69091..a85eb37c8 100644 --- a/docs/_docset.yml +++ b/docs/_docset.yml @@ -159,6 +159,7 @@ toc: - file: index.md - file: changelog-add.md - file: changelog-bundle.md + - file: changelog-render.md - folder: mcp children: - file: index.md diff --git a/docs/cli/release/index.md b/docs/cli/release/index.md index 5a2829c29..3e8b9bb23 100644 --- a/docs/cli/release/index.md +++ b/docs/cli/release/index.md @@ -10,3 +10,4 @@ These commands are associated with product release documentation. - [changelog add](changelog-add.md) - Create a changelog file - [changelog bundle](changelog-bundle.md) - Create a changelog bundle file +- [changelog render](changelog-render.md) - Generate markdown output from changelog bundle files From 3aadae52320eeab7202e2396ad5d2df1d352d1ed Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 2 Jan 2026 10:21:13 -0800 Subject: [PATCH 31/54] Add changelog-render --- docs/cli/release/changelog-render.md | 44 ++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 docs/cli/release/changelog-render.md diff --git a/docs/cli/release/changelog-render.md b/docs/cli/release/changelog-render.md new file mode 100644 index 000000000..604222492 --- /dev/null +++ b/docs/cli/release/changelog-render.md @@ -0,0 +1,44 @@ +# changelog render + +Generate markdown files from changelog bundle files. + +To create the bundle files, use [](/cli/release/changelog-bundle.md). + +For details and examples, go to [](/contribute/changelog.md). + +## Usage + +```sh +docs-builder changelog render [options...] [-h|--help] +``` + +## Options + + --input > Required: Bundle input(s) in format "bundle-file-path, changelog-file-path, repo". Can be specified multiple times. Only bundle-file-path is required. [Required] + --output Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null] + --title Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null] + --subsections Optional: Group entries by area/component in subsections. Defaults to false + --hide-private-links Optional: Hide private links by commenting them out in markdown output. Defaults to false + +`--input >` +: One or more bundle input files. +: Each item can be specified as "bundle-file-path, changelog-file-path, repo" to accommodate files coming from multiple locations. +: Only `bundle-file-path` is required. + +`--output ` +: Optional: The output directory for rendered markdown files. +: Defaults to current directory. + +`--title ` +: Optional: The title to use for section headers, directories, and anchors in output markdown files. +: Defaults to the version in the first bundle. +: If the string contains spaces, they are replaced with dashes when used in directory names and anchors. + +`--subsections` +: Optional: Group entries by area in subsections. +: Defaults to false. + +`--hide-private-links` +: Optional: Hide private links by commenting them out in markdown output. +: This option is useful when rendering changelog bundles in private repositories. +: Defaults to false. 
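For context, a minimal invocation of the `changelog render` command documented above might look like the following sketch. The bundle path, changelog directory, repository name, and title are placeholder values chosen for illustration, not taken from this patch series:

```sh
# Render two bundles into ./release-notes, grouping entries by area and
# commenting out PR/issue links (all paths below are illustrative).
docs-builder changelog render \
  --input "./bundles/elasticsearch-9.2.0.yaml,./changelogs,elasticsearch" \
  --input "./bundles/cloud-serverless.yaml" \
  --output ./release-notes \
  --title "9.2.0" \
  --subsections \
  --hide-private-links
```

This mirrors the option set added in these patches: each `--input` follows the "bundle-file-path, changelog-file-path, repo" format and can be repeated, and `--hide-private-links` comments out the pull request and issue links in the generated markdown.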
From 0a2bb0695b6fa8658e4835ed2cf0df115380b63e Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 2 Jan 2026 10:34:03 -0800 Subject: [PATCH 32/54] Augment docs --- docs/cli/release/changelog-render.md | 2 ++ docs/contribute/changelog.md | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/cli/release/changelog-render.md b/docs/cli/release/changelog-render.md index 604222492..f62e961de 100644 --- a/docs/cli/release/changelog-render.md +++ b/docs/cli/release/changelog-render.md @@ -23,7 +23,9 @@ docs-builder changelog render [options...] [-h|--help] `--input >` : One or more bundle input files. : Each item can be specified as "bundle-file-path, changelog-file-path, repo" to accommodate files coming from multiple locations. +: For example, `--input "./changelog-bundle.yaml,./changelogs,elasticsearch"`. : Only `bundle-file-path` is required. +: Use `repo` if your changelogs do not contain full URLs for the pull requests or issues; otherwise they will be incorrectly derived with "elastic/elastic" in the URL by default. `--output ` : Optional: The output directory for rendered markdown files. diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index f85ce839e..bcb0b9e96 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -290,7 +290,7 @@ docs-builder changelog render \ --subsections \ <4> ``` -1. Provide information about the changelog bundle. The format is `", , "`. Only the `` is required. The `` is useful if the changelogs are not in the default directory and are not resolved within the bundle. The `` is useful for PR or issue link checks. You can specify `--input` multiple times to merge multiple bundles. +1. Provide information about the changelog bundle. The format is `", , "`. Only the `` is required. The `` is useful if the changelogs are not in the default directory and are not resolved within the bundle. The `` is necessary if your changelogs do not contain full URLs for the pull requests or issues. You can specify `--input` multiple times to merge multiple bundles. 2. The `--title` value is used for an output folder name and for section titles in the markdown files. If you omit `--title` and the first bundle contains a product `target` value, that value is used. Otherwise, if none of the bundles have product `target` fields, the title defaults to "unknown". 3. By default the command creates the output files in the current directory. 4. By default the changelog areas are not displayed in the output. Add `--subsections` to group changelog details by their `areas`. @@ -311,3 +311,5 @@ For example, the `index.md` output file contains information derived from the ch **Aggregations** * Break on FieldData when building global ordinals. [#108875](https://github.com/elastic/elastic/pull/108875) ``` + +To comment out the pull request and issue links, for example if they relate to a private repository, use the `--hide-private-links` option. From 659c090e58d106b98933919eaa4d40e395190464 Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 2 Jan 2026 10:37:15 -0800 Subject: [PATCH 33/54] Fix docs typo --- docs/cli/release/changelog-render.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/cli/release/changelog-render.md b/docs/cli/release/changelog-render.md index f62e961de..f394a82f7 100644 --- a/docs/cli/release/changelog-render.md +++ b/docs/cli/release/changelog-render.md @@ -14,12 +14,6 @@ docs-builder changelog render [options...] 
[-h|--help] ## Options - --input > Required: Bundle input(s) in format "bundle-file-path, changelog-file-path, repo". Can be specified multiple times. Only bundle-file-path is required. [Required] - --output Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null] - --title Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null] - --subsections Optional: Group entries by area/component in subsections. Defaults to false - --hide-private-links Optional: Hide private links by commenting them out in markdown output. Defaults to false - `--input >` : One or more bundle input files. : Each item can be specified as "bundle-file-path, changelog-file-path, repo" to accommodate files coming from multiple locations. From cbeb702c010911ddd4c44f40d7677bb57249726f Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 2 Jan 2026 16:35:59 -0800 Subject: [PATCH 34/54] Combine docs-builder bundle prs and prs-file options (#2411) --- docs/cli/release/changelog-bundle.md | 8 +- docs/contribute/changelog.md | 33 ++-- .../Changelog/ChangelogBundleInput.cs | 1 - .../ChangelogService.cs | 173 ++++++++++++++---- .../docs-builder/Commands/ChangelogCommand.cs | 32 +++- .../ChangelogServiceTests.cs | 99 +++++++++- 6 files changed, 280 insertions(+), 66 deletions(-) diff --git a/docs/cli/release/changelog-bundle.md b/docs/cli/release/changelog-bundle.md index 379ae2b11..223800c4c 100644 --- a/docs/cli/release/changelog-bundle.md +++ b/docs/cli/release/changelog-bundle.md @@ -38,10 +38,10 @@ docs-builder changelog bundle [options...] [-h|--help] : Optional: The GitHub repository owner, which is required when pull requests are specified as numbers. `--prs ` -: Filter by pull request URLs or numbers (can specify multiple times). - -`--prs-file ` -: The path to a newline-delimited file containing PR URLs or numbers. +: Filter by pull request URLs or numbers (comma-separated), or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. +: Each occurrence can be either comma-separated PRs (e.g., `--prs "https://github.com/owner/repo/pull/123,6789"`) or a file path (e.g., `--prs /path/to/file.txt`). +: When specifying PRs directly, provide comma-separated values. +: When specifying a file path, provide a single value that points to a newline-delimited file. `--repo ` : Optional: The GitHub repository name, which is required when PRs are specified as numbers. diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index bcb0b9e96..9d4cf2b4c 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -107,8 +107,7 @@ Options: --input-products ?> Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") [Default: null] --output-products ?> Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. [Default: null] --resolve Copy the contents of each changelog file into the entries array - --prs Filter by pull request URLs or numbers (can specify multiple times) [Default: null] - --prs-file Path to a newline-delimited file containing PR URLs or numbers [Default: null] + --prs Filter by pull request URLs or numbers (comma-separated), or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. 
[Default: null] --owner Optional: GitHub repository owner (used when PRs are specified as numbers) [Default: null] --repo Optional: GitHub repository name (used when PRs are specified as numbers) [Default: null] ``` @@ -124,12 +123,11 @@ You can specify only one of the following filter options: : For example, `"cloud-serverless 2025-12-02, cloud-serverless 2025-12-06"`. `--prs` -: Include changelogs for the specified pull request URLs or numbers. -: Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). - -`--prs-file` -: Include changelogs for the pull request URLs or numbers specified in a newline-delimited file. -: Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`, a short format (such as `owner/repo#123`) or just a number (in which case you must also provide `--owner` and `--repo` options). +: Include changelogs for the specified pull request URLs or numbers, or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. +: Each occurrence can be either comma-separated PRs (e.g., `--prs "https://github.com/owner/repo/pull/123,12345"`) or a file path (e.g., `--prs /path/to/file.txt`). +: When specifying PRs directly, provide comma-separated values. +: When specifying a file path, provide a single value that points to a newline-delimited file. The file should contain one PR URL or number per line. +: Pull requests can be identified by a full URL (such as `https://github.com/owner/repo/pull/123`), a short format (such as `owner/repo#123`), or just a number (in which case you must also provide `--owner` and `--repo` options). By default, the output file contains only the changelog file names and checksums. You can optionally use the `--resolve` command option to pull all of the content from each changelog into the bundle. @@ -171,13 +169,13 @@ If you add the `--resolve` option, the contents of each changelog will be includ You can use the `--prs` option (with the `--repo` and `--owner` options if you provide only the PR numbers) to create a bundle of the changelogs that relate to those pull requests: ```sh -docs-builder changelog bundle --prs 108875,135873,136886 \ <1> +docs-builder changelog bundle --prs "108875,135873,136886" \ <1> --repo elasticsearch \ <2> --owner elastic \ <3> --output-products "elasticsearch 9.2.2" <4> ``` -1. The list of pull request numbers to seek. +1. The comma-separated list of pull request numbers to seek. You can also specify multiple `--prs` options, each with comma-separated PRs or a file path. 2. The repository in the pull request URLs. This option is not required if you specify the short or full PR URLs in the `--prs` option. 3. The owner in the pull request URLs. This option is not required if you specify the short or full PR URLs in the `--prs` option. 4. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. @@ -213,17 +211,20 @@ https://github.com/elastic/elasticsearch/pull/136886 https://github.com/elastic/elasticsearch/pull/137126 ``` -You can use the `--prs-file` option to create a bundle of the changelogs that relate to those pull requests: +You can use the `--prs` option with a file path to create a bundle of the changelogs that relate to those pull requests. 
You can also combine multiple `--prs` options: ```sh -./docs-builder changelog bundle --prs-file test/9.2.2.txt \ <1> +./docs-builder changelog bundle \ + --prs "https://github.com/elastic/elasticsearch/pull/108875,135873" \ <1> + --prs test/9.2.2.txt \ <2> --output-products "elasticsearch 9.2.2" <3> - --resolve <3> + --resolve <4> ``` -1. The path for the file that lists the pull requests. If the file contains only PR numbers, you must add `--repo` and `--owner` command options. -2. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. -3. Optionally include the contents of each changelog in the output file. +1. Comma-separated list of pull request URLs or numbers. +2. The path for the file that lists the pull requests. If the file contains only PR numbers, you must add `--repo` and `--owner` command options. +3. The product metadata for the bundle. If it is not provided, it will be derived from all the changelog product values. +4. Optionally include the contents of each changelog in the output file. If you have changelog files that reference those pull requests, the command creates a file like this: diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs index 00c762f68..8a79bd5f5 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogBundleInput.cs @@ -16,7 +16,6 @@ public class ChangelogBundleInput public List? OutputProducts { get; set; } public bool Resolve { get; set; } public string[]? Prs { get; set; } - public string? PrsFile { get; set; } public string? Owner { get; set; } public string? 
Repo { get; set; } } diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 845620090..c67f8b632 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -546,55 +546,145 @@ Cancel ctx filterCount++; if (input.Prs is { Length: > 0 }) filterCount++; - if (!string.IsNullOrWhiteSpace(input.PrsFile)) - filterCount++; if (filterCount == 0) { - collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --input-products, --prs, or --prs-file"); + collector.EmitError(string.Empty, "At least one filter option must be specified: --all, --input-products, or --prs"); return false; } if (filterCount > 1) { - collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --input-products, --prs, or --prs-file"); + collector.EmitError(string.Empty, "Only one filter option can be specified at a time: --all, --input-products, or --prs"); return false; } - // Load PRs from file if specified + // Load PRs - check if --prs contains a file path or a list of PRs var prsToMatch = new HashSet(StringComparer.OrdinalIgnoreCase); - if (!string.IsNullOrWhiteSpace(input.PrsFile)) + if (input.Prs is { Length: > 0 }) { - if (!_fileSystem.File.Exists(input.PrsFile)) + // If there's exactly one value, check if it's a file path + if (input.Prs.Length == 1) { - collector.EmitError(input.PrsFile, "PRs file does not exist"); - return false; - } + var singleValue = input.Prs[0]; - var prsFileContent = await _fileSystem.File.ReadAllTextAsync(input.PrsFile, ctx); - var prsFromFile = prsFileContent - .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) - .Where(p => !string.IsNullOrWhiteSpace(p)) - .ToArray(); + // Check if it's a URL - URLs should always be treated as PRs, not file paths + var isUrl = singleValue.StartsWith("http://", StringComparison.OrdinalIgnoreCase) || + singleValue.StartsWith("https://", StringComparison.OrdinalIgnoreCase); - if (input.Prs is { Length: > 0 }) - { - foreach (var pr in input.Prs) + if (isUrl) { - _ = prsToMatch.Add(pr); + // Treat as PR identifier + _ = prsToMatch.Add(singleValue); } - } + else if (_fileSystem.File.Exists(singleValue)) + { + // File exists, read PRs from it + var prsFileContent = await _fileSystem.File.ReadAllTextAsync(singleValue, ctx); + var prsFromFile = prsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToArray(); + + foreach (var pr in prsFromFile) + { + _ = prsToMatch.Add(pr); + } + } + else + { + // Check if it looks like a file path (contains path separators or has extension) + var looksLikeFilePath = singleValue.Contains(_fileSystem.Path.DirectorySeparatorChar) || + singleValue.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(singleValue); - foreach (var pr in prsFromFile) - { - _ = prsToMatch.Add(pr); + if (looksLikeFilePath) + { + // File path doesn't exist - if there are no other PRs, return error; otherwise emit warning + if (prsToMatch.Count == 0) + { + collector.EmitError(singleValue, $"File does not exist: {singleValue}"); + return false; + } + else + { + collector.EmitWarning(singleValue, $"File does not exist, skipping: {singleValue}"); + } + } + else + { + // Doesn't look like a file path, treat as PR identifier + _ = prsToMatch.Add(singleValue); + 
} + } } - } - else if (input.Prs is { Length: > 0 }) - { - foreach (var pr in input.Prs) + else { - _ = prsToMatch.Add(pr); + // Multiple values - process all values first, then check for errors + var nonExistentFiles = new List(); + foreach (var value in input.Prs) + { + // Check if it's a URL - URLs should always be treated as PRs + var isUrl = value.StartsWith("http://", StringComparison.OrdinalIgnoreCase) || + value.StartsWith("https://", StringComparison.OrdinalIgnoreCase); + + if (isUrl) + { + // Treat as PR identifier + _ = prsToMatch.Add(value); + } + else if (_fileSystem.File.Exists(value)) + { + // File exists, read PRs from it + var prsFileContent = await _fileSystem.File.ReadAllTextAsync(value, ctx); + var prsFromFile = prsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToArray(); + + foreach (var pr in prsFromFile) + { + _ = prsToMatch.Add(pr); + } + } + else + { + // Check if it looks like a file path + var looksLikeFilePath = value.Contains(_fileSystem.Path.DirectorySeparatorChar) || + value.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(value); + + if (looksLikeFilePath) + { + // Track non-existent files to check later + nonExistentFiles.Add(value); + } + else + { + // Doesn't look like a file path, treat as PR identifier + _ = prsToMatch.Add(value); + } + } + } + + // After processing all values, handle non-existent files + if (nonExistentFiles.Count > 0) + { + // If there are no valid PRs and we have non-existent files, return error + if (prsToMatch.Count == 0) + { + collector.EmitError(nonExistentFiles[0], $"File does not exist: {nonExistentFiles[0]}"); + return false; + } + else + { + // Emit warnings for non-existent files since we have valid PRs + foreach (var file in nonExistentFiles) + { + collector.EmitWarning(file, $"File does not exist, skipping: {file}"); + } + } + } } } @@ -766,12 +856,6 @@ Cancel ctx } } - if (changelogEntries.Count == 0) - { - collector.EmitError(string.Empty, "No changelog entries matched the filter criteria"); - return false; - } - _logger.LogInformation("Found {Count} matching changelog entries", changelogEntries.Count); // Build bundled data @@ -805,7 +889,7 @@ Cancel ctx .ToList(); } // Otherwise, extract unique products/versions from changelog entries - else + else if (changelogEntries.Count > 0) { var productVersions = new HashSet<(string product, string version)>(); foreach (var (data, _, _, _) in changelogEntries) @@ -827,6 +911,18 @@ Cancel ctx }) .ToList(); } + else + { + // No entries and no products specified - initialize to empty list + bundledData.Products = []; + } + + // Check if we should allow empty result + if (changelogEntries.Count == 0) + { + collector.EmitError(string.Empty, "No changelog entries matched the filter criteria"); + return false; + } // Check for products with same product ID but different versions var productsByProductId = bundledData.Products.GroupBy(p => p.Product, StringComparer.OrdinalIgnoreCase) @@ -840,7 +936,12 @@ Cancel ctx } // Build entries - if (input.Resolve) + if (changelogEntries.Count == 0) + { + // No entries - initialize to empty list + bundledData.Entries = []; + } + else if (input.Resolve) { // When resolving, include changelog contents and validate required fields var resolvedEntries = new List(); diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index e1f2b5591..a20970970 
100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -110,8 +110,7 @@ async static (s, collector, state, ctx) => await s.CreateChangelog(collector, st /// Filter by products in format "product target lifecycle, ..." (e.g., "cloud-serverless 2025-12-02, cloud-serverless 2025-12-06") /// Explicitly set the products array in the output file in format "product target lifecycle, ...". Overrides any values from changelogs. /// Copy the contents of each changelog file into the entries array - /// Filter by pull request URLs or numbers (can specify multiple times) - /// Path to a newline-delimited file containing PR URLs or numbers + /// Filter by pull request URLs or numbers (comma-separated), or a path to a newline-delimited file containing PR URLs or numbers. Can be specified multiple times. /// Optional: GitHub repository owner (used when PRs are specified as numbers) /// Optional: GitHub repository name (used when PRs are specified as numbers) /// @@ -124,7 +123,6 @@ public async Task Bundle( [ProductInfoParser] List? outputProducts = null, bool resolve = false, string[]? prs = null, - string? prsFile = null, string? owner = null, string? repo = null, Cancel ctx = default @@ -134,6 +132,31 @@ public async Task Bundle( var service = new ChangelogService(logFactory, configurationContext, null); + // Process each --prs occurrence: each can be comma-separated PRs or a file path + var allPrs = new List(); + if (prs is { Length: > 0 }) + { + foreach (var prsValue in prs) + { + if (string.IsNullOrWhiteSpace(prsValue)) + continue; + + // Check if it contains commas - if so, split and add each as a PR + if (prsValue.Contains(',')) + { + var commaSeparatedPrs = prsValue + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)); + allPrs.AddRange(commaSeparatedPrs); + } + else + { + // Single value - pass as-is (will be handled by service layer as file path or PR) + allPrs.Add(prsValue); + } + } + } + var input = new ChangelogBundleInput { Directory = directory ?? Directory.GetCurrentDirectory(), @@ -142,8 +165,7 @@ public async Task Bundle( InputProducts = inputProducts, OutputProducts = outputProducts, Resolve = resolve, - Prs = prs, - PrsFile = prsFile, + Prs = allPrs.Count > 0 ? 
allPrs.ToArray() : null, Owner = owner, Repo = repo }; diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index 660dc826c..2b0f22464 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -1087,7 +1087,7 @@ await fileSystem.File.WriteAllTextAsync(prsFile, """ var input = new ChangelogBundleInput { Directory = changelogDir, - PrsFile = prsFile, + Prs = new[] { prsFile }, Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1342,7 +1342,7 @@ public async Task BundleChangelogs_WithMultipleProducts_CreatesValidBundle() } [Fact] - public async Task BundleChangelogs_WithInvalidPrsFile_ReturnsError() + public async Task BundleChangelogs_WithNonExistentFileAsPrs_ReturnsError() { // Arrange var service = new ChangelogService(_loggerFactory, _configurationContext, null); @@ -1350,10 +1350,12 @@ public async Task BundleChangelogs_WithInvalidPrsFile_ReturnsError() var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); fileSystem.Directory.CreateDirectory(changelogDir); + // Provide a non-existent file path - should return error since there are no other PRs + var nonexistentFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.txt"); var input = new ChangelogBundleInput { Directory = changelogDir, - PrsFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.txt"), + Prs = new[] { nonexistentFile }, Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; @@ -1361,9 +1363,98 @@ public async Task BundleChangelogs_WithInvalidPrsFile_ReturnsError() var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); // Assert + // File doesn't exist and there are no other PRs, so should return error result.Should().BeFalse(); _collector.Errors.Should().BeGreaterThan(0); - _collector.Diagnostics.Should().Contain(d => d.Message.Contains("PRs file does not exist")); + _collector.Diagnostics.Should().Contain(d => d.Message.Contains("File does not exist")); + } + + [Fact] + public async Task BundleChangelogs_WithUrlAsPrs_TreatsAsPrIdentifier() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create a changelog file for a specific PR + var changelog = """ + title: Test PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/123 + """; + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // Provide a URL - should be treated as a PR identifier, not a file path + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = new[] { "https://github.com/elastic/elasticsearch/pull/123" }, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await 
service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + // URL should be treated as PR identifier and match the changelog + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().Be(0); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-test-pr.yaml"); + } + + [Fact] + public async Task BundleChangelogs_WithNonExistentFileAndOtherPrs_EmitsWarning() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create a changelog file for a specific PR + var changelog = """ + title: Test PR + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/123 + """; + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-test-pr.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // Provide a non-existent file path along with a valid PR - should emit warning for file but continue with PR + var nonexistentFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "nonexistent.txt"); + var input = new ChangelogBundleInput + { + Directory = changelogDir, + Prs = new[] { nonexistentFile, "https://github.com/elastic/elasticsearch/pull/123" }, + Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") + }; + + // Act + var result = await service.BundleChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + // Should succeed because we have a valid PR, but should emit warning for the non-existent file + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + // Check that we have a warning about the file not existing + var fileWarning = _collector.Diagnostics.FirstOrDefault(d => d.Message.Contains("File does not exist, skipping")); + fileWarning.Should().NotBeNull("Expected a warning about the non-existent file being skipped"); + + var bundleContent = await fileSystem.File.ReadAllTextAsync(input.Output!, TestContext.Current.CancellationToken); + bundleContent.Should().Contain("name: 1755268130-test-pr.yaml"); } [Fact] From 236a8197b9e6c72c850d4b4938d0470dce759dca Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 5 Jan 2026 07:40:43 -0800 Subject: [PATCH 35/54] Potential fix for pull request finding 'Missed opportunity to use Where' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- src/tooling/docs-builder/Commands/ChangelogCommand.cs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index a20970970..b6bf11aa9 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -136,11 +136,8 @@ public async Task Bundle( var allPrs = new List(); if (prs is { Length: > 0 }) { - foreach (var prsValue in prs) + foreach (var prsValue in prs.Where(p => !string.IsNullOrWhiteSpace(p))) { - if 
(string.IsNullOrWhiteSpace(prsValue)) - continue; - // Check if it contains commas - if so, split and add each as a PR if (prsValue.Contains(',')) { From dc8f8c5aa7f8e8900a1e0ef8dc7a7c927a52cb5e Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 5 Jan 2026 13:45:22 -0800 Subject: [PATCH 36/54] Add docs-builder changelog render --hide-features option (#2412) --- docs/cli/release/changelog-render.md | 9 +- docs/contribute/changelog.md | 6 +- .../Changelog/ChangelogRenderInput.cs | 1 + .../ChangelogService.cs | 178 ++++++- .../docs-builder/Commands/ChangelogCommand.cs | 32 +- .../ChangelogServiceTests.cs | 442 ++++++++++++++++++ 6 files changed, 657 insertions(+), 11 deletions(-) diff --git a/docs/cli/release/changelog-render.md b/docs/cli/release/changelog-render.md index f394a82f7..128778251 100644 --- a/docs/cli/release/changelog-render.md +++ b/docs/cli/release/changelog-render.md @@ -35,6 +35,13 @@ docs-builder changelog render [options...] [-h|--help] : Defaults to false. `--hide-private-links` -: Optional: Hide private links by commenting them out in markdown output. +: Optional: Hide private links by commenting them out in the markdown output. : This option is useful when rendering changelog bundles in private repositories. : Defaults to false. + +`--hide-features ` +: Optional: Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. +: Each occurrence can be either comma-separated feature IDs (e.g., `--hide-features "feature:new-search-api,feature:enhanced-analytics"`) or a file path (e.g., `--hide-features /path/to/file.txt`). +: When specifying feature IDs directly, provide comma-separated values. +: When specifying a file path, provide a single value that points to a newline-delimited file. The file should contain one feature ID per line. +: Entries with matching `feature-id` values will be commented out in the markdown output and a warning will be emitted. diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index 9d4cf2b4c..723cd1e35 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -259,7 +259,8 @@ Options: --output Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null] --title Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null] --subsections Optional: Group entries by area/component in subsections. Defaults to false - --hide-private-links Optional: Hide private links by commenting them out in markdown output. Defaults to false + --hide-private-links Optional: Hide private links by commenting them out in the markdown output. Defaults to false + --hide-features Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out in the markdown output. [Default: null] ``` Before you can use this command you must create changelog files and collect them into bundles. @@ -314,3 +315,6 @@ For example, the `index.md` output file contains information derived from the ch ``` To comment out the pull request and issue links, for example if they relate to a private repository, use the `--hide-private-links` option. + +If you have changelogs with `feature-id` values and you want them to be omitted from the output, use the `--hide-features` option. +For more information, refer to [](/cli/release/changelog-render.md). 
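As a rough sketch of how the `--hide-features` filter combines with the other render options: the invocation below comments out any entries whose feature IDs appear either in the comma-separated list or in a newline-delimited file. The feature IDs, file path, and output directory shown here are placeholders rather than values defined by this change:

```sh
# Hide two features listed inline, plus any IDs recorded in a suppression file.
docs-builder changelog render \
  --input "./changelog-bundle.yaml,./changelogs,elasticsearch" \
  --hide-features "feature:new-search-api,feature:enhanced-analytics" \
  --hide-features ./hidden-features.txt \
  --output ./release-notes
```

Each `--hide-features` occurrence is either a comma-separated list or a single file path; matching entries stay in the rendered markdown but are commented out, and a warning is emitted for each one.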
diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs index b0c596464..2daa7d569 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs @@ -14,5 +14,6 @@ public class ChangelogRenderInput public string? Title { get; set; } public bool Subsections { get; set; } public bool HidePrivateLinks { get; set; } + public string[]? HideFeatures { get; set; } } diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index c67f8b632..dd5916418 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1452,17 +1452,131 @@ Cancel ctx // Convert title to slug format for folder names and anchors (lowercase, dashes instead of spaces) var titleSlug = TitleToSlug(title); + // Load feature IDs to hide - check if --hide-features contains a file path or a list of feature IDs + var featureIdsToHide = new HashSet(StringComparer.OrdinalIgnoreCase); + if (input.HideFeatures is { Length: > 0 }) + { + // If there's exactly one value, check if it's a file path + if (input.HideFeatures.Length == 1) + { + var singleValue = input.HideFeatures[0]; + + if (_fileSystem.File.Exists(singleValue)) + { + // File exists, read feature IDs from it + var featureIdsFileContent = await _fileSystem.File.ReadAllTextAsync(singleValue, ctx); + var featureIdsFromFile = featureIdsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(f => !string.IsNullOrWhiteSpace(f)) + .ToArray(); + + foreach (var featureId in featureIdsFromFile) + { + _ = featureIdsToHide.Add(featureId); + } + } + else + { + // Check if it looks like a file path + var looksLikeFilePath = singleValue.Contains(_fileSystem.Path.DirectorySeparatorChar) || + singleValue.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(singleValue); + + if (looksLikeFilePath) + { + // File path doesn't exist + collector.EmitError(singleValue, $"File does not exist: {singleValue}"); + return false; + } + else + { + // Doesn't look like a file path, treat as feature ID + _ = featureIdsToHide.Add(singleValue); + } + } + } + else + { + // Multiple values - process all values first, then check for errors + var nonExistentFiles = new List(); + foreach (var value in input.HideFeatures) + { + if (_fileSystem.File.Exists(value)) + { + // File exists, read feature IDs from it + var featureIdsFileContent = await _fileSystem.File.ReadAllTextAsync(value, ctx); + var featureIdsFromFile = featureIdsFileContent + .Split('\n', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(f => !string.IsNullOrWhiteSpace(f)) + .ToArray(); + + foreach (var featureId in featureIdsFromFile) + { + _ = featureIdsToHide.Add(featureId); + } + } + else + { + // Check if it looks like a file path + var looksLikeFilePath = value.Contains(_fileSystem.Path.DirectorySeparatorChar) || + value.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(value); + + if (looksLikeFilePath) + { + // Track non-existent files to check later + nonExistentFiles.Add(value); + } + else + { + // Doesn't look like a file path, treat as feature ID + _ = featureIdsToHide.Add(value); + } + } + } 
+ + // Report errors for non-existent files + if (nonExistentFiles.Count > 0) + { + foreach (var filePath in nonExistentFiles) + { + collector.EmitError(filePath, $"File does not exist: {filePath}"); + } + return false; + } + } + } + + // Track hidden entries for warnings + var hiddenEntries = new List<(string title, string featureId)>(); + foreach (var (entry, _) in allResolvedEntries) + { + if (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) + { + hiddenEntries.Add((entry.Title ?? "Unknown", entry.FeatureId)); + } + } + + // Emit warnings for hidden entries + if (hiddenEntries.Count > 0) + { + foreach (var (entryTitle, featureId) in hiddenEntries) + { + collector.EmitWarning(string.Empty, $"Changelog entry '{entryTitle}' with feature-id '{featureId}' will be commented out in markdown output"); + } + } + // Render markdown files (use first repo found, or default) var repoForRendering = allResolvedEntries.Count > 0 ? allResolvedEntries[0].repo : defaultRepo; // Render index.md (features, enhancements, bug fixes, security) - await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); + await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, ctx); // Render breaking-changes.md - await RenderBreakingChangesMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); + await RenderBreakingChangesMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, ctx); // Render deprecations.md - await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, ctx); + await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, ctx); _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); @@ -1501,6 +1615,7 @@ private async Task RenderIndexMarkdown( Dictionary> entriesByType, bool subsections, bool hidePrivateLinks, + HashSet featureIdsToHide, Cancel ctx ) { @@ -1548,7 +1663,7 @@ Cancel ctx { sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{titleSlug}-features-enhancements]"); var combined = features.Concat(enhancements).ToList(); - RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks); + RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide); } if (security.Count > 0 || bugFixes.Count > 0) @@ -1556,7 +1671,7 @@ Cancel ctx sb.AppendLine(); sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{titleSlug}-fixes]"); var combined = security.Concat(bugFixes).ToList(); - RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks); + RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide); } } else @@ -1586,6 +1701,7 @@ private async Task RenderBreakingChangesMarkdown( 
Dictionary> entriesByType, bool subsections, bool hidePrivateLinks, + HashSet featureIdsToHide, Cancel ctx ) { @@ -1608,7 +1724,13 @@ Cancel ctx foreach (var entry in areaGroup) { + var shouldHide = !string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId); + sb.AppendLine(); + if (shouldHide) + { + sb.AppendLine(""); + } } } } @@ -1698,6 +1824,7 @@ private async Task RenderDeprecationsMarkdown( Dictionary> entriesByType, bool subsections, bool hidePrivateLinks, + HashSet featureIdsToHide, Cancel ctx ) { @@ -1720,7 +1847,13 @@ Cancel ctx foreach (var entry in areaGroup) { + var shouldHide = !string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId); + sb.AppendLine(); + if (shouldHide) + { + sb.AppendLine(""); + } } } } @@ -1799,7 +1936,7 @@ Cancel ctx } [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] - private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections, bool hidePrivateLinks) + private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections, bool hidePrivateLinks, HashSet featureIdsToHide) { var groupedByArea = entries.GroupBy(e => GetComponent(e)).ToList(); foreach (var areaGroup in groupedByArea) @@ -1813,6 +1950,12 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, foreach (var entry in areaGroup) { + var shouldHide = !string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId); + + if (shouldHide) + { + sb.Append("% "); + } sb.Append("* "); sb.Append(Beautify(entry.Title)); @@ -1823,6 +1966,10 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, if (!string.IsNullOrWhiteSpace(entry.Pr)) { sb.AppendLine(); + if (shouldHide) + { + sb.Append("% "); + } sb.Append(" "); sb.Append(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); hasCommentedLinks = true; @@ -1833,6 +1980,10 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, foreach (var issue in entry.Issues) { sb.AppendLine(); + if (shouldHide) + { + sb.Append("% "); + } sb.Append(" "); sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); hasCommentedLinks = true; @@ -1877,7 +2028,20 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, sb.AppendLine(); } var indented = Indent(entry.Description); - sb.AppendLine(indented); + if (shouldHide) + { + // Comment out each line of the description + var indentedLines = indented.Split('\n'); + foreach (var line in indentedLines) + { + sb.Append("% "); + sb.AppendLine(line); + } + } + else + { + sb.AppendLine(indented); + } } else { diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index b6bf11aa9..c50f1f0e0 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -181,7 +181,8 @@ async static (s, collector, state, ctx) => await s.BundleChangelogs(collector, s /// Optional: Output directory for rendered markdown files. Defaults to current directory /// Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle /// Optional: Group entries by area/component in subsections. Defaults to false - /// Optional: Hide private links by commenting them out in markdown output. 
Defaults to false + /// Optional: Hide private links by commenting them out in the markdown output. Defaults to false + /// Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out in the markdown output. /// [Command("render")] public async Task Render( @@ -190,6 +191,7 @@ public async Task Render( string? title = null, bool subsections = false, bool hidePrivateLinks = false, + string[]? hideFeatures = null, Cancel ctx = default ) { @@ -197,13 +199,39 @@ public async Task Render( var service = new ChangelogService(logFactory, configurationContext, null); + // Process each --hide-features occurrence: each can be comma-separated feature IDs or a file path + var allFeatureIds = new List(); + if (hideFeatures is { Length: > 0 }) + { + foreach (var hideFeaturesValue in hideFeatures) + { + if (string.IsNullOrWhiteSpace(hideFeaturesValue)) + continue; + + // Check if it contains commas - if so, split and add each as a feature ID + if (hideFeaturesValue.Contains(',')) + { + var commaSeparatedFeatureIds = hideFeaturesValue + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(f => !string.IsNullOrWhiteSpace(f)); + allFeatureIds.AddRange(commaSeparatedFeatureIds); + } + else + { + // Single value - pass as-is (will be handled by service layer as file path or feature ID) + allFeatureIds.Add(hideFeaturesValue); + } + } + } + var renderInput = new ChangelogRenderInput { Bundles = input ?? [], Output = output, Title = title, Subsections = subsections, - HidePrivateLinks = hidePrivateLinks + HidePrivateLinks = hidePrivateLinks, + HideFeatures = allFeatureIds.Count > 0 ? allFeatureIds.ToArray() : null }; serviceInvoker.AddCommand(service, renderInput, diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index 2b0f22464..7257f1fee 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -2466,6 +2466,448 @@ public async Task RenderChangelogs_WithTitleAndNoTargets_NoWarning() d.Message.Contains("No --title option provided")); } + [Fact] + public async Task RenderChangelogs_WithHideFeatures_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog with feature-id + var changelog1 = """ + title: Hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature_id: feature:hidden-api + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This feature should be hidden + """; + + // Create changelog without feature-id (should not be hidden) + var changelog2 = """ + title: Visible feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/101 + description: This feature should be visible + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-hidden.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, 
TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-hidden.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-visible.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:hidden-api"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Hidden feature") && + d.Message.Contains("feature:hidden-api") && + d.Message.Contains("will be commented out")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Hidden entry should be commented out with % prefix + indexContent.Should().Contain("% * Hidden feature"); + // Visible entry should not be commented + indexContent.Should().Contain("* Visible feature"); + indexContent.Should().NotContain("% * Visible feature"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_BreakingChange_UsesBlockComments() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden breaking change + type: breaking-change + products: + - product: elasticsearch + target: 9.2.0 + feature_id: feature:hidden-breaking + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This breaking change should be hidden + impact: Users will be affected + action: Update your code + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-breaking.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-breaking.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = 
fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:hidden-breaking"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var breakingFile = fileSystem.Path.Combine(outputDir, "9.2.0", "breaking-changes.md"); + fileSystem.File.Exists(breakingFile).Should().BeTrue(); + + var breakingContent = await fileSystem.File.ReadAllTextAsync(breakingFile, TestContext.Current.CancellationToken); + // Should use block comments + breakingContent.Should().Contain(""); + breakingContent.Should().Contain("Hidden breaking change"); + // Entry should be between comment markers + var commentStart = breakingContent.IndexOf("", StringComparison.Ordinal); + commentStart.Should().BeLessThan(commentEnd); + breakingContent.Substring(commentStart, commentEnd - commentStart).Should().Contain("Hidden breaking change"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_Deprecation_UsesBlockComments() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden deprecation + type: deprecation + products: + - product: elasticsearch + target: 9.2.0 + feature_id: feature:hidden-deprecation + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This deprecation should be hidden + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-deprecation.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-deprecation.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:hidden-deprecation"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var deprecationsFile = fileSystem.Path.Combine(outputDir, "9.2.0", "deprecations.md"); + fileSystem.File.Exists(deprecationsFile).Should().BeTrue(); + + var deprecationsContent = await fileSystem.File.ReadAllTextAsync(deprecationsFile, TestContext.Current.CancellationToken); + // Should use block comments + deprecationsContent.Should().Contain(""); + deprecationsContent.Should().Contain("Hidden deprecation"); + } + + [Fact] + public async Task 
RenderChangelogs_WithHideFeatures_CommaSeparated_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog1 = """ + title: First hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature_id: feature:first + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelog2 = """ + title: Second hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature_id: feature:second + pr: https://github.com/elastic/elasticsearch/pull/101 + """; + + var changelog3 = """ + title: Visible feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/102 + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-first.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-second.yaml"); + var changelogFile3 = fileSystem.Path.Combine(changelogDir, "1755268150-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile3, changelog3, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-first.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-second.yaml + checksum: {ComputeSha1(changelog2)} + - file: + name: 1755268150-visible.yaml + checksum: {ComputeSha1(changelog3)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:first", "feature:second"] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("% * First hidden feature"); + indexContent.Should().Contain("% * Second hidden feature"); + indexContent.Should().Contain("* Visible feature"); + indexContent.Should().NotContain("% * Visible feature"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_FromFile_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), 
Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature_id: feature:from-file + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-hidden.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-hidden.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Create feature IDs file + var featureIdsFile = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "feature-ids.txt"); + fileSystem.Directory.CreateDirectory(fileSystem.Path.GetDirectoryName(featureIdsFile)!); + await fileSystem.File.WriteAllTextAsync(featureIdsFile, "feature:from-file\nfeature:another", TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = [featureIdsFile] + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + indexContent.Should().Contain("% * Hidden feature"); + } + + [Fact] + public async Task RenderChangelogs_WithHideFeatures_CaseInsensitive_MatchesFeatureIds() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + var changelog = """ + title: Hidden feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + feature_id: Feature:UpperCase + pr: https://github.com/elastic/elasticsearch/pull/100 + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-hidden.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-hidden.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), 
Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + HideFeatures = ["feature:uppercase"] // Different case + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Should match case-insensitively + indexContent.Should().Contain("% * Hidden feature"); + } + private static string ComputeSha1(string content) { var bytes = System.Text.Encoding.UTF8.GetBytes(content); From ae7a00b9effe0575299052111fd40822924f3094 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 5 Jan 2026 15:34:50 -0800 Subject: [PATCH 37/54] Potential fix for pull request finding 'Missed opportunity to use Where' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- src/tooling/docs-builder/Commands/ChangelogCommand.cs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index c50f1f0e0..fae44af60 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -9,6 +9,7 @@ using Elastic.Documentation.Services; using Elastic.Documentation.Services.Changelog; using Microsoft.Extensions.Logging; +using System.Linq; namespace Documentation.Builder.Commands; @@ -203,11 +204,8 @@ public async Task Render( var allFeatureIds = new List(); if (hideFeatures is { Length: > 0 }) { - foreach (var hideFeaturesValue in hideFeatures) + foreach (var hideFeaturesValue in hideFeatures.Where(v => !string.IsNullOrWhiteSpace(v))) { - if (string.IsNullOrWhiteSpace(hideFeaturesValue)) - continue; - // Check if it contains commas - if so, split and add each as a feature ID if (hideFeaturesValue.Contains(',')) { From 20dfba1f2c1719c08739bd4dd91752ec7ece0a91 Mon Sep 17 00:00:00 2001 From: lcawl Date: Tue, 6 Jan 2026 12:00:37 -0800 Subject: [PATCH 38/54] Fix failing test --- .../ChangelogService.cs | 41 +++++++++++++++++-- .../docs-builder/Commands/ChangelogCommand.cs | 2 +- .../ChangelogServiceTests.cs | 14 +++---- 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index dd5916418..5b6e74adb 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1072,6 +1072,9 @@ private static string ComputeSha1(string content) [GeneratedRegex(@"(\s+)version:", RegexOptions.Multiline)] private static partial Regex VersionToTargetRegex(); + [GeneratedRegex(@"(?:https?://)?(?:www\.)?github\.com/([^/]+)/([^/]+)/pull/(\d+)", RegexOptions.IgnoreCase)] + private static partial Regex GitHubPrUrlRegex(); + private static string NormalizePrForComparison(string pr, string? defaultOwner, string? defaultRepo) { // Parse PR using the same logic as GitHubPrService.ParsePrUrl @@ -1081,16 +1084,34 @@ private static string NormalizePrForComparison(string pr, string? 
defaultOwner, if (pr.StartsWith("https://github.com/", StringComparison.OrdinalIgnoreCase) || pr.StartsWith("http://github.com/", StringComparison.OrdinalIgnoreCase)) { + // Use regex to parse URL more reliably + var match = GitHubPrUrlRegex().Match(pr); + if (match.Success) + { + var owner = match.Groups[1].Value; + var repo = match.Groups[2].Value; + var prPart = match.Groups[3].Value; + if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo) && + int.TryParse(prPart, out var prNum)) + { + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } + } + + // Fallback to URI parsing if regex fails try { var uri = new Uri(pr); var segments = uri.Segments; + // segments[0] is "/", segments[1] is "owner/", segments[2] is "repo/", segments[3] is "pull/", segments[4] is "123" if (segments.Length >= 5 && segments[3].Equals("pull/", StringComparison.OrdinalIgnoreCase)) { var owner = segments[1].TrimEnd('/'); var repo = segments[2].TrimEnd('/'); - var prNum = segments[4].Trim(); - return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + if (int.TryParse(segments[4], out var prNum)) + { + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } } } catch (UriFormatException) @@ -1103,7 +1124,21 @@ private static string NormalizePrForComparison(string pr, string? defaultOwner, var hashIndex = pr.LastIndexOf('#'); if (hashIndex > 0 && hashIndex < pr.Length - 1) { - return pr.ToLowerInvariant(); + var repoPart = pr[..hashIndex].Trim(); + var prPart = pr[(hashIndex + 1)..].Trim(); + if (int.TryParse(prPart, out var prNum)) + { + var repoParts = repoPart.Split('/'); + if (repoParts.Length == 2) + { + var owner = repoParts[0].Trim(); + var repo = repoParts[1].Trim(); + if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo)) + { + return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); + } + } + } } // Handle just a PR number when owner/repo are provided diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index fae44af60..61f496195 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -2,6 +2,7 @@ // Elasticsearch B.V licenses this file to you under the Apache 2.0 License. 
// See the LICENSE file in the project root for more information +using System.Linq; using ConsoleAppFramework; using Documentation.Builder.Arguments; using Elastic.Documentation.Configuration; @@ -9,7 +10,6 @@ using Elastic.Documentation.Services; using Elastic.Documentation.Services.Changelog; using Microsoft.Extensions.Logging; -using System.Linq; namespace Documentation.Builder.Commands; diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index 523d140dc..951e5aab7 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -2482,7 +2482,7 @@ public async Task RenderChangelogs_WithHideFeatures_CommentsOutMatchingEntries() products: - product: elasticsearch target: 9.2.0 - feature_id: feature:hidden-api + feature-id: feature:hidden-api pr: https://github.com/elastic/elasticsearch/pull/100 description: This feature should be hidden """; @@ -2571,7 +2571,7 @@ public async Task RenderChangelogs_WithHideFeatures_BreakingChange_UsesBlockComm products: - product: elasticsearch target: 9.2.0 - feature_id: feature:hidden-breaking + feature-id: feature:hidden-breaking pr: https://github.com/elastic/elasticsearch/pull/100 description: This breaking change should be hidden impact: Users will be affected @@ -2643,7 +2643,7 @@ public async Task RenderChangelogs_WithHideFeatures_Deprecation_UsesBlockComment products: - product: elasticsearch target: 9.2.0 - feature_id: feature:hidden-deprecation + feature-id: feature:hidden-deprecation pr: https://github.com/elastic/elasticsearch/pull/100 description: This deprecation should be hidden """; @@ -2708,7 +2708,7 @@ public async Task RenderChangelogs_WithHideFeatures_CommaSeparated_CommentsOutMa products: - product: elasticsearch target: 9.2.0 - feature_id: feature:first + feature-id: feature:first pr: https://github.com/elastic/elasticsearch/pull/100 """; @@ -2718,7 +2718,7 @@ public async Task RenderChangelogs_WithHideFeatures_CommaSeparated_CommentsOutMa products: - product: elasticsearch target: 9.2.0 - feature_id: feature:second + feature-id: feature:second pr: https://github.com/elastic/elasticsearch/pull/101 """; @@ -2799,7 +2799,7 @@ public async Task RenderChangelogs_WithHideFeatures_FromFile_CommentsOutMatching products: - product: elasticsearch target: 9.2.0 - feature_id: feature:from-file + feature-id: feature:from-file pr: https://github.com/elastic/elasticsearch/pull/100 """; @@ -2863,7 +2863,7 @@ public async Task RenderChangelogs_WithHideFeatures_CaseInsensitive_MatchesFeatu products: - product: elasticsearch target: 9.2.0 - feature_id: Feature:UpperCase + feature-id: Feature:UpperCase pr: https://github.com/elastic/elasticsearch/pull/100 """; From df4bd8b79a0e1171507f45f6a90b5c71e9caae28 Mon Sep 17 00:00:00 2001 From: lcawl Date: Tue, 6 Jan 2026 13:23:57 -0800 Subject: [PATCH 39/54] Fix BundleChangelogs_WithShortPrFormat_FiltersCorrectly --- .../ChangelogService.cs | 152 ++++++++++++++---- .../ChangelogServiceTests.cs | 4 +- 2 files changed, 120 insertions(+), 36 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 5b6e74adb..2084c0ee6 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -593,29 +593,48 @@ Cancel ctx } else { - // Check if it 
looks like a file path (contains path separators or has extension) - var looksLikeFilePath = singleValue.Contains(_fileSystem.Path.DirectorySeparatorChar) || - singleValue.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || - _fileSystem.Path.HasExtension(singleValue); + // Check if it's in short PR format (owner/repo#number) before treating as file path + var hashIndex = singleValue.LastIndexOf('#'); + var isShortPrFormat = false; + if (hashIndex > 0 && hashIndex < singleValue.Length - 1) + { + var repoPart = singleValue[..hashIndex]; + var prPart = singleValue[(hashIndex + 1)..]; + var repoParts = repoPart.Split('/'); + // Check if it matches owner/repo#number format + if (repoParts.Length == 2 && int.TryParse(prPart, out _)) + { + isShortPrFormat = true; + _ = prsToMatch.Add(singleValue); + } + } - if (looksLikeFilePath) + if (!isShortPrFormat) { - // File path doesn't exist - if there are no other PRs, return error; otherwise emit warning - if (prsToMatch.Count == 0) + // Check if it looks like a file path (contains path separators or has extension) + var looksLikeFilePath = singleValue.Contains(_fileSystem.Path.DirectorySeparatorChar) || + singleValue.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(singleValue); + + if (looksLikeFilePath) { - collector.EmitError(singleValue, $"File does not exist: {singleValue}"); - return false; + // File path doesn't exist - if there are no other PRs, return error; otherwise emit warning + if (prsToMatch.Count == 0) + { + collector.EmitError(singleValue, $"File does not exist: {singleValue}"); + return false; + } + else + { + collector.EmitWarning(singleValue, $"File does not exist, skipping: {singleValue}"); + } } else { - collector.EmitWarning(singleValue, $"File does not exist, skipping: {singleValue}"); + // Doesn't look like a file path, treat as PR identifier + _ = prsToMatch.Add(singleValue); } } - else - { - // Doesn't look like a file path, treat as PR identifier - _ = prsToMatch.Add(singleValue); - } } } else @@ -649,20 +668,39 @@ Cancel ctx } else { - // Check if it looks like a file path - var looksLikeFilePath = value.Contains(_fileSystem.Path.DirectorySeparatorChar) || - value.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || - _fileSystem.Path.HasExtension(value); - - if (looksLikeFilePath) + // Check if it's in short PR format (owner/repo#number) before treating as file path + var hashIndex = value.LastIndexOf('#'); + var isShortPrFormat = false; + if (hashIndex > 0 && hashIndex < value.Length - 1) { - // Track non-existent files to check later - nonExistentFiles.Add(value); + var repoPart = value[..hashIndex]; + var prPart = value[(hashIndex + 1)..]; + var repoParts = repoPart.Split('/'); + // Check if it matches owner/repo#number format + if (repoParts.Length == 2 && int.TryParse(prPart, out _)) + { + isShortPrFormat = true; + _ = prsToMatch.Add(value); + } } - else + + if (!isShortPrFormat) { - // Doesn't look like a file path, treat as PR identifier - _ = prsToMatch.Add(value); + // Check if it looks like a file path + var looksLikeFilePath = value.Contains(_fileSystem.Path.DirectorySeparatorChar) || + value.Contains(_fileSystem.Path.AltDirectorySeparatorChar) || + _fileSystem.Path.HasExtension(value); + + if (looksLikeFilePath) + { + // Track non-existent files to check later + nonExistentFiles.Add(value); + } + else + { + // Doesn't look like a file path, treat as PR identifier + _ = prsToMatch.Add(value); + } } } } @@ -688,6 +726,47 @@ Cancel ctx } } + // Validate that if any 
PR is just a number (not a URL and not in owner/repo#number format), + // then owner and repo must be provided + if (prsToMatch.Count > 0) + { + var hasNumericOnlyPr = false; + foreach (var pr in prsToMatch) + { + // Check if it's a URL - URLs don't need owner/repo + var isUrl = pr.StartsWith("http://", StringComparison.OrdinalIgnoreCase) || + pr.StartsWith("https://", StringComparison.OrdinalIgnoreCase); + + if (isUrl) + continue; + + // Check if it's in owner/repo#number format - these don't need owner/repo + var hashIndex = pr.LastIndexOf('#'); + if (hashIndex > 0 && hashIndex < pr.Length - 1) + { + var repoPart = pr[..hashIndex].Trim(); + var prPart = pr[(hashIndex + 1)..].Trim(); + var repoParts = repoPart.Split('/'); + // If it has a # and the part before # contains a /, it's likely owner/repo#number format + if (repoParts.Length == 2 && int.TryParse(prPart, out _)) + continue; + } + + // If it's just a number, it needs owner/repo + if (int.TryParse(pr, out _)) + { + hasNumericOnlyPr = true; + break; + } + } + + if (hasNumericOnlyPr && (string.IsNullOrWhiteSpace(input.Owner) || string.IsNullOrWhiteSpace(input.Repo))) + { + collector.EmitError(string.Empty, "When --prs contains PR numbers (not URLs or owner/repo#number format), both --owner and --repo must be provided"); + return false; + } + } + // Build set of product/version combinations to filter by var productsToMatch = new HashSet<(string product, string version)>(); if (input.InputProducts is { Count: > 0 }) @@ -1072,7 +1151,7 @@ private static string ComputeSha1(string content) [GeneratedRegex(@"(\s+)version:", RegexOptions.Multiline)] private static partial Regex VersionToTargetRegex(); - [GeneratedRegex(@"(?:https?://)?(?:www\.)?github\.com/([^/]+)/([^/]+)/pull/(\d+)", RegexOptions.IgnoreCase)] + [GeneratedRegex(@"github\.com/([^/]+)/([^/]+)/pull/(\d+)", RegexOptions.IgnoreCase)] private static partial Regex GitHubPrUrlRegex(); private static string NormalizePrForComparison(string pr, string? defaultOwner, string? defaultRepo) @@ -1080,17 +1159,20 @@ private static string NormalizePrForComparison(string pr, string? defaultOwner, // Parse PR using the same logic as GitHubPrService.ParsePrUrl // Return a normalized format (owner/repo#number) for comparison + // Trim whitespace first + pr = pr.Trim(); + // Handle full URL: https://github.com/owner/repo/pull/123 if (pr.StartsWith("https://github.com/", StringComparison.OrdinalIgnoreCase) || pr.StartsWith("http://github.com/", StringComparison.OrdinalIgnoreCase)) { // Use regex to parse URL more reliably var match = GitHubPrUrlRegex().Match(pr); - if (match.Success) + if (match.Success && match.Groups.Count >= 4) { - var owner = match.Groups[1].Value; - var repo = match.Groups[2].Value; - var prPart = match.Groups[3].Value; + var owner = match.Groups[1].Value.Trim(); + var repo = match.Groups[2].Value.Trim(); + var prPart = match.Groups[3].Value.Trim(); if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo) && int.TryParse(prPart, out var prNum)) { @@ -1106,9 +1188,11 @@ private static string NormalizePrForComparison(string pr, string? 
defaultOwner, // segments[0] is "/", segments[1] is "owner/", segments[2] is "repo/", segments[3] is "pull/", segments[4] is "123" if (segments.Length >= 5 && segments[3].Equals("pull/", StringComparison.OrdinalIgnoreCase)) { - var owner = segments[1].TrimEnd('/'); - var repo = segments[2].TrimEnd('/'); - if (int.TryParse(segments[4], out var prNum)) + var owner = segments[1].TrimEnd('/').Trim(); + var repo = segments[2].TrimEnd('/').Trim(); + var prPart = segments[4].TrimEnd('/').Trim(); + if (!string.IsNullOrWhiteSpace(owner) && !string.IsNullOrWhiteSpace(repo) && + int.TryParse(prPart, out var prNum)) { return $"{owner}/{repo}#{prNum}".ToLowerInvariant(); } diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index 951e5aab7..ddd693a62 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -1161,7 +1161,7 @@ public async Task BundleChangelogs_WithShortPrFormat_FiltersCorrectly() products: - product: elasticsearch target: 9.2.0 - pr: https://github.com/elastic/elasticsearch/pull/100 + pr: https://github.com/elastic/elasticsearch/pull/133609 """; var file1 = fileSystem.Path.Combine(changelogDir, "1755268130-short-format.yaml"); @@ -1170,7 +1170,7 @@ public async Task BundleChangelogs_WithShortPrFormat_FiltersCorrectly() var input = new ChangelogBundleInput { Directory = changelogDir, - Prs = ["elastic/elasticsearch#100"], + Prs = ["elastic/elasticsearch#133609"], Output = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), "bundle.yaml") }; From f065913a61b8fe5fe5a174dc8583e86d2ef8b89e Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 18:06:38 -0800 Subject: [PATCH 40/54] Add render_blockers to changelog config (#2426) --- config/changelog.yml.example | 75 +- docs/cli/release/changelog-render.md | 8 + docs/contribute/changelog.md | 46 + .../Changelog/ChangelogConfiguration.cs | 25 + .../Changelog/ChangelogRenderInput.cs | 1 + .../Changelog/ChangelogYamlStaticContext.cs | 1 + .../ChangelogService.cs | 268 ++++- .../Elastic.Documentation.Services.csproj | 6 + .../docs-builder/Commands/ChangelogCommand.cs | 4 +- .../ChangelogServiceTests.cs | 952 ++++++++++++++++++ 10 files changed, 1330 insertions(+), 56 deletions(-) diff --git a/config/changelog.yml.example b/config/changelog.yml.example index 5d44b20ba..d6ef55b5c 100644 --- a/config/changelog.yml.example +++ b/config/changelog.yml.example @@ -4,16 +4,16 @@ # Available types for changelog entries available_types: - - feature - - enhancement - - bug-fix - - known-issue - breaking-change + - bug-fix - deprecation - docs + - enhancement + - feature + - known-issue + - other - regression - security - - other # Available subtypes for breaking changes available_subtypes: @@ -34,24 +34,23 @@ available_lifecycles: # Available areas (optional - if not specified, all areas are allowed) available_areas: - - search - - security - - machine-learning - - observability - - index-management + # - Autoscaling + # - Search + # - Security + # - Watcher # Add more areas as needed # Available products (optional - if not specified, all products are allowed) available_products: - - elasticsearch - - kibana - - apm - - beats - - elastic-agent - - fleet - - cloud-hosted - - cloud-serverless - - cloud-enterprise + # - elasticsearch + # - kibana + # - apm + # - beats + # - elastic-agent + # - fleet + # - cloud-hosted + # - 
cloud-serverless + # - cloud-enterprise # Add more products as needed # GitHub label mappings (optional - used when --pr option is specified) @@ -59,20 +58,36 @@ available_products: # When a PR has a label that matches a key, the corresponding type value is used label_to_type: # Example mappings - customize based on your label naming conventions - # "type:feature": feature - # "type:bug": bug-fix - # "type:enhancement": enhancement - # "type:breaking": breaking-change - # "type:security": security + # ">breaking": breaking-change + # ">bug": bug-fix + # ">docs": docs + # ">enhancement": enhancement + # ">feature": feature # Maps GitHub PR labels to changelog area values # Multiple labels can map to the same area, and a single label can map to multiple areas (comma-separated) label_to_areas: # Example mappings - customize based on your label naming conventions - # "area:search": search - # "area:security": security - # "area:ml": machine-learning - # "area:observability": observability - # "area:index": index-management - # "area:multiple": "search, security" # Multiple areas comma-separated + # ":Distributed Coordination/Autoscaling": Autoscaling + # ":Search/Search": Search + # ":Security/Security": Security + # ":Data Management/Watcher": Watcher + # "area:multiple": "Search, Security" # Multiple areas comma-separated + +# Render blockers (optional - used by the "docs-builder changelog render" command) +# Changelogs matching the specified products and areas/types will be commented out in rendered output files +# Dictionary key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") +# Dictionary value contains areas and/or types that should be blocked for those products +render_blockers: + # Multiple products (comma-separated) with areas and types that should be blocked + "cloud-hosted, cloud-serverless": + areas: # List of area values that should be blocked (commented out) during render + - Autoscaling + - Watcher + types: # List of type values that should be blocked (commented out) during render + - docs + # Single product with areas that should be blocked + elasticsearch: + areas: + - Security diff --git a/docs/cli/release/changelog-render.md b/docs/cli/release/changelog-render.md index 128778251..437483fbe 100644 --- a/docs/cli/release/changelog-render.md +++ b/docs/cli/release/changelog-render.md @@ -45,3 +45,11 @@ docs-builder changelog render [options...] [-h|--help] : When specifying feature IDs directly, provide comma-separated values. : When specifying a file path, provide a single value that points to a newline-delimited file. The file should contain one feature ID per line. : Entries with matching `feature-id` values will be commented out in the markdown output and a warning will be emitted. + +`--config ` +: Optional: Path to the changelog.yml configuration file. +: Defaults to `docs/changelog.yml`. +: This configuration file is where the command looks for `render_blockers` details. + +You can configure `render_blockers` in your `changelog.yml` configuration file to automatically block changelog entries from being rendered based on their products, areas, and/or types. +For more information, refer to [](/contribute/changelog.md#render-blockers). 
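To make the documented `render_blockers` rule concrete, here is a minimal sketch of the matching logic it describes: an entry is commented out when any product declared in the bundle matches a blocker key and either one of the entry's areas or its type matches that blocker. The `Blocker` record and `IsBlocked` method below are illustrative stand-ins only, not the `RenderBlockersEntry` type or the `ShouldBlockEntry` method added later in this patch.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative types only; not part of the docs-builder API.
public sealed record Blocker(string ProductKey, List<string>? Areas, List<string>? Types);

public static class RenderBlockerSketch
{
    // Returns true when the documented render_blockers rule applies:
    // any bundle product matches the blocker's product key AND
    // (any entry area matches OR the entry type matches).
    public static bool IsBlocked(
        IReadOnlyCollection<string> bundleProducts,
        IReadOnlyCollection<string> entryAreas,
        string? entryType,
        IEnumerable<Blocker> blockers)
    {
        foreach (var blocker in blockers)
        {
            // A key may name one product or a comma-separated list of products.
            var products = blocker.ProductKey
                .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

            if (!products.Any(p => bundleProducts.Contains(p, StringComparer.OrdinalIgnoreCase)))
                continue;

            var areaMatches = blocker.Areas is { Count: > 0 } areas &&
                entryAreas.Any(a => areas.Contains(a, StringComparer.OrdinalIgnoreCase));
            var typeMatches = blocker.Types is { Count: > 0 } types && entryType is not null &&
                types.Contains(entryType, StringComparer.OrdinalIgnoreCase);

            if (areaMatches || typeMatches)
                return true; // the rendered entry would be commented out with a '%' prefix
        }

        return false;
    }
}
```

In the patch itself, the matching code additionally records which product and which area or type matched, so the render command can emit a warning naming the reason each blocked entry was commented out.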
diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index 723cd1e35..b4d874185 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -85,6 +85,9 @@ If a configuration file exists, the command validates all its values before gene - If the configuration file contains `lifecycle`, `product`, `subtype`, or `type` values that don't match the values in `products.yml` and `ChangelogConfiguration.cs`, validation fails. The changelog file is not created. - If the configuration file contains `areas` values and they don't match what you specify in the `--areas` command option, validation fails. The changelog file is not created. +The `available_types`, `available_subtypes`, and `available_lifecycles` fields are optional in the configuration file. +If not specified, all default values from `ChangelogConfiguration.cs` are used. + ### GitHub label mappings You can optionally add `label_to_type` and `label_to_areas` mappings in your changelog configuration. @@ -92,6 +95,48 @@ When you run the command with the `--pr` option, it can use these mappings to fi Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example). +### Render blockers [render-blockers] + +You can optionally add `render_blockers` in your changelog configuration to block specific changelog entries from being rendered in markdown output files. +When you run the `docs-builder changelog render` command, changelog entries that match the specified products and areas/types will be commented out in the markdown output. + +By default, the `docs-builder changelog render` command checks the following path: `docs/changelog.yml`. +You can specify a different path with the `--config` command option. + +The `render_blockers` configuration uses a dictionary format where: + +- The key can be a single product ID or comma-separated product IDs (e.g., `"elasticsearch, cloud-serverless"`) +- The value contains `areas` and/or `types` that should be blocked for those products + +An entry is blocked if any product in the changelog entry matches any product key in `render_blockers` AND (any area matches OR any type matches). +If a changelog entry has multiple products, all matching products in `render_blockers` are checked. + +The `types` values in `render_blockers` must exist in the `available_types` list (or in the default types if `available_types` is not specified). + +Example configuration: + +```yaml +render_blockers: + "cloud-hosted, cloud-serverless": + areas: # List of area values that should be blocked (commented out) during render + - Autoscaling + - Watcher + types: # List of type values that should be blocked (commented out) during render + - docs + elasticsearch: # Another single product case + areas: + - Security +``` + +When rendering, entries with: + +- Product `cloud-hosted` or `cloud-serverless` AND (area `Autoscaling` or `Watcher` OR type `docs`) will be commented out +- Product `elasticsearch` AND area `Security` will be commented out + +The command will emit warnings indicating which changelog entries were commented out and why. + +Refer to [changelog.yml.example](https://github.com/elastic/docs-builder/blob/main/config/changelog.yml.example). + ## Create bundles [changelog-bundle] You can use the `docs-builder changelog bundle` command to create a YAML file that lists multiple changelogs. @@ -261,6 +306,7 @@ Options: --subsections Optional: Group entries by area/component in subsections. 
Defaults to false --hide-private-links Optional: Hide private links by commenting them out in the markdown output. Defaults to false --hide-features Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out in the markdown output. [Default: null] + --config Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null] ``` Before you can use this command you must create changelog files and collect them into bundles. diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs index 90f08db0e..61c0c0aa7 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogConfiguration.cs @@ -57,6 +57,31 @@ public class ChangelogConfiguration /// public Dictionary? LabelToAreas { get; set; } + /// + /// Configuration for blocking changelogs from being rendered (commented out in markdown output) + /// Dictionary key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") + /// Dictionary value contains areas and/or types that should be blocked for those products + /// Changelogs matching any product key and any area/type in the corresponding entry will be commented out + /// + public Dictionary? RenderBlockers { get; set; } + public static ChangelogConfiguration Default => new(); } +/// +/// Configuration entry for blocking changelogs during render +/// +public class RenderBlockersEntry +{ + /// + /// List of area values that should be blocked (commented out) during render + /// + public List? Areas { get; set; } + + /// + /// List of type values that should be blocked (commented out) during render + /// Types must exist in the available_types list (or default AvailableTypes if not specified) + /// + public List? Types { get; set; } +} + diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs index 2daa7d569..0d726761d 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogRenderInput.cs @@ -15,5 +15,6 @@ public class ChangelogRenderInput public bool Subsections { get; set; } public bool HidePrivateLinks { get; set; } public string[]? HideFeatures { get; set; } + public string? 
Config { get; set; } } diff --git a/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs b/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs index 2dfb04ff8..3785b2f64 100644 --- a/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs +++ b/src/services/Elastic.Documentation.Services/Changelog/ChangelogYamlStaticContext.cs @@ -10,6 +10,7 @@ namespace Elastic.Documentation.Services.Changelog; [YamlSerializable(typeof(ChangelogData))] [YamlSerializable(typeof(ProductInfo))] [YamlSerializable(typeof(ChangelogConfiguration))] +[YamlSerializable(typeof(RenderBlockersEntry))] [YamlSerializable(typeof(BundledChangelogData))] [YamlSerializable(typeof(BundledProduct))] [YamlSerializable(typeof(BundledEntry))] diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 2084c0ee6..dd541b6af 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -235,7 +235,7 @@ Cancel ctx } } - private async Task LoadChangelogConfiguration( + internal async Task LoadChangelogConfiguration( IDiagnosticsCollector collector, string? configPath, Cancel ctx @@ -264,25 +264,68 @@ Cancel ctx var defaultConfig = ChangelogConfiguration.Default; var validProductIds = configurationContext.ProductsConfiguration.Products.Keys.ToHashSet(StringComparer.OrdinalIgnoreCase); - // Validate available_types - foreach (var type in config.AvailableTypes.Where(t => !defaultConfig.AvailableTypes.Contains(t))) + // If available_types is not specified or empty, use defaults + if (config.AvailableTypes == null || config.AvailableTypes.Count == 0) { - collector.EmitError(finalConfigPath, $"Type '{type}' in changelog.yml is not in the list of available types. Available types: {string.Join(", ", defaultConfig.AvailableTypes)}"); - return null; + config.AvailableTypes = defaultConfig.AvailableTypes.ToList(); + } + else + { + // Validate available_types - must be subset of defaults + foreach (var type in config.AvailableTypes.Where(t => !defaultConfig.AvailableTypes.Contains(t))) + { + collector.EmitError(finalConfigPath, $"Type '{type}' in changelog.yml is not in the list of available types. Available types: {string.Join(", ", defaultConfig.AvailableTypes)}"); + return null; + } + } + + // If available_subtypes is not specified or empty, use defaults + if (config.AvailableSubtypes == null || config.AvailableSubtypes.Count == 0) + { + config.AvailableSubtypes = defaultConfig.AvailableSubtypes.ToList(); + } + else + { + // Validate available_subtypes - must be subset of defaults + foreach (var subtype in config.AvailableSubtypes.Where(s => !defaultConfig.AvailableSubtypes.Contains(s))) + { + collector.EmitError(finalConfigPath, $"Subtype '{subtype}' in changelog.yml is not in the list of available subtypes. 
Available subtypes: {string.Join(", ", defaultConfig.AvailableSubtypes)}"); + return null; + } } - // Validate available_subtypes - foreach (var subtype in config.AvailableSubtypes.Where(s => !defaultConfig.AvailableSubtypes.Contains(s))) + // If available_lifecycles is not specified or empty, use defaults + if (config.AvailableLifecycles == null || config.AvailableLifecycles.Count == 0) + { + config.AvailableLifecycles = defaultConfig.AvailableLifecycles.ToList(); + } + else { - collector.EmitError(finalConfigPath, $"Subtype '{subtype}' in changelog.yml is not in the list of available subtypes. Available subtypes: {string.Join(", ", defaultConfig.AvailableSubtypes)}"); - return null; + // Validate available_lifecycles - must be subset of defaults + foreach (var lifecycle in config.AvailableLifecycles.Where(l => !defaultConfig.AvailableLifecycles.Contains(l))) + { + collector.EmitError(finalConfigPath, $"Lifecycle '{lifecycle}' in changelog.yml is not in the list of available lifecycles. Available lifecycles: {string.Join(", ", defaultConfig.AvailableLifecycles)}"); + return null; + } } - // Validate available_lifecycles - foreach (var lifecycle in config.AvailableLifecycles.Where(l => !defaultConfig.AvailableLifecycles.Contains(l))) + // Validate render_blockers types against available_types + if (config.RenderBlockers != null) { - collector.EmitError(finalConfigPath, $"Lifecycle '{lifecycle}' in changelog.yml is not in the list of available lifecycles. Available lifecycles: {string.Join(", ", defaultConfig.AvailableLifecycles)}"); - return null; + foreach (var (productKey, blockersEntry) in config.RenderBlockers) + { + if (blockersEntry?.Types != null && blockersEntry.Types.Count > 0) + { + foreach (var type in blockersEntry.Types) + { + if (!config.AvailableTypes.Contains(type)) + { + collector.EmitError(finalConfigPath, $"Type '{type}' in render_blockers for '{productKey}' is not in the list of available types. Available types: {string.Join(", ", config.AvailableTypes)}"); + return null; + } + } + } + } } // Validate available_products (if specified) - must be from products.yml @@ -1473,16 +1516,21 @@ Cancel ctx } // Merge phase: Now that validation passed, load and merge all bundles - var allResolvedEntries = new List<(ChangelogData entry, string repo)>(); + var allResolvedEntries = new List<(ChangelogData entry, string repo, HashSet bundleProductIds)>(); var allProducts = new HashSet<(string product, string target)>(); foreach (var (bundledData, bundleInput, bundleDirectory) in bundleDataList) { // Collect products from this bundle + var bundleProductIds = new HashSet(StringComparer.OrdinalIgnoreCase); foreach (var product in bundledData.Products) { var target = product.Target ?? string.Empty; _ = allProducts.Add((product.Product, target)); + if (!string.IsNullOrWhiteSpace(product.Product)) + { + _ = bundleProductIds.Add(product.Product); + } } var repo = bundleInput.Repo ?? 
defaultRepo; @@ -1529,7 +1577,7 @@ Cancel ctx if (entryData != null) { - allResolvedEntries.Add((entryData, repo)); + allResolvedEntries.Add((entryData, repo, bundleProductIds)); } } } @@ -1571,6 +1619,20 @@ Cancel ctx // Convert title to slug format for folder names and anchors (lowercase, dashes instead of spaces) var titleSlug = TitleToSlug(title); + // Load changelog configuration to check for render_blockers + var config = await LoadChangelogConfiguration(collector, input.Config, ctx); + if (config == null) + { + collector.EmitError(string.Empty, "Failed to load changelog configuration"); + return false; + } + + // Extract render blockers from configuration + // RenderBlockers is a Dictionary where: + // - Key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") + // - Value is a RenderBlockersEntry containing areas and/or types that should be blocked for those products + var renderBlockers = config.RenderBlockers; + // Load feature IDs to hide - check if --hide-features contains a file path or a list of feature IDs var featureIdsToHide = new HashSet(StringComparer.OrdinalIgnoreCase); if (input.HideFeatures is { Length: > 0 }) @@ -1668,7 +1730,7 @@ Cancel ctx // Track hidden entries for warnings var hiddenEntries = new List<(string title, string featureId)>(); - foreach (var (entry, _) in allResolvedEntries) + foreach (var (entry, _, _) in allResolvedEntries) { if (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) { @@ -1685,17 +1747,48 @@ Cancel ctx } } + // Check entries against render blockers and track blocked entries + // render_blockers matches against bundle products, not individual entry products + var blockedEntries = new List<(string title, List reasons)>(); + foreach (var (entry, _, bundleProductIds) in allResolvedEntries) + { + var reasons = new List(); + var isBlocked = ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out var blockReasons); + if (isBlocked) + { + blockedEntries.Add((entry.Title ?? "Unknown", blockReasons)); + } + } + + // Emit warnings for blocked entries + if (blockedEntries.Count > 0) + { + foreach (var (entryTitle, reasons) in blockedEntries) + { + var reasonsText = string.Join(" and ", reasons); + collector.EmitWarning(string.Empty, $"Changelog entry '{entryTitle}' will be commented out in markdown output because it matches render_blockers: {reasonsText}"); + } + } + + // Create mapping from entries to their bundle product IDs for render_blockers checking + // Use a custom comparer for reference equality since entries are objects + var entryToBundleProducts = new Dictionary>(); + foreach (var (entry, _, bundleProductIds) in allResolvedEntries) + { + entryToBundleProducts[entry] = bundleProductIds; + } + // Render markdown files (use first repo found, or default) var repoForRendering = allResolvedEntries.Count > 0 ? 
allResolvedEntries[0].repo : defaultRepo; // Render index.md (features, enhancements, bug fixes, security) - await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, ctx); + await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts, ctx); // Render breaking-changes.md - await RenderBreakingChangesMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, ctx); + await RenderBreakingChangesMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts, ctx); // Render deprecations.md - await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, ctx); + await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts, ctx); _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); @@ -1735,6 +1828,8 @@ private async Task RenderIndexMarkdown( bool subsections, bool hidePrivateLinks, HashSet featureIdsToHide, + Dictionary? renderBlockers, + Dictionary> entryToBundleProducts, Cancel ctx ) { @@ -1782,7 +1877,7 @@ Cancel ctx { sb.AppendLine(CultureInfo.InvariantCulture, $"### Features and enhancements [{repo}-{titleSlug}-features-enhancements]"); var combined = features.Concat(enhancements).ToList(); - RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide); + RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts); } if (security.Count > 0 || bugFixes.Count > 0) @@ -1790,7 +1885,7 @@ Cancel ctx sb.AppendLine(); sb.AppendLine(CultureInfo.InvariantCulture, $"### Fixes [{repo}-{titleSlug}-fixes]"); var combined = security.Concat(bugFixes).ToList(); - RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide); + RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts); } } else @@ -1821,6 +1916,8 @@ private async Task RenderBreakingChangesMarkdown( bool subsections, bool hidePrivateLinks, HashSet featureIdsToHide, + Dictionary? 
renderBlockers, + Dictionary> entryToBundleProducts, Cancel ctx ) { @@ -1843,7 +1940,9 @@ Cancel ctx foreach (var entry in areaGroup) { - var shouldHide = !string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId); + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); sb.AppendLine(); if (shouldHide) @@ -1944,6 +2043,8 @@ private async Task RenderDeprecationsMarkdown( bool subsections, bool hidePrivateLinks, HashSet featureIdsToHide, + Dictionary? renderBlockers, + Dictionary> entryToBundleProducts, Cancel ctx ) { @@ -1966,7 +2067,9 @@ Cancel ctx foreach (var entry in areaGroup) { - var shouldHide = !string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId); + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); sb.AppendLine(); if (shouldHide) @@ -2055,7 +2158,7 @@ Cancel ctx } [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] - private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections, bool hidePrivateLinks, HashSet featureIdsToHide) + private void RenderEntriesByArea(StringBuilder sb, List entries, string repo, bool subsections, bool hidePrivateLinks, HashSet featureIdsToHide, Dictionary? renderBlockers, Dictionary> entryToBundleProducts) { var groupedByArea = entries.GroupBy(e => GetComponent(e)).ToList(); foreach (var areaGroup in groupedByArea) @@ -2069,7 +2172,9 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, foreach (var entry in areaGroup) { - var shouldHide = !string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId); + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); if (shouldHide) { @@ -2170,6 +2275,119 @@ private void RenderEntriesByArea(StringBuilder sb, List entries, } } + /// + /// Checks if an entry should be blocked based on render_blockers configuration. + /// RenderBlockers is a Dictionary where: + /// - Key can be a single product ID or comma-separated product IDs (e.g., "elasticsearch, cloud-serverless") + /// - Value is a RenderBlockersEntry containing areas and/or types that should be blocked for those products + /// An entry is blocked if ANY product in the bundle matches ANY product key AND (ANY area matches OR ANY type matches). + /// Note: render_blockers matches against bundle products, not individual entry products. + /// + private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundleProductIds, Dictionary? 
renderBlockers, out List reasons) + { + reasons = []; + if (renderBlockers == null || renderBlockers.Count == 0) + { + return false; + } + + // Bundle must have products to be blocked + if (bundleProductIds == null || bundleProductIds.Count == 0) + { + return false; + } + + // Extract area values from entry (case-insensitive comparison) + var entryAreas = entry.Areas != null && entry.Areas.Count > 0 + ? entry.Areas + .Where(a => !string.IsNullOrWhiteSpace(a)) + .Select(a => a!) + .ToHashSet(StringComparer.OrdinalIgnoreCase) + : new HashSet(StringComparer.OrdinalIgnoreCase); + + // Extract type from entry (case-insensitive comparison) + var entryType = !string.IsNullOrWhiteSpace(entry.Type) + ? entry.Type + : null; + + // Check each render_blockers entry + foreach (var (productKey, blockersEntry) in renderBlockers) + { + if (blockersEntry == null) + { + continue; + } + + // Parse product key - can be comma-separated (e.g., "elasticsearch, cloud-serverless") + var productKeys = productKey + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Where(p => !string.IsNullOrWhiteSpace(p)) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + // Check if any product in the bundle matches any product in the key + var matchingProducts = bundleProductIds.Intersect(productKeys, StringComparer.OrdinalIgnoreCase).ToList(); + if (matchingProducts.Count == 0) + { + continue; + } + + var isBlocked = false; + var blockReasons = new List(); + + // Check areas if specified + if (blockersEntry.Areas != null && blockersEntry.Areas.Count > 0 && entryAreas.Count > 0) + { + var matchingAreas = entryAreas.Intersect(blockersEntry.Areas, StringComparer.OrdinalIgnoreCase).ToList(); + if (matchingAreas.Count > 0) + { + isBlocked = true; + foreach (var product in matchingProducts) + { + foreach (var area in matchingAreas) + { + var reason = $"product '{product}' with area '{area}'"; + if (!blockReasons.Contains(reason)) + { + blockReasons.Add(reason); + } + } + } + } + } + + // Check types if specified + if (blockersEntry.Types != null && blockersEntry.Types.Count > 0 && !string.IsNullOrWhiteSpace(entryType)) + { + var matchingTypes = blockersEntry.Types + .Where(t => string.Equals(t, entryType, StringComparison.OrdinalIgnoreCase)) + .ToList(); + if (matchingTypes.Count > 0) + { + isBlocked = true; + foreach (var product in matchingProducts) + { + foreach (var type in matchingTypes) + { + var reason = $"product '{product}' with type '{type}'"; + if (!blockReasons.Contains(reason)) + { + blockReasons.Add(reason); + } + } + } + } + } + + if (isBlocked) + { + reasons.AddRange(blockReasons); + return true; + } + } + + return false; + } + private static string GetComponent(ChangelogData entry) { // Map areas (list) to component (string) - use first area or empty string diff --git a/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj b/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj index 0494a04d1..96209f37e 100644 --- a/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj +++ b/src/services/Elastic.Documentation.Services/Elastic.Documentation.Services.csproj @@ -6,6 +6,12 @@ enable + + + <_Parameter1>Elastic.Documentation.Services.Tests + + + diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs index 61f496195..c61130533 100644 --- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs +++ 
b/src/tooling/docs-builder/Commands/ChangelogCommand.cs @@ -193,6 +193,7 @@ public async Task Render( bool subsections = false, bool hidePrivateLinks = false, string[]? hideFeatures = null, + string? config = null, Cancel ctx = default ) { @@ -229,7 +230,8 @@ public async Task Render( Title = title, Subsections = subsections, HidePrivateLinks = hidePrivateLinks, - HideFeatures = allFeatureIds.Count > 0 ? allFeatureIds.ToArray() : null + HideFeatures = allFeatureIds.Count > 0 ? allFeatureIds.ToArray() : null, + Config = config }; serviceInvoker.AddCommand(service, renderInput, diff --git a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs index ddd693a62..be866bfc1 100644 --- a/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs +++ b/tests/Elastic.Documentation.Services.Tests/ChangelogServiceTests.cs @@ -2908,6 +2908,958 @@ public async Task RenderChangelogs_WithHideFeatures_CaseInsensitive_MatchesFeatu indexContent.Should().Contain("% * Hidden feature"); } + [Fact] + public async Task RenderChangelogs_WithRenderBlockers_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog that should be blocked (elasticsearch + search area) + var changelog1 = """ + title: Blocked feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - search + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This feature should be blocked + """; + + // Create changelog that should NOT be blocked (elasticsearch but different area) + var changelog2 = """ + title: Visible feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - observability + pr: https://github.com/elastic/elasticsearch/pull/101 + description: This feature should be visible + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-blocked.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + + // Create config file with render_blockers in docs/ subdirectory + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + areas: + - search + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-blocked.yaml + 
checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-visible.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Set current directory to where config file is located so it can be found + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Blocked feature") && + d.Message.Contains("render_blockers") && + d.Message.Contains("product 'elasticsearch'") && + d.Message.Contains("area 'search'")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Blocked entry should be commented out with % prefix + indexContent.Should().Contain("% * Blocked feature"); + // Visible entry should not be commented + indexContent.Should().Contain("* Visible feature"); + indexContent.Should().NotContain("% * Visible feature"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task RenderChangelogs_WithRenderBlockers_CommaSeparatedProducts_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog with cloud-serverless product that should be blocked + var changelog1 = """ + title: Blocked cloud feature + type: feature + products: + - product: cloud-serverless + target: 2025-12-02 + areas: + - security + pr: https://github.com/elastic/cloud-serverless/pull/100 + description: This feature should be blocked + """; + + // Create changelog with elasticsearch product that should also be blocked + var changelog2 = """ + title: Blocked elasticsearch feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - security + pr: https://github.com/elastic/elasticsearch/pull/101 + description: This feature should also be blocked + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-cloud-blocked.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-es-blocked.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + + // Create config file with render_blockers using comma-separated products in docs/ subdirectory + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, 
"docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + "elasticsearch, cloud-serverless": + areas: + - security + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + - product: cloud-serverless + target: 2025-12-02 + entries: + - file: + name: 1755268130-cloud-blocked.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-es-blocked.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Set current directory to where config file is located so it can be found + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Both entries should be commented out + indexContent.Should().Contain("% * Blocked cloud feature"); + indexContent.Should().Contain("% * Blocked elasticsearch feature"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task RenderChangelogs_WithRenderBlockers_MultipleProductsInEntry_ChecksAllProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog with multiple products - one matches render_blockers + var changelog = """ + title: Multi-product feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + - product: kibana + target: 9.2.0 + areas: + - search + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This feature should be blocked because elasticsearch matches + """; + + var changelogFile = fileSystem.Path.Combine(changelogDir, "1755268130-multi-product.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile, changelog, TestContext.Current.CancellationToken); + + // Create config file with render_blockers for elasticsearch only in docs/ subdirectory + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = 
fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + areas: + - search + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + - product: kibana + target: 9.2.0 + entries: + - file: + name: 1755268130-multi-product.yaml + checksum: {ComputeSha1(changelog)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Set current directory to where config file is located so it can be found + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Multi-product feature") && + d.Message.Contains("product 'elasticsearch'")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Should be blocked because elasticsearch matches, even though kibana doesn't + indexContent.Should().Contain("% * Multi-product feature"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task RenderChangelogs_WithRenderBlockers_TypeBlocking_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog that should be blocked (elasticsearch + feature type, blocked by type) + var changelog1 = """ + title: Blocked feature by type + type: feature + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This feature should be blocked by type + """; + + // Create changelog that should NOT be blocked (elasticsearch but different type) + var changelog2 = """ + title: Visible enhancement + type: enhancement + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/101 + description: This enhancement should be visible + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-blocked.yaml"); + var 
changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + + // Create config file with render_blockers blocking docs type + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + - enhancement + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + types: + - feature + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-blocked.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-visible.yaml + checksum: {ComputeSha1(changelog2)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Set current directory to where config file is located so it can be found + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Blocked feature by type") && + d.Message.Contains("render_blockers") && + d.Message.Contains("product 'elasticsearch'") && + d.Message.Contains("type 'feature'")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Blocked entry should be commented out with % prefix + indexContent.Should().Contain("% * Blocked feature by type"); + // Visible entry should not be commented + indexContent.Should().Contain("* Visible enhancement"); + indexContent.Should().NotContain("% * Visible enhancement"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task RenderChangelogs_WithRenderBlockers_AreasAndTypes_CommentsOutMatchingEntries() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); 
+ + // Create changelog that should be blocked by area (elasticsearch + search area) + var changelog1 = """ + title: Blocked by area + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - search + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This should be blocked by area + """; + + // Create changelog that should be blocked by type (elasticsearch + enhancement type, blocked by type) + var changelog2 = """ + title: Blocked by type + type: enhancement + products: + - product: elasticsearch + target: 9.2.0 + pr: https://github.com/elastic/elasticsearch/pull/101 + description: This should be blocked by type + """; + + // Create changelog that should NOT be blocked + var changelog3 = """ + title: Visible feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - observability + pr: https://github.com/elastic/elasticsearch/pull/102 + description: This should be visible + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-area-blocked.yaml"); + var changelogFile2 = fileSystem.Path.Combine(changelogDir, "1755268140-type-blocked.yaml"); + var changelogFile3 = fileSystem.Path.Combine(changelogDir, "1755268150-visible.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile2, changelog2, TestContext.Current.CancellationToken); + await fileSystem.File.WriteAllTextAsync(changelogFile3, changelog3, TestContext.Current.CancellationToken); + + // Create config file with render_blockers blocking both areas and types + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + - enhancement + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + areas: + - search + types: + - enhancement + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-area-blocked.yaml + checksum: {ComputeSha1(changelog1)} + - file: + name: 1755268140-type-blocked.yaml + checksum: {ComputeSha1(changelog2)} + - file: + name: 1755268150-visible.yaml + checksum: {ComputeSha1(changelog3)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Set current directory to where config file is located so it can be found + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + 
+ // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + _collector.Warnings.Should().BeGreaterThan(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Both blocked entries should be commented out + indexContent.Should().Contain("% * Blocked by area"); + indexContent.Should().Contain("% * Blocked by type"); + // Visible entry should not be commented + indexContent.Should().Contain("* Visible feature"); + indexContent.Should().NotContain("% * Visible feature"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task RenderChangelogs_WithRenderBlockers_UsesBundleProductsNotEntryProducts() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog with elasticsearch product and search area + // But bundle has kibana product - should NOT be blocked because render_blockers matches against bundle products + var changelog1 = """ + title: Entry with elasticsearch but bundle has kibana + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - search + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This should NOT be blocked because bundle product is kibana + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-test.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + + // Create config file with render_blockers blocking elasticsearch + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + areas: + - search + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file with kibana product (not elasticsearch) + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: kibana + target: 9.2.0 + entries: + - file: + name: 1755268130-test.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Set current directory to where config file is located so it can be found + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0" + }; + + // Act + var result = await 
service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + // Should have no warnings because entry is NOT blocked (bundle product is kibana, not elasticsearch) + _collector.Warnings.Should().Be(0); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Entry should NOT be commented out because bundle product is kibana, not elasticsearch + indexContent.Should().Contain("* Entry with elasticsearch but bundle has kibana"); + indexContent.Should().NotContain("% * Entry with elasticsearch but bundle has kibana"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task RenderChangelogs_WithCustomConfigPath_UsesSpecifiedConfigFile() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var changelogDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(changelogDir); + + // Create changelog that should be blocked (elasticsearch + search area) + var changelog1 = """ + title: Blocked feature + type: feature + products: + - product: elasticsearch + target: 9.2.0 + areas: + - search + pr: https://github.com/elastic/elasticsearch/pull/100 + description: This feature should be blocked + """; + + var changelogFile1 = fileSystem.Path.Combine(changelogDir, "1755268130-blocked.yaml"); + await fileSystem.File.WriteAllTextAsync(changelogFile1, changelog1, TestContext.Current.CancellationToken); + + // Create config file in a custom location (not in docs/ subdirectory) + var customConfigDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(customConfigDir); + var customConfigPath = fileSystem.Path.Combine(customConfigDir, "custom-changelog.yml"); + var configContent = """ + available_types: + - feature + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + areas: + - search + """; + await fileSystem.File.WriteAllTextAsync(customConfigPath, configContent, TestContext.Current.CancellationToken); + + // Create bundle file + var bundleDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + fileSystem.Directory.CreateDirectory(bundleDir); + + var bundleFile = fileSystem.Path.Combine(bundleDir, "bundle.yaml"); + var bundleContent = $""" + products: + - product: elasticsearch + target: 9.2.0 + entries: + - file: + name: 1755268130-blocked.yaml + checksum: {ComputeSha1(changelog1)} + """; + await fileSystem.File.WriteAllTextAsync(bundleFile, bundleContent, TestContext.Current.CancellationToken); + + // Don't change directory - use custom config path via Config property + var outputDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + + var input = new ChangelogRenderInput + { + Bundles = [new BundleInput { BundleFile = bundleFile, Directory = changelogDir }], + Output = outputDir, + Title = "9.2.0", + Config = customConfigPath + }; + + // Act + var result = await service.RenderChangelogs(_collector, input, TestContext.Current.CancellationToken); + + // Assert + result.Should().BeTrue(); + _collector.Errors.Should().Be(0); + 
_collector.Warnings.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Warning && + d.Message.Contains("Blocked feature") && + d.Message.Contains("render_blockers") && + d.Message.Contains("product 'elasticsearch'") && + d.Message.Contains("area 'search'")); + + var indexFile = fileSystem.Path.Combine(outputDir, "9.2.0", "index.md"); + fileSystem.File.Exists(indexFile).Should().BeTrue(); + + var indexContent = await fileSystem.File.ReadAllTextAsync(indexFile, TestContext.Current.CancellationToken); + // Blocked entry should be commented out with % prefix + indexContent.Should().Contain("% * Blocked feature"); + } + + [Fact] + public async Task LoadChangelogConfiguration_WithoutAvailableTypes_UsesDefaults() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + // Config without available_types - should use defaults + var configContent = """ + available_subtypes: [] + available_lifecycles: + - ga + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + // Act + var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken); + + // Assert + config.Should().NotBeNull(); + _collector.Errors.Should().Be(0); + // Should have default types + config!.AvailableTypes.Should().Contain("feature"); + config.AvailableTypes.Should().Contain("bug-fix"); + config.AvailableTypes.Should().Contain("docs"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task LoadChangelogConfiguration_WithoutAvailableSubtypes_UsesDefaults() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + // Config without available_subtypes - should use defaults + var configContent = """ + available_types: + - feature + available_lifecycles: + - ga + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + // Act + var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken); + + // Assert + config.Should().NotBeNull(); + _collector.Errors.Should().Be(0); + // Should have default subtypes + config!.AvailableSubtypes.Should().Contain("api"); + config.AvailableSubtypes.Should().Contain("behavioral"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task LoadChangelogConfiguration_WithoutAvailableLifecycles_UsesDefaults() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new 
FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + // Config without available_lifecycles - should use defaults + var configContent = """ + available_types: + - feature + available_subtypes: [] + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + // Act + var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken); + + // Assert + config.Should().NotBeNull(); + _collector.Errors.Should().Be(0); + // Should have default lifecycles + config!.AvailableLifecycles.Should().Contain("preview"); + config.AvailableLifecycles.Should().Contain("beta"); + config.AvailableLifecycles.Should().Contain("ga"); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + + [Fact] + public async Task LoadChangelogConfiguration_WithInvalidRenderBlockersType_ReturnsError() + { + // Arrange + var service = new ChangelogService(_loggerFactory, _configurationContext, null); + var fileSystem = new FileSystem(); + var configDir = fileSystem.Path.Combine(fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString()); + var docsDir = fileSystem.Path.Combine(configDir, "docs"); + fileSystem.Directory.CreateDirectory(docsDir); + var configPath = fileSystem.Path.Combine(docsDir, "changelog.yml"); + // Config with invalid type in render_blockers + var configContent = """ + available_types: + - feature + - docs + available_subtypes: [] + available_lifecycles: + - ga + render_blockers: + elasticsearch: + types: + - invalid-type + """; + await fileSystem.File.WriteAllTextAsync(configPath, configContent, TestContext.Current.CancellationToken); + + var originalDir = Directory.GetCurrentDirectory(); + try + { + Directory.SetCurrentDirectory(configDir); + + // Act + var config = await service.LoadChangelogConfiguration(_collector, null, TestContext.Current.CancellationToken); + + // Assert + config.Should().BeNull(); + _collector.Errors.Should().BeGreaterThan(0); + _collector.Diagnostics.Should().Contain(d => + d.Severity == Severity.Error && + d.Message.Contains("Type 'invalid-type' in render_blockers") && + d.Message.Contains("is not in the list of available types")); + } + finally + { + Directory.SetCurrentDirectory(originalDir); + } + } + private static string ComputeSha1(string content) { var bytes = System.Text.Encoding.UTF8.GetBytes(content); From 72011731afb3c23b19335f813205abd3643b6ae6 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 19:03:15 -0800 Subject: [PATCH 41/54] Potential fix for pull request finding 'Container contents are never accessed' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- src/services/Elastic.Documentation.Services/ChangelogService.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index dd541b6af..2e16548bb 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -1752,7 +1752,6 @@ Cancel ctx var blockedEntries = new 
List<(string title, List reasons)>(); foreach (var (entry, _, bundleProductIds) in allResolvedEntries) { - var reasons = new List(); var isBlocked = ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out var blockReasons); if (isBlocked) { From c04e7bf4354a16015e4c922fb43ba341b9499940 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 19:14:03 -0800 Subject: [PATCH 42/54] Potential fix for pull request finding 'Missed opportunity to use Select' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- .../Elastic.Documentation.Services/ChangelogService.cs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 2e16548bb..9ea921921 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -2365,9 +2365,12 @@ private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundle isBlocked = true; foreach (var product in matchingProducts) { - foreach (var type in matchingTypes) + var reasonsForProduct = matchingTypes + .Select(type => $"product '{product}' with type '{type}'") + .Distinct(); + + foreach (var reason in reasonsForProduct) { - var reason = $"product '{product}' with type '{type}'"; if (!blockReasons.Contains(reason)) { blockReasons.Add(reason); From 691aa2bc2b72d8b109b6211c4b83515d0014db48 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 19:26:19 -0800 Subject: [PATCH 43/54] Potential fix for pull request finding 'Missed opportunity to use Select' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- .../ChangelogService.cs | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 9ea921921..af6ca5446 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -2363,18 +2363,16 @@ private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundle if (matchingTypes.Count > 0) { isBlocked = true; - foreach (var product in matchingProducts) - { - var reasonsForProduct = matchingTypes - .Select(type => $"product '{product}' with type '{type}'") - .Distinct(); + var reasonsForProducts = matchingProducts + .SelectMany(product => matchingTypes + .Select(type => $"product '{product}' with type '{type}'")) + .Distinct(); - foreach (var reason in reasonsForProduct) + foreach (var reason in reasonsForProducts) + { + if (!blockReasons.Contains(reason)) { - if (!blockReasons.Contains(reason)) - { - blockReasons.Add(reason); - } + blockReasons.Add(reason); } } } From 24b88a53be81a2e93a87188c776cf7cc78249b48 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 20:12:46 -0800 Subject: [PATCH 44/54] Potential fix for pull request finding 'Missed opportunity to use Where' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- .../Elastic.Documentation.Services/ChangelogService.cs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 
af6ca5446..120ce9bcd 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -2368,12 +2368,9 @@ private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundle .Select(type => $"product '{product}' with type '{type}'")) .Distinct(); - foreach (var reason in reasonsForProducts) + foreach (var reason in reasonsForProducts.Where(reason => !blockReasons.Contains(reason))) { - if (!blockReasons.Contains(reason)) - { - blockReasons.Add(reason); - } + blockReasons.Add(reason); } } } From d86ad43c7419ca2e55693cc04a79313afa0fd03c Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 21:12:18 -0800 Subject: [PATCH 45/54] Potential fix for pull request finding 'Missed opportunity to use Select' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- .../ChangelogService.cs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 120ce9bcd..82a5b3d03 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -2340,15 +2340,16 @@ private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundle if (matchingAreas.Count > 0) { isBlocked = true; - foreach (var product in matchingProducts) + var reasonsForProductsAndAreas = matchingProducts + .SelectMany(product => matchingAreas + .Select(area => $"product '{product}' with area '{area}'")) + .Distinct(); + + foreach (var reason in reasonsForProductsAndAreas) { - foreach (var area in matchingAreas) + if (!blockReasons.Contains(reason)) { - var reason = $"product '{product}' with area '{area}'"; - if (!blockReasons.Contains(reason)) - { - blockReasons.Add(reason); - } + blockReasons.Add(reason); } } } From a96f1c76c600513f8ae772742f54c32844fa6388 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 21:17:55 -0800 Subject: [PATCH 46/54] Potential fix for pull request finding 'Missed opportunity to use Where' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- .../Elastic.Documentation.Services/ChangelogService.cs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index 82a5b3d03..fdb34048f 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -2345,12 +2345,9 @@ private static bool ShouldBlockEntry(ChangelogData entry, HashSet bundle .Select(area => $"product '{product}' with area '{area}'")) .Distinct(); - foreach (var reason in reasonsForProductsAndAreas) + foreach (var reason in reasonsForProductsAndAreas.Where(reason => !blockReasons.Contains(reason))) { - if (!blockReasons.Contains(reason)) - { - blockReasons.Add(reason); - } + blockReasons.Add(reason); } } } From b5b03c2ec6d8872f1bce8cda6ab723587d765a71 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 21:27:26 -0800 Subject: [PATCH 47/54] Potential fix for pull request finding 'Missed opportunity to use Where' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- 
.../Elastic.Documentation.Services/ChangelogService.cs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index fdb34048f..a951abef8 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -316,13 +316,11 @@ Cancel ctx { if (blockersEntry?.Types != null && blockersEntry.Types.Count > 0) { - foreach (var type in blockersEntry.Types) + var invalidType = blockersEntry.Types.FirstOrDefault(type => !config.AvailableTypes.Contains(type)); + if (invalidType != null) { - if (!config.AvailableTypes.Contains(type)) - { - collector.EmitError(finalConfigPath, $"Type '{type}' in render_blockers for '{productKey}' is not in the list of available types. Available types: {string.Join(", ", config.AvailableTypes)}"); - return null; - } + collector.EmitError(finalConfigPath, $"Type '{invalidType}' in render_blockers for '{productKey}' is not in the list of available types. Available types: {string.Join(", ", config.AvailableTypes)}"); + return null; } } } From 0b6310e0065132a4f1bc2d5e383e21967f3dd8de Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Tue, 6 Jan 2026 21:28:21 -0800 Subject: [PATCH 48/54] Render missing changelog types and subtypes (#2427) --- docs/contribute/changelog.md | 4 +- .../ChangelogService.cs | 333 ++++++++++++++---- .../docs-builder/Commands/ChangelogCommand.cs | 2 +- .../ChangelogServiceTests.cs | 86 +++++ 4 files changed, 362 insertions(+), 63 deletions(-) diff --git a/docs/contribute/changelog.md b/docs/contribute/changelog.md index b4d874185..790ef927e 100644 --- a/docs/contribute/changelog.md +++ b/docs/contribute/changelog.md @@ -303,7 +303,7 @@ Options: --input > Required: Bundle input(s) in format "bundle-file-path, changelog-file-path, repo". Can be specified multiple times. Only bundle-file-path is required. [Required] --output Optional: Output directory for rendered markdown files. Defaults to current directory [Default: null] --title Optional: Title to use for section headers in output markdown files. Defaults to version from first bundle [Default: null] - --subsections Optional: Group entries by area/component in subsections. Defaults to false + --subsections Optional: Group entries by area/component in subsections. For breaking changes with a subtype, groups by subtype instead of area. Defaults to false --hide-private-links Optional: Hide private links by commenting them out in the markdown output. Defaults to false --hide-features Filter by feature IDs (comma-separated), or a path to a newline-delimited file containing feature IDs. Can be specified multiple times. Entries with matching feature-id values will be commented out in the markdown output. [Default: null] --config Optional: Path to the changelog.yml configuration file. Defaults to 'docs/changelog.yml' [Default: null] @@ -341,7 +341,7 @@ docs-builder changelog render \ 1. Provide information about the changelog bundle. The format is `", , "`. Only the `` is required. The `` is useful if the changelogs are not in the default directory and are not resolved within the bundle. The `` is necessary if your changelogs do not contain full URLs for the pull requests or issues. You can specify `--input` multiple times to merge multiple bundles. 2. The `--title` value is used for an output folder name and for section titles in the markdown files. 
If you omit `--title` and the first bundle contains a product `target` value, that value is used. Otherwise, if none of the bundles have product `target` fields, the title defaults to "unknown". 3. By default the command creates the output files in the current directory. -4. By default the changelog areas are not displayed in the output. Add `--subsections` to group changelog details by their `areas`. +4. By default the changelog areas are not displayed in the output. Add `--subsections` to group changelog details by their `areas`. For breaking changes that have a `subtype` value, the subsections will be grouped by subtype instead of area. For example, the `index.md` output file contains information derived from the changelogs: diff --git a/src/services/Elastic.Documentation.Services/ChangelogService.cs b/src/services/Elastic.Documentation.Services/ChangelogService.cs index a951abef8..e6805c77b 100644 --- a/src/services/Elastic.Documentation.Services/ChangelogService.cs +++ b/src/services/Elastic.Documentation.Services/ChangelogService.cs @@ -36,6 +36,9 @@ private static class ChangelogEntryTypes public const string BreakingChange = "breaking-change"; public const string Deprecation = "deprecation"; public const string KnownIssue = "known-issue"; + public const string Docs = "docs"; + public const string Regression = "regression"; + public const string Other = "other"; } public async Task CreateChangelog( @@ -1767,6 +1770,34 @@ Cancel ctx } } + // Check for unhandled changelog types + var handledTypes = new HashSet(StringComparer.OrdinalIgnoreCase) + { + ChangelogEntryTypes.Feature, + ChangelogEntryTypes.Enhancement, + ChangelogEntryTypes.Security, + ChangelogEntryTypes.BugFix, + ChangelogEntryTypes.BreakingChange, + ChangelogEntryTypes.Deprecation, + ChangelogEntryTypes.KnownIssue, + ChangelogEntryTypes.Docs, + ChangelogEntryTypes.Regression, + ChangelogEntryTypes.Other + }; + + var availableTypes = config.AvailableTypes ?? ChangelogConfiguration.Default.AvailableTypes; + var availableTypesSet = new HashSet(availableTypes, StringComparer.OrdinalIgnoreCase); + + foreach (var entryType in entriesByType.Keys) + { + // Only warn if the type is valid according to config but not handled in rendering + if (availableTypesSet.Contains(entryType) && !handledTypes.Contains(entryType)) + { + var entryCount = entriesByType[entryType].Count; + collector.EmitWarning(string.Empty, $"Changelog type '{entryType}' is valid according to configuration but is not handled in rendering output. {entryCount} entry/entries of this type will not be included in the generated markdown files."); + } + } + // Create mapping from entries to their bundle product IDs for render_blockers checking // Use a custom comparer for reference equality since entries are objects var entryToBundleProducts = new Dictionary>(); @@ -1778,7 +1809,7 @@ Cancel ctx // Render markdown files (use first repo found, or default) var repoForRendering = allResolvedEntries.Count > 0 ? 
allResolvedEntries[0].repo : defaultRepo; - // Render index.md (features, enhancements, bug fixes, security) + // Render index.md (features, enhancements, bug fixes, security, docs, regression, other) await RenderIndexMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts, ctx); // Render breaking-changes.md @@ -1787,6 +1818,9 @@ Cancel ctx // Render deprecations.md await RenderDeprecationsMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts, ctx); + // Render known-issues.md + await RenderKnownIssuesMarkdown(collector, outputDir, title, titleSlug, repoForRendering, allResolvedEntries.Select(e => e.entry).ToList(), entriesByType, input.Subsections, input.HidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts, ctx); + _logger.LogInformation("Rendered changelog markdown files to {OutputDir}", outputDir); return true; @@ -1834,11 +1868,9 @@ Cancel ctx var enhancements = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Enhancement, []); var security = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Security, []); var bugFixes = entriesByType.GetValueOrDefault(ChangelogEntryTypes.BugFix, []); - - if (features.Count == 0 && enhancements.Count == 0 && security.Count == 0 && bugFixes.Count == 0) - { - // Still create file with "no changes" message - } + var docs = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Docs, []); + var regressions = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Regression, []); + var other = entriesByType.GetValueOrDefault(ChangelogEntryTypes.Other, []); var hasBreakingChanges = entriesByType.ContainsKey(ChangelogEntryTypes.BreakingChange); var hasDeprecations = entriesByType.ContainsKey(ChangelogEntryTypes.Deprecation); @@ -1847,7 +1879,7 @@ Cancel ctx var otherLinks = new List(); if (hasKnownIssues) { - otherLinks.Add("[Known issues](/release-notes/known-issues.md)"); + otherLinks.Add($"[Known issues](/release-notes/known-issues.md#{repo}-{titleSlug}-known-issues)"); } if (hasBreakingChanges) { @@ -1868,7 +1900,9 @@ Cancel ctx sb.AppendLine(); } - if (features.Count > 0 || enhancements.Count > 0 || security.Count > 0 || bugFixes.Count > 0) + var hasAnyEntries = features.Count > 0 || enhancements.Count > 0 || security.Count > 0 || bugFixes.Count > 0 || docs.Count > 0 || regressions.Count > 0 || other.Count > 0; + + if (hasAnyEntries) { if (features.Count > 0 || enhancements.Count > 0) { @@ -1884,6 +1918,27 @@ Cancel ctx var combined = security.Concat(bugFixes).ToList(); RenderEntriesByArea(sb, combined, repo, subsections, hidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts); } + + if (docs.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Documentation [{repo}-{titleSlug}-docs]"); + RenderEntriesByArea(sb, docs, repo, subsections, hidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts); + } + + if (regressions.Count > 0) + { + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Regressions [{repo}-{titleSlug}-regressions]"); + RenderEntriesByArea(sb, regressions, repo, subsections, hidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts); + } + + if (other.Count > 0) + { + 
sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"### Other changes [{repo}-{titleSlug}-other]"); + RenderEntriesByArea(sb, other, repo, subsections, hidePrivateLinks, featureIdsToHide, renderBlockers, entryToBundleProducts); + } } else { @@ -1925,17 +1980,21 @@ Cancel ctx if (breakingChanges.Count > 0) { - var groupedByArea = breakingChanges.GroupBy(e => GetComponent(e)).ToList(); - foreach (var areaGroup in groupedByArea) + // Group by subtype if subsections is enabled, otherwise group by area + var groupedEntries = subsections + ? breakingChanges.GroupBy(e => string.IsNullOrWhiteSpace(e.Subtype) ? string.Empty : e.Subtype).ToList() + : breakingChanges.GroupBy(e => GetComponent(e)).ToList(); + + foreach (var group in groupedEntries) { - if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + if (subsections && !string.IsNullOrWhiteSpace(group.Key)) { - var header = FormatAreaHeader(areaGroup.Key); + var header = FormatSubtypeHeader(group.Key); sb.AppendLine(); sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); } - foreach (var entry in areaGroup) + foreach (var entry in group) { var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || @@ -1949,40 +2008,45 @@ Cancel ctx sb.AppendLine(CultureInfo.InvariantCulture, $"::::{{dropdown}} {Beautify(entry.Title)}"); sb.AppendLine(entry.Description ?? "% Describe the functionality that changed"); sb.AppendLine(); - if (hidePrivateLinks) + var hasPr = !string.IsNullOrWhiteSpace(entry.Pr); + var hasIssues = entry.Issues != null && entry.Issues.Count > 0; + if (hasPr || hasIssues) { - // When hiding private links, put them on separate lines as comments - if (!string.IsNullOrWhiteSpace(entry.Pr)) - { - sb.AppendLine(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); - } - if (entry.Issues != null && entry.Issues.Count > 0) + if (hidePrivateLinks) { - foreach (var issue in entry.Issues) + // When hiding private links, put them on separate lines as comments + if (hasPr) { - sb.AppendLine(FormatIssueLink(issue, repo, hidePrivateLinks)); + sb.AppendLine(FormatPrLink(entry.Pr!, repo, hidePrivateLinks)); } + if (hasIssues) + { + foreach (var issue in entry.Issues!) + { + sb.AppendLine(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("For more information, check the pull request or issue above."); } - sb.AppendLine("For more information, check the pull request or issue above."); - } - else - { - sb.Append("For more information, check "); - if (!string.IsNullOrWhiteSpace(entry.Pr)) - { - sb.Append(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); - } - if (entry.Issues != null && entry.Issues.Count > 0) + else { - foreach (var issue in entry.Issues) + sb.Append("For more information, check "); + if (hasPr) { - sb.Append(' '); - sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + sb.Append(FormatPrLink(entry.Pr!, repo, hidePrivateLinks)); } + if (hasIssues) + { + foreach (var issue in entry.Issues!) + { + sb.Append(' '); + sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("."); } - sb.AppendLine("."); + sb.AppendLine(); } - sb.AppendLine(); if (!string.IsNullOrWhiteSpace(entry.Impact)) { @@ -2076,40 +2140,177 @@ Cancel ctx sb.AppendLine(CultureInfo.InvariantCulture, $"::::{{dropdown}} {Beautify(entry.Title)}"); sb.AppendLine(entry.Description ?? 
"% Describe the functionality that was deprecated"); sb.AppendLine(); - if (hidePrivateLinks) + var hasPr = !string.IsNullOrWhiteSpace(entry.Pr); + var hasIssues = entry.Issues != null && entry.Issues.Count > 0; + if (hasPr || hasIssues) { - // When hiding private links, put them on separate lines as comments - if (!string.IsNullOrWhiteSpace(entry.Pr)) + if (hidePrivateLinks) { - sb.AppendLine(FormatPrLink(entry.Pr, repo, hidePrivateLinks)); + // When hiding private links, put them on separate lines as comments + if (hasPr) + { + sb.AppendLine(FormatPrLink(entry.Pr!, repo, hidePrivateLinks)); + } + if (hasIssues) + { + foreach (var issue in entry.Issues!) + { + sb.AppendLine(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("For more information, check the pull request or issue above."); } - if (entry.Issues != null && entry.Issues.Count > 0) + else { - foreach (var issue in entry.Issues) + sb.Append("For more information, check "); + if (hasPr) { - sb.AppendLine(FormatIssueLink(issue, repo, hidePrivateLinks)); + sb.Append(FormatPrLink(entry.Pr!, repo, hidePrivateLinks)); } + if (hasIssues) + { + foreach (var issue in entry.Issues!) + { + sb.Append(' '); + sb.Append(FormatIssueLink(issue, repo, hidePrivateLinks)); + } + } + sb.AppendLine("."); } - sb.AppendLine("For more information, check the pull request or issue above."); + sb.AppendLine(); + } + + if (!string.IsNullOrWhiteSpace(entry.Impact)) + { + sb.AppendLine("**Impact**
" + entry.Impact); } else { - sb.Append("For more information, check "); - if (!string.IsNullOrWhiteSpace(entry.Pr)) + sb.AppendLine("% **Impact**
_Add a description of the impact_"); + } + + sb.AppendLine(); + + if (!string.IsNullOrWhiteSpace(entry.Action)) + { + sb.AppendLine("**Action**
" + entry.Action); + } + else + { + sb.AppendLine("% **Action**
_Add a description of the what action to take_"); + } + + sb.AppendLine("::::"); + if (shouldHide) + { + sb.AppendLine("-->"); + } + } + } + } + else + { + sb.AppendLine("_No deprecations._"); + } + + var deprecationsPath = _fileSystem.Path.Combine(outputDir, titleSlug, "deprecations.md"); + var deprecationsDir = _fileSystem.Path.GetDirectoryName(deprecationsPath); + if (!string.IsNullOrWhiteSpace(deprecationsDir) && !_fileSystem.Directory.Exists(deprecationsDir)) + { + _ = _fileSystem.Directory.CreateDirectory(deprecationsDir); + } + + await _fileSystem.File.WriteAllTextAsync(deprecationsPath, sb.ToString(), ctx); + } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0060:Remove unused parameter", Justification = "Parameters match interface pattern")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE0058:Expression value is never used", Justification = "StringBuilder methods return builder for chaining")] + private async Task RenderKnownIssuesMarkdown( + IDiagnosticsCollector collector, + string outputDir, + string title, + string titleSlug, + string repo, + List entries, + Dictionary> entriesByType, + bool subsections, + bool hidePrivateLinks, + HashSet featureIdsToHide, + Dictionary? renderBlockers, + Dictionary> entryToBundleProducts, + Cancel ctx + ) + { + var knownIssues = entriesByType.GetValueOrDefault(ChangelogEntryTypes.KnownIssue, []); + + var sb = new StringBuilder(); + sb.AppendLine(CultureInfo.InvariantCulture, $"## {title} [{repo}-{titleSlug}-known-issues]"); + + if (knownIssues.Count > 0) + { + var groupedByArea = knownIssues.GroupBy(e => GetComponent(e)).ToList(); + foreach (var areaGroup in groupedByArea) + { + if (subsections && !string.IsNullOrWhiteSpace(areaGroup.Key)) + { + var header = FormatAreaHeader(areaGroup.Key); + sb.AppendLine(); + sb.AppendLine(CultureInfo.InvariantCulture, $"**{header}**"); + } + + foreach (var entry in areaGroup) + { + var bundleProductIds = entryToBundleProducts.GetValueOrDefault(entry, new HashSet(StringComparer.OrdinalIgnoreCase)); + var shouldHide = (!string.IsNullOrWhiteSpace(entry.FeatureId) && featureIdsToHide.Contains(entry.FeatureId)) || + ShouldBlockEntry(entry, bundleProductIds, renderBlockers, out _); + + sb.AppendLine(); + if (shouldHide) + { + sb.AppendLine("