diff --git a/database/scripts/format_report.rb b/database/scripts/format_report.rb
index ece53ad..d1684fd 100755
--- a/database/scripts/format_report.rb
+++ b/database/scripts/format_report.rb
@@ -7,13 +7,18 @@
report = JSON.load_file(report_name)
-report['urgent']['build_regressions'] = ReportFormatter::build_regressions(report['urgent']['build_regressions'])
-report['urgent']['test_regressions_consecutive'] = ReportFormatter::test_regressions_consecutive(report['urgent']['test_regressions_consecutive'])
-report['urgent']['test_regressions_flaky'] = ReportFormatter::test_regressions_flaky(report['urgent']['test_regressions_flaky'])
-report['maintenance']['jobs_last_success_date'] = ReportFormatter::jobs_last_success_date(report['maintenance']['jobs_last_success_date'])
-report['pending']['test_regressions_all'] = ReportFormatter::test_regressions_all(report['pending']['test_regressions_all'])
-report['pending']['test_regressions_known'] = ReportFormatter::test_regressions_known(report['pending']['test_regressions_known'])
+report['urgent']['build_regressions'] = ReportFormatter.build_regressions(report['urgent']['build_regressions'])
+report['urgent']['test_regressions_consecutive'] =
+ ReportFormatter.test_regressions_consecutive(report['urgent']['test_regressions_consecutive'])
+report['urgent']['test_regressions_flaky'] =
+ ReportFormatter.test_regressions_flaky(report['urgent']['test_regressions_flaky'])
+report['maintenance']['jobs_last_success_date'] =
+ ReportFormatter.jobs_last_success_date(report['maintenance']['jobs_last_success_date'])
+report['pending']['test_regressions_all'] =
+ ReportFormatter.test_regressions_all(report['pending']['test_regressions_all'])
+report['pending']['test_regressions_known'] =
+ ReportFormatter.test_regressions_known(report['pending']['test_regressions_known'])
# Sample output:
# puts report['urgent']['build_regressions']
-puts ReportFormatter::format_report report
+puts ReportFormatter.format_report report
diff --git a/database/scripts/generate_report.rb b/database/scripts/generate_report.rb
index 51c321d..414c4ce 100755
--- a/database/scripts/generate_report.rb
+++ b/database/scripts/generate_report.rb
@@ -8,48 +8,50 @@
options = {}
optparser = OptionParser.new do |o|
- o.on "-e", "--exclude EXCLUDE_JOBS", "A list of string separated by spaces that represent the regex on a job name that you want to exclude" do |exclude|
- options[:exclude] = Set.new(exclude.split(" "))
- end
- o.on "-o" "--output OUTPUT", "Name of output report" do |out|
- options[:report_name] = out
- end
- o.on("-h", "--help", "Prints this help") do
- puts o
- exit
- end
+  o.on '-e', '--exclude EXCLUDE_JOBS',
+       'A list of strings separated by spaces, each a regex on job names that you want to exclude' do |exclude|
+ options[:exclude] = Set.new(exclude.split(' '))
+ end
+ o.on '-o', '--output OUTPUT', 'Name of output report' do |out|
+ options[:report_name] = out
+ end
+ o.on('-h', '--help', 'Prints this help') do
+ puts o
+ exit
+ end
end
optparser.parse!(ARGV)
-options[:exclude] = Set.new() if options[:exclude].nil?
-options[:report_name] = "buildfarm-report_#{DateTime.now.strftime("%Y-%m-%d_%H-%M")}.json" if options[:report_name].nil?
+options[:exclude] = Set.new if options[:exclude].nil?
+options[:report_name] = "buildfarm-report_#{DateTime.now.strftime('%Y-%m-%d_%H-%M')}.json" if options[:report_name].nil?
-def generate_report(report_name, exclude_set)
- report_regressions_all = BuildfarmToolsLib::test_regressions_all(filter_known: true).freeze # Freeze as this is a read-only variable
- report_regressions_consecutive = BuildfarmToolsLib::test_regressions_today(filter_known: true, only_consistent: true, group_issues: true, report_regressions: report_regressions_all)
- report_flaky_regressions = BuildfarmToolsLib::flaky_test_regressions(filter_known: true, group_issues: true, report_regressions: report_regressions_all)
-
- report = {
- 'urgent' => {
- 'build_regressions' => BuildfarmToolsLib::build_regressions_today(filter_known: true),
- 'test_regressions_consecutive' => report_regressions_consecutive,
- 'test_regressions_flaky' => report_flaky_regressions,
- },
- 'maintenance' => {
- 'jobs_last_success_date' => BuildfarmToolsLib::jobs_last_success_date(older_than_days: 7),
- 'gh_issues_reported' => [],
- 'tests_disabled' => [],
- },
- 'pending' => {
- 'test_regressions_all' => BuildfarmToolsLib::test_regressions_all,
- 'test_regressions_known' => BuildfarmToolsLib::test_regressions_known,
- }
- }
+def generate_report(report_name, _exclude_set) # TODO(review): exclude set is accepted but never applied to the report
+  report_regressions_all = BuildfarmToolsLib.test_regressions_all(filter_known: true).freeze # frozen: read-only data
+ report_regressions_consecutive = BuildfarmToolsLib.test_regressions_today(filter_known: true,
+ only_consistent: true, group_issues: true, report_regressions: report_regressions_all)
+ report_flaky_regressions = BuildfarmToolsLib.flaky_test_regressions(filter_known: true, group_issues: true,
+ report_regressions: report_regressions_all)
+ report = {
+ 'urgent' => {
+ 'build_regressions' => BuildfarmToolsLib.build_regressions_today(filter_known: true),
+ 'test_regressions_consecutive' => report_regressions_consecutive,
+ 'test_regressions_flaky' => report_flaky_regressions
+ },
+ 'maintenance' => {
+ 'jobs_last_success_date' => BuildfarmToolsLib.jobs_last_success_date(older_than_days: 7),
+ 'gh_issues_reported' => [],
+ 'tests_disabled' => []
+ },
+ 'pending' => {
+ 'test_regressions_all' => BuildfarmToolsLib.test_regressions_all,
+ 'test_regressions_known' => BuildfarmToolsLib.test_regressions_known
+ }
+ }
- File.open(report_name, 'w') do |f|
- f.write(report.to_json)
- end
+ File.open(report_name, 'w') do |f|
+ f.write(report.to_json)
+ end
end
generate_report(options[:report_name], options[:exclude])
diff --git a/database/scripts/lib/buildfarm_tools.rb b/database/scripts/lib/buildfarm_tools.rb
index b2cf360..be8969e 100644
--- a/database/scripts/lib/buildfarm_tools.rb
+++ b/database/scripts/lib/buildfarm_tools.rb
@@ -27,7 +27,7 @@ def self.build_regressions_today(filter_known: false)
def self.known_issues(status: '')
# Keys: error_name, job_name, github_issue, status
- run_command("./sql_run.sh get_known_issues.sql", args: [status.upcase])
+ run_command('./sql_run.sh get_known_issues.sql', args: [status.upcase])
end
def self.error_appearances_in_job(test_name, job_name)
@@ -53,7 +53,8 @@ def self.test_regressions_all(filter_known: false, include_reports: true)
out
end
- def self.test_regressions_today(filter_known: false, only_consistent: false, group_issues: false, report_regressions: [])
+ def self.test_regressions_today(filter_known: false, only_consistent: false, group_issues: false,
+ report_regressions: [])
# Keys: job_name, build_number, error_name, build_datetime, node_name
out = report_regressions.clone(freeze: false) # Clone because we return a modified version
out = test_regressions_all(filter_known: filter_known) if out.empty?
@@ -71,13 +72,16 @@ def self.test_regressions_today(filter_known: false, only_consistent: false, gro
out
end
- def self.flaky_test_regressions(filter_known: false, group_issues: false, time_range: FLAKY_BUILDS_DEFAULT_RANGE, report_regressions: [])
+ def self.flaky_test_regressions(filter_known: false, group_issues: false, time_range: FLAKY_BUILDS_DEFAULT_RANGE,
+ report_regressions: [])
# Keys: job_name, build_number, error_name, build_datetime, node_name, flakiness
out = []
today_regressions = report_regressions
today_regressions = test_regressions_all(filter_known: filter_known) if today_regressions.empty?
today_regressions.each do |tr|
- next if !tr['age'].to_i.nil? && (tr['age'].to_i >= CONSECUTIVE_THRESHOLD || tr['age'].to_i == WARNING_AGE_CONSTANT)
+      # NOTE(review): to_i never returns nil, so the old nil guard was dead code;
+      # skipping depends only on the age thresholds.
+      next if tr['age'].to_i >= CONSECUTIVE_THRESHOLD || tr['age'].to_i == WARNING_AGE_CONSTANT
tr_flakiness = test_regression_flakiness(tr['error_name'], time_range: time_range)
if tr_flakiness.nil?
@@ -90,15 +94,13 @@ def self.flaky_test_regressions(filter_known: false, group_issues: false, time_r
end
end
out.sort_by! { |e| -e['flakiness'][0]['failure_percentage'].to_f }
- if group_issues
- out = out.group_by { |o| o['flakiness'] }.values
- end
+ out = out.group_by { |o| o['flakiness'] }.values if group_issues
out
end
def self.test_regression_flakiness(error_name, time_range: FLAKY_BUILDS_DEFAULT_RANGE)
# Keys: job_name, last_fail, first_fail, build_count, failure_count, failure_percentage
- tr_flakiness = run_command('./sql_run.sh calculate_flakiness_jobs.sql', args:[error_name, time_range])
+ tr_flakiness = run_command('./sql_run.sh calculate_flakiness_jobs.sql', args: [error_name, time_range])
tr_flakiness.sort_by { |e| -e['failure_percentage'].to_f }
end
@@ -121,13 +123,14 @@ def self.jobs_last_success_date(older_than_days: 0)
# Keys: job_name, last_success
out = []
jobs_never_passed.each do |e|
- out << {"job_name" => e["job_name"], "last_success" => "Never"}
+ out << { 'job_name' => e['job_name'], 'last_success' => 'Never' }
end
jobs_last_success.each do |e|
- last_success = DateTime.parse(e['last_success_time'])
+ last_success = DateTime.parse(e['last_success_time'])
next if last_success > (Date.today - older_than_days)
- out << {"job_name" => e["job_name"], "last_success" => last_success.strftime('%Y-%m-%d')}
+
+ out << { 'job_name' => e['job_name'], 'last_success' => last_success.strftime('%Y-%m-%d') }
end
out
end
@@ -135,48 +138,48 @@ def self.jobs_last_success_date(older_than_days: 0)
def self.test_regressions_known(sort_by: 'priority')
out = known_issues(status: 'open')
out.concat known_issues(status: 'disabled')
- out = out.group_by { |e| e["github_issue"] }.to_a.map { |e| e[1] }
+ out = out.group_by { |e| e['github_issue'] }.to_a.map { |e| e[1] }
out.each do |error_list|
- priority = calculate_issue_priority(error_list.first["github_issue"])
+ priority = calculate_issue_priority(error_list.first['github_issue'])
error_list.each do |error|
- error["priority"] = priority
+ error['priority'] = priority
end
end
- unless sort_by.nil?
- out.sort_by! { |r| -r.first['priority'] }
- end
+ out.sort_by! { |r| -r.first['priority'] } unless sort_by.nil?
out
end
def self.calculate_issue_priority(issue_link)
sql_out = run_command('./sql_run.sh get_known_issue_by_url.sql', args: [issue_link])
- errors = sql_out.map {|e| e['error_name']}.uniq
- jobs = sql_out.map {|e| e['job_name']}.uniq
+ errors = sql_out.map { |e| e['error_name'] }.uniq
+ jobs = sql_out.map { |e| e['job_name'] }.uniq
error_score_jobs = {}
errors.each do |e|
jobs.each do |job|
- flaky_result = run_command('./sql_run.sh calculate_flakiness_jobs.sql', args: [e, FLAKY_BUILDS_DEFAULT_RANGE, job])
+ flaky_result = run_command('./sql_run.sh calculate_flakiness_jobs.sql',
+ args: [e, FLAKY_BUILDS_DEFAULT_RANGE, job])
next if flaky_result.empty?
+
# This is not guaranteed to be 'not consistent', we need to re-check if the last 3 builds were failing because of this
- flaky_ratio = flaky_result.first['failure_percentage'].to_f/100.0
+ flaky_ratio = flaky_result.first['failure_percentage'].to_f / 100.0
job_priority = JOB_PRIORITIES[job]
if job_priority.nil?
puts "WARNING: No job priority for job #{job}. Defaulting to 1"
job_priority = 1
end
- job_priority = job_priority*1.5 if flaky_ratio == 1
-
+ job_priority *= 1.5 if flaky_ratio == 1
+
error_score_jobs[job] = [] if error_score_jobs[job].nil?
- error_score_jobs[job] << (job_priority*flaky_ratio)
+ error_score_jobs[job] << (job_priority * flaky_ratio)
end
end
-
+
# Get only maximum score for each job
- error_score_jobs.each_value.map {|e| e.max}.sum.round(3)
+ error_score_jobs.each_value.map { |e| e.max }.sum.round(3)
end
def self.run_command(cmd, args: [], keys: [])
diff --git a/database/scripts/lib/report_formatter.rb b/database/scripts/lib/report_formatter.rb
index 16e0f6b..3f52af7 100644
--- a/database/scripts/lib/report_formatter.rb
+++ b/database/scripts/lib/report_formatter.rb
@@ -3,11 +3,10 @@
require 'json'
module ReportFormatter
-
TRACKED_PROJECTS = %w[ROS GAZEBO]
def self.get_job_project(job_name)
- project_name = ""
+ project_name = ''
JOB_PROJECT_PATTERN.each_pair do |pattern, project|
project_name = project if pattern.match? job_name
end
@@ -24,23 +23,23 @@ def self.format_reference_build(issue_hash)
def self.format_datetime(datetime)
date, time = datetime.split
- hour, minute, _ = time.split(':')
+ hour, minute, = time.split(':')
"#{date} #{hour}:#{minute}"
end
def self.format_flakiness(flakiness_arr)
- table_head = " "
- table_body = ""
+ table_head = 'Job Name Last Fail First Fail Build Count Failure Count Failure Percentage '
+ table_body = ''
flakiness_arr.each do |e|
table_body += "Job Name Last Fail First Fail Build Count Failure Count Failure Percentage