Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
79 changes: 79 additions & 0 deletions bin/circleci-metrics
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ require 'log_uploader'
require 'usage_report_service'
require 'cloudwatch_metrics_service'
require 's3_upload_service'
require 'resource_usage_aggregator'

module CircleciMetrics
class CLI < Thor
Expand Down Expand Up @@ -202,6 +203,84 @@ module CircleciMetrics
metrics_service.upload_metrics(csv_file_path)
end

desc "aggregate_usage", "Aggregate resource usage for jobs"
method_option :job_ids, aliases: '-j', type: :array, desc: 'List of Job IDs', required: true
method_option :verbose, aliases: '-v', type: :boolean, default: false, desc: 'Enable verbose output'
method_option :format, aliases: '-f', type: :string, enum: ['text', 'json'], default: 'text', desc: 'Output format (text or json)'
# Fetches resource-usage samples for each requested job and prints an
# aggregated summary, either human-readable text or a JSON document.
# Jobs that fail to fetch are collected and reported instead of aborting
# the whole run.
def aggregate_usage
  job_ids = options[:job_ids]
  api_token = fetch_api_token(TTY::Prompt.new)
  logger = Logger.new(STDOUT)
  logger.level = options[:verbose] ? Logger::DEBUG : Logger::INFO
  json_format = options[:format] == 'json'

  circleci_service = CircleciTools::ApiService.new(api_token:, org: 'N/A', project: 'N/A', logger:)
  aggregator = CircleciTools::ResourceUsageAggregator.new(logger:)

  puts "Fetching usage data for #{job_ids.size} job#{'s' if job_ids.size > 1}..." unless json_format
  usage_data = []
  failed_jobs = []

  job_ids.each do |job_id|
    data = circleci_service.fetch_resource_usage(job_id)
    usage_data << data
    # Keep progress chatter off stdout in JSON mode so the output stays parseable.
    puts "Successfully fetched data for job #{job_id}" if options[:verbose] && !json_format
  rescue => e
    failed_jobs << job_id
    puts "Failed to fetch data for job #{job_id}: #{e.message}" if options[:verbose] && !json_format
  end

  if usage_data.empty?
    if json_format
      puts JSON.pretty_generate({ error: "No usage data could be fetched" })
    else
      puts "No usage data could be fetched."
    end
    return
  end

  if options[:verbose] && !json_format
    puts "\nFetched usage data:"
    puts JSON.pretty_generate(usage_data)
  end

  result = aggregator.aggregate(usage_data)

  # aggregate returns nil when no task contained usable samples; the JSON
  # branch below dereferences result, so bail out explicitly first.
  if result.nil?
    if json_format
      puts JSON.pretty_generate({ error: "No valid metrics found in the usage data" })
    else
      puts aggregator.format_summary(nil, jobs_total: job_ids.size, failed_jobs:)
    end
    return
  end

  if json_format
    json_output = {
      jobs: {
        total: job_ids.size,
        processed: job_ids.size - failed_jobs.size,
        failed: failed_jobs
      },
      tasks: result[:stats][:tasks_processed],
      samples: result[:stats][:samples_processed],
      metrics: {
        cpu: {
          min: result[:metrics][:cpu][:min].round(3),
          max: result[:metrics][:cpu][:max].round(3),
          avg: result[:metrics][:cpu][:avg].round(3)
        },
        memory: {
          # Raw values are bytes; report MB for readability.
          min: (result[:metrics][:memory_bytes][:min] / 1024.0 / 1024.0).round(2),
          max: (result[:metrics][:memory_bytes][:max] / 1024.0 / 1024.0).round(2),
          avg: (result[:metrics][:memory_bytes][:avg] / 1024.0 / 1024.0).round(2),
          unit: 'MB'
        }
      }
    }
    puts JSON.pretty_generate(json_output)
  else
    puts aggregator.format_summary(result, jobs_total: job_ids.size, failed_jobs:)
    if options[:verbose]
      puts "\nRaw metrics:"
      puts JSON.pretty_generate(result[:metrics])
    end
  end
end

no_commands do
def load_or_fetch_pipelines(circleci_service, org, project, days)
if options[:pipelines_json]
Expand Down
33 changes: 33 additions & 0 deletions lib/circleci-tools/api_service.rb
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,39 @@ def get_usage_export_job(org_id:, usage_export_job_id:)
JSON.parse(response.body)
end

def fetch_resource_usage(job_id)
url = URI("https://dl.circleci.com/private/output/job/#{job_id}/usage")
@logger.debug("Fetching resource usage from: #{url}")

request = Net::HTTP::Get.new(url)
request["Circle-Token"] = @api_token
request["Accept"] = "application/json"

begin
response = Net::HTTP.start(url.hostname, url.port, use_ssl: true) do |http|
http.request(request)
end

@logger.debug("Response status: #{response.code}")
@logger.debug("Response body: #{response.body}") if response.code != '200'

case response.code
when '200'
JSON.parse(response.body)
when '401'
raise "Unauthorized: Please check your API token"
when '404'
raise "Job not found: #{job_id}"
else
raise "API Error (#{response.code}): #{response.body}"
end
rescue JSON::ParserError => e
raise "Invalid JSON response: #{e.message}"
rescue StandardError => e
raise "Failed to fetch resource usage: #{e.message}"
end
end

private

def connection
Expand Down
90 changes: 90 additions & 0 deletions lib/circleci-tools/resource_usage_aggregator.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
require 'logger'

module CircleciTools
  # Aggregates per-task CPU and memory samples (as fetched from CircleCI's
  # resource-usage endpoint) into min/max/avg metrics plus processing stats.
  class ResourceUsageAggregator
    def initialize(logger: Logger.new(STDOUT))
      @logger = logger
    end

    # Aggregates usage data across jobs.
    #
    # @param usage_data [Array] one entry per job; each entry is expected to be
    #   an array of task hashes with 'cpu' and 'memory_bytes' sample arrays.
    #   Entries or tasks with unexpected shapes are skipped, not raised on.
    # @return [Hash, nil] { metrics:, stats: } where metrics holds
    #   cpu/memory_bytes min/max/sum/count/avg, or nil when no usable samples
    #   were found.
    def aggregate(usage_data)
      return nil if usage_data.empty?

      aggregated_data = {
        cpu: { min: Float::INFINITY, max: -Float::INFINITY, sum: 0, count: 0 },
        memory_bytes: { min: Float::INFINITY, max: -Float::INFINITY, sum: 0, count: 0 }
      }

      stats = {
        tasks_processed: 0,
        samples_processed: { cpu: 0, memory: 0 }
      }

      usage_data.each do |data|
        next unless data.is_a?(Array)

        data.each do |task|
          next unless task.is_a?(Hash)

          cpu_usage = task['cpu']
          memory_usage = task['memory_bytes']

          next unless cpu_usage.is_a?(Array) && memory_usage.is_a?(Array)
          # Guard: Array#min/#max return nil on an empty array, which would
          # raise ArgumentError in the Float comparisons below.
          next if cpu_usage.empty? || memory_usage.empty?

          @logger.debug("Processing task with #{cpu_usage.size} CPU samples and #{memory_usage.size} memory samples")

          stats[:tasks_processed] += 1
          stats[:samples_processed][:cpu] += cpu_usage.size
          stats[:samples_processed][:memory] += memory_usage.size

          fold_samples(aggregated_data[:cpu], cpu_usage)
          fold_samples(aggregated_data[:memory_bytes], memory_usage)
        end
      end

      return nil unless aggregated_data[:cpu][:count].positive?

      aggregated_data[:cpu][:avg] = aggregated_data[:cpu][:sum] / aggregated_data[:cpu][:count].to_f
      aggregated_data[:memory_bytes][:avg] = aggregated_data[:memory_bytes][:sum] / aggregated_data[:memory_bytes][:count].to_f

      { metrics: aggregated_data, stats: stats }
    end

    # Renders a human-readable multi-line summary of an aggregate result.
    #
    # @param result [Hash, nil] a return value of #aggregate
    # @param jobs_total [Integer] number of jobs originally requested
    # @param failed_jobs [Array] IDs of jobs whose data could not be fetched
    # @return [String]
    def format_summary(result, jobs_total:, failed_jobs: [])
      return "No valid metrics found in the usage data." unless result

      metrics = result[:metrics]
      stats = result[:stats]

      [
        "\nResource Usage Summary:",
        "----------------------",
        "Jobs processed: #{jobs_total - failed_jobs.size}/#{jobs_total}",
        "Tasks analyzed: #{stats[:tasks_processed]}",
        "Samples analyzed: #{stats[:samples_processed][:cpu]} CPU, #{stats[:samples_processed][:memory]} memory",
        "",
        "CPU Usage:",
        "  Min: #{metrics[:cpu][:min].round(2)} cores",
        "  Max: #{metrics[:cpu][:max].round(2)} cores",
        "  Avg: #{metrics[:cpu][:avg].round(2)} cores",
        "",
        "Memory Usage:",
        "  Min: #{(metrics[:memory_bytes][:min] / 1024.0 / 1024.0).round(2)} MB",
        "  Max: #{(metrics[:memory_bytes][:max] / 1024.0 / 1024.0).round(2)} MB",
        "  Avg: #{(metrics[:memory_bytes][:avg] / 1024.0 / 1024.0).round(2)} MB",
        (failed_jobs.any? ? [
          "",
          "Warning: Failed to fetch data for #{failed_jobs.size} job#{'s' if failed_jobs.size > 1}:",
          *failed_jobs.map { |job_id| "  - #{job_id}" }
        ] : [])
      ].flatten.join("\n")
    end

    private

    # Folds one batch of numeric samples into a min/max/sum/count accumulator.
    def fold_samples(metric, samples)
      metric[:min] = [metric[:min], samples.min].min
      metric[:max] = [metric[:max], samples.max].max
      metric[:sum] += samples.sum
      metric[:count] += samples.size
    end
  end
end
70 changes: 70 additions & 0 deletions spec/api_service_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -142,4 +142,74 @@
expect { subject.get_usage_export_job(org_id: org_id, usage_export_job_id: usage_export_job_id) }.to raise_error(StandardError, "API Error")
end
end

# Covers ApiService#fetch_resource_usage: the happy path, each mapped HTTP
# error status, a malformed JSON body, and a transport failure. Net::HTTP is
# stubbed at the class level, so no real requests are made.
describe '#fetch_resource_usage' do
  let(:job_id) { '12345' }
  let(:api_token) { 'test_token' }
  let(:org) { 'test_org' }
  let(:project) { 'test_project' }
  let(:logger) { instance_double(Logger, debug: nil, info: nil) }
  let(:service) { described_class.new(api_token: api_token, org: org, project: project, logger: logger) }

  context 'on successful response' do
    before do
      # Stub the HTTP session: Net::HTTP.start yields our double, whose
      # #request returns a canned 200 response.
      http_client = instance_double(Net::HTTP)
      http_response = instance_double(Net::HTTPResponse, code: '200', body: valid_resource_usage_json)

      allow(Net::HTTP).to receive(:start).and_yield(http_client)
      allow(http_client).to receive(:request).and_return(http_response)
    end

    # Minimal well-formed payload: one task with CPU and memory sample arrays.
    let(:valid_resource_usage_json) {
      JSON.generate([
        {
          'cpu' => [0.1, 0.2, 0.3],
          'memory_bytes' => [100_000_000, 150_000_000, 200_000_000]
        }
      ])
    }

    it 'returns parsed resource usage data' do
      result = service.fetch_resource_usage(job_id)

      expect(result).to be_an(Array)
      expect(result[0]['cpu']).to eq([0.1, 0.2, 0.3])
      expect(result[0]['memory_bytes']).to eq([100_000_000, 150_000_000, 200_000_000])
    end
  end

  context 'on error responses' do
    # Each HTTP status must surface its exact, user-facing message — these
    # strings are part of the method's contract with the CLI.
    [
      { code: '401', error_class: StandardError, error_message: 'Unauthorized: Please check your API token' },
      { code: '404', error_class: StandardError, error_message: 'Job not found: 12345' },
      { code: '500', error_class: StandardError, error_message: 'API Error (500): Internal Server Error' }
    ].each do |scenario|
      it "raises an error for HTTP #{scenario[:code]} response" do
        http_client = instance_double(Net::HTTP)
        http_response = instance_double(Net::HTTPResponse, code: scenario[:code], body: 'Internal Server Error')

        allow(Net::HTTP).to receive(:start).and_yield(http_client)
        allow(http_client).to receive(:request).and_return(http_response)

        expect { service.fetch_resource_usage(job_id) }.to raise_error(scenario[:error_class], scenario[:error_message])
      end
    end

    it 'raises an error when JSON parsing fails' do
      # A 200 with a non-JSON body must be reported as a parse failure.
      http_client = instance_double(Net::HTTP)
      http_response = instance_double(Net::HTTPResponse, code: '200', body: 'This is not valid JSON')

      allow(Net::HTTP).to receive(:start).and_yield(http_client)
      allow(http_client).to receive(:request).and_return(http_response)

      expect { service.fetch_resource_usage(job_id) }.to raise_error(StandardError, /Invalid JSON response/)
    end

    it 'raises an error when the HTTP request fails' do
      # Transport-level errors (e.g. timeouts) are wrapped with context.
      allow(Net::HTTP).to receive(:start).and_raise(StandardError.new('Connection timeout'))

      expect { service.fetch_resource_usage(job_id) }.to raise_error(StandardError, /Failed to fetch resource usage: Connection timeout/)
    end
  end
end
end
Loading