From d2b8152ce8ec84621ef4b5a1e9e5f1c430892125 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Fri, 9 Jan 2026 23:33:44 -0800 Subject: [PATCH 01/23] o11y with context-aware logging and distributed tracing - Introduce a new centralized logging module (`src/server/lib/logger/`) with AsyncLocalStorage-based context propagation - Add Datadog tracing integration with automatic correlation ID and span tracking across async operations - Migrate all services and API routes to use context-aware `getLogger()` pattern instead of static logger instances - Standardize log format to single-line key=value style for better log aggregation - Selective/single deploys now are instantly processed --- dd-trace.js | 37 ++ helm/environments/local/lifecycle.yaml | 4 + next-env.d.ts | 3 +- package.json | 8 +- src/pages/api/v1/admin/ttl/cleanup.ts | 66 ++-- src/pages/api/v1/ai/chat.ts | 32 +- src/pages/api/v1/ai/models.ts | 11 +- src/pages/api/v1/builds/[uuid]/deploy.ts | 72 ++-- src/pages/api/v1/builds/[uuid]/graph.ts | 11 +- src/pages/api/v1/builds/[uuid]/index.ts | 40 ++- .../v1/builds/[uuid]/jobs/[jobName]/events.ts | 29 +- .../v1/builds/[uuid]/jobs/[jobName]/logs.ts | 13 +- .../v1/builds/[uuid]/services/[name]/build.ts | 108 +++--- .../[uuid]/services/[name]/buildLogs.ts | 17 +- .../services/[name]/buildLogs/[jobName].ts | 11 +- .../[uuid]/services/[name]/deployLogs.ts | 60 ++-- .../services/[name]/deployLogs/[jobName].ts | 11 +- .../[uuid]/services/[name]/deployment.ts | 37 +- .../[uuid]/services/[name]/logs/[jobName].ts | 28 +- src/pages/api/v1/builds/[uuid]/torndown.ts | 17 +- src/pages/api/v1/builds/[uuid]/webhooks.ts | 86 +++-- src/pages/api/v1/builds/index.ts | 8 +- src/pages/api/v1/config/cache.ts | 16 +- src/pages/api/v1/deploy-summary.ts | 12 +- src/pages/api/v1/deployables.ts | 12 +- src/pages/api/v1/deploys.ts | 12 +- src/pages/api/v1/pull-requests/[id]/builds.ts | 13 +- src/pages/api/v1/pull-requests/[id]/index.ts | 13 +- src/pages/api/v1/pull-requests/index.ts | 11 +- src/pages/api/v1/repos/index.ts | 8 +- src/pages/api/v1/schema/validate.ts | 11 +- src/pages/api/v1/users/index.ts | 8 +- src/pages/api/webhooks/github.ts | 51 ++- src/server/jobs/index.ts | 18 - src/server/lib/buildEnvVariables.ts | 25 +- src/server/lib/cli.ts | 31 +- src/server/lib/codefresh/index.ts | 17 +- src/server/lib/comment.ts | 8 +- .../lib/configFileWebhookEnvVariables.ts | 8 +- .../deploymentManager/deploymentManager.ts | 13 +- src/server/lib/envVariables.ts | 11 +- src/server/lib/fastly.ts | 16 +- src/server/lib/github/__tests__/index.test.ts | 12 +- src/server/lib/github/client.ts | 12 +- src/server/lib/github/deployments.ts | 34 +- src/server/lib/github/index.ts | 114 +++--- src/server/lib/github/types.ts | 3 - src/server/lib/helm/helm.ts | 38 +- src/server/lib/kubernetes.ts | 180 +++++----- src/server/lib/kubernetes/JobMonitor.ts | 26 +- .../lib/kubernetes/getDeploymentJobs.ts | 10 +- src/server/lib/kubernetes/rbac.ts | 15 +- .../lib/kubernetesApply/applyManifest.ts | 14 +- src/server/lib/kubernetesApply/logs.ts | 12 +- src/server/lib/logger.ts | 17 + .../lib/logger/__tests__/context.test.ts | 148 ++++++++ .../logger/__tests__/contextLogger.test.ts | 170 +++++++++ src/server/lib/logger/__tests__/spans.test.ts | 112 ++++++ .../lib/logger/__tests__/stages.test.ts | 118 +++++++ src/server/lib/logger/context.ts | 102 ++++++ src/server/lib/logger/contextLogger.ts | 55 +++ src/server/lib/logger/index.ts | 23 ++ src/server/lib/logger/spans.ts | 58 ++++ src/server/lib/logger/stages.ts | 59 ++++ src/server/lib/logger/types.ts | 
41 +++ .../nativeBuild/__tests__/buildkit.test.ts | 1 + src/server/lib/nativeBuild/engines.ts | 20 +- src/server/lib/nativeBuild/index.ts | 74 ++-- src/server/lib/nativeBuild/utils.ts | 6 +- src/server/lib/nativeHelm/helm.ts | 135 ++++---- src/server/lib/nativeHelm/utils.ts | 70 ++-- src/server/lib/queueManager.ts | 20 +- src/server/lib/redisClient.ts | 10 +- src/server/lib/shell.ts | 8 +- src/server/lib/tracer/index.ts | 26 +- src/server/lib/webhook/index.ts | 35 +- src/server/lib/yamlConfigValidator.ts | 8 +- src/server/models/config/index.ts | 11 +- src/server/models/config/utils.ts | 16 +- src/server/models/yaml/Config.ts | 29 +- src/server/models/yaml/YamlService.ts | 19 +- .../services/__tests__/globalConfig.test.ts | 1 + src/server/services/activityStream.ts | 280 +++++++-------- src/server/services/build.ts | 327 ++++++++++-------- src/server/services/codefresh.ts | 17 +- src/server/services/deploy.ts | 278 +++++++-------- src/server/services/deployable.ts | 196 ++++++----- src/server/services/environment.ts | 10 +- src/server/services/github.ts | 169 ++++----- src/server/services/globalConfig.ts | 79 +++-- src/server/services/ingress.ts | 90 ++--- src/server/services/label.ts | 119 ++++--- src/server/services/override.ts | 19 +- src/server/services/pullRequest.ts | 46 ++- src/server/services/repository.ts | 10 +- src/server/services/service.ts | 12 +- src/server/services/ttlCleanup.ts | 199 +++++------ src/server/services/webhook.ts | 73 ++-- sysops/tilt/ngrok-keycloak.yaml | 5 +- sysops/tilt/ngrok.yaml | 5 +- tsconfig.json | 10 +- 101 files changed, 2822 insertions(+), 2007 deletions(-) create mode 100644 dd-trace.js create mode 100644 src/server/lib/logger/__tests__/context.test.ts create mode 100644 src/server/lib/logger/__tests__/contextLogger.test.ts create mode 100644 src/server/lib/logger/__tests__/spans.test.ts create mode 100644 src/server/lib/logger/__tests__/stages.test.ts create mode 100644 src/server/lib/logger/context.ts create mode 100644 src/server/lib/logger/contextLogger.ts create mode 100644 src/server/lib/logger/index.ts create mode 100644 src/server/lib/logger/spans.ts create mode 100644 src/server/lib/logger/stages.ts create mode 100644 src/server/lib/logger/types.ts diff --git a/dd-trace.js b/dd-trace.js new file mode 100644 index 0000000..9575e4f --- /dev/null +++ b/dd-trace.js @@ -0,0 +1,37 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +'use strict'; + +const tracer = require('dd-trace').init(); + +const blocklist = [/^\/api\/health/, /^\/api\/jobs/, /^\/_next\/static/, /^\/_next\/webpack-hmr/]; + +tracer.use('http', { + server: { + blocklist, + }, + client: { + blocklist, + }, +}); + +tracer.use('next', { + blocklist, +}); + +tracer.use('net', false); +tracer.use('dns', false); diff --git a/helm/environments/local/lifecycle.yaml b/helm/environments/local/lifecycle.yaml index efcf00e..acc64bb 100644 --- a/helm/environments/local/lifecycle.yaml +++ b/helm/environments/local/lifecycle.yaml @@ -66,6 +66,8 @@ components: value: web - name: PORT value: '80' + - name: DD_TRACE_ENABLED + value: 'false' ports: - name: http containerPort: 80 @@ -122,6 +124,8 @@ components: value: '10000' - name: LIFECYCLE_UI_HOSTHAME_WITH_SCHEME value: 'http://localhost:8000' + - name: DD_TRACE_ENABLED + value: 'false' ports: - name: http containerPort: 80 diff --git a/next-env.d.ts b/next-env.d.ts index 4f11a03..725dd6f 100644 --- a/next-env.d.ts +++ b/next-env.d.ts @@ -1,5 +1,6 @@ /// /// +/// // NOTE: This file should not be edited -// see https://nextjs.org/docs/basic-features/typescript for more information. +// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information. diff --git a/package.json b/package.json index 8404b38..8c4a388 100644 --- a/package.json +++ b/package.json @@ -7,17 +7,17 @@ ], "scripts": { "babel-node": "babel-node --extensions '.ts'", - "dev": "LOG_LEVEL=debug ts-node -r tsconfig-paths/register --project tsconfig.server.json ws-server.ts | pino-pretty -c -t HH:MM -i pid,hostname,filename -o '{msg}'", + "dev": "LOG_LEVEL=debug ts-node -r ./dd-trace.js -r tsconfig-paths/register --project tsconfig.server.json ws-server.ts | pino-pretty -c -t HH:MM -i pid,hostname,filename -o '{msg}'", "build": "next build && tsc --project tsconfig.server.json && tsc-alias -p tsconfig.server.json", - "start": "NEXT_MANUAL_SIG_HANDLE=true NODE_ENV=production node .next/ws-server.js", + "start": "NEXT_MANUAL_SIG_HANDLE=true NODE_ENV=production node -r ./dd-trace.js .next/ws-server.js", "run-prod": "port=5001 pnpm run start", "knex": "pnpm run knex", "test": "NODE_ENV=test jest --maxWorkers=75%", "lint": "eslint --ext .ts src", "lint:fix": "pnpm run lint --fix", "ts-check": "tsc --project tsconfig.json", - "db:migrate": "NODE_OPTIONS='--loader ts-node/esm' knex migrate:latest", - "db:rollback": "NODE_OPTIONS='--loader ts-node/esm' knex migrate:rollback", + "db:migrate": "tsx node_modules/knex/bin/cli.js migrate:latest", + "db:rollback": "tsx node_modules/knex/bin/cli.js migrate:rollback", "db:seed": "knex seed:run", "prepare": "husky install", "generate:jsonschemas": "tsx ./scripts/generateSchemas.ts generatejson", diff --git a/src/pages/api/v1/admin/ttl/cleanup.ts b/src/pages/api/v1/admin/ttl/cleanup.ts index d159f65..163b500 100644 --- a/src/pages/api/v1/admin/ttl/cleanup.ts +++ b/src/pages/api/v1/admin/ttl/cleanup.ts @@ -15,14 +15,11 @@ */ import { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { nanoid } from 'nanoid'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import GlobalConfigService from 'server/services/globalConfig'; import TTLCleanupService from 'server/services/ttlCleanup'; -const logger = rootLogger.child({ - filename: 'v1/admin/ttl/cleanup.ts', -}); - /** * @openapi * /api/v1/admin/ttl/cleanup: @@ -160,7 +157,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { 
return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - logger.error(`Error occurred on TTL cleanup operation: \n ${error}`); + getLogger().error( + { error: error instanceof Error ? error.message : String(error) }, + 'Error occurred on TTL cleanup operation' + ); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; @@ -172,39 +172,51 @@ async function getTTLConfig(res: NextApiResponse) { const ttlConfig = globalConfig.ttl_cleanup; if (!ttlConfig) { - logger.warn('[API] TTL cleanup configuration not found in global config'); + getLogger().warn('TTL cleanup configuration not found in global config'); return res.status(404).json({ error: 'TTL cleanup configuration not found' }); } return res.status(200).json({ config: ttlConfig }); } catch (error) { - logger.error(`[API] Error occurred retrieving TTL cleanup config: \n ${error}`); + getLogger().error( + { error: error instanceof Error ? error.message : String(error) }, + 'Error occurred retrieving TTL cleanup config' + ); return res.status(500).json({ error: 'Unable to retrieve TTL cleanup configuration' }); } } async function triggerTTLCleanup(req: NextApiRequest, res: NextApiResponse) { - try { - const { dryRun = false } = req.body || {}; + const correlationId = `api-ttl-cleanup-${Date.now()}-${nanoid(8)}`; - // Validate dryRun parameter type - if (typeof dryRun !== 'boolean') { - return res.status(400).json({ error: 'dryRun must be a boolean value' }); - } + return withLogContext({ correlationId }, async () => { + try { + const { dryRun = false } = req.body || {}; - // Create new service instance and add job to queue - const ttlCleanupService = new TTLCleanupService(); - const job = await ttlCleanupService.ttlCleanupQueue.add('manual-ttl-cleanup', { dryRun }); + // Validate dryRun parameter type + if (typeof dryRun !== 'boolean') { + return res.status(400).json({ error: 'dryRun must be a boolean value' }); + } - logger.info(`[API] TTL cleanup job triggered manually (job ID: ${job.id}, dryRun: ${dryRun})`); + // Create new service instance and add job to queue + const ttlCleanupService = new TTLCleanupService(); + const job = await ttlCleanupService.ttlCleanupQueue.add('manual-ttl-cleanup', { dryRun, correlationId }); - return res.status(200).json({ - message: 'TTL cleanup job triggered successfully', - jobId: job.id, - dryRun, - }); - } catch (error) { - logger.error(`[API] Error occurred triggering TTL cleanup: \n ${error}`); - return res.status(500).json({ error: 'Unable to trigger TTL cleanup job' }); - } + getLogger({ stage: LogStage.CLEANUP_STARTING }).info( + `TTL cleanup job triggered manually: jobId=${job.id} dryRun=${dryRun}` + ); + + return res.status(200).json({ + message: 'TTL cleanup job triggered successfully', + jobId: job.id, + dryRun, + }); + } catch (error) { + getLogger({ stage: LogStage.CLEANUP_FAILED }).error( + { error: error instanceof Error ? 
error.message : String(error) }, + 'Error occurred triggering TTL cleanup' + ); + return res.status(500).json({ error: 'Unable to trigger TTL cleanup job' }); + } + }); } diff --git a/src/pages/api/v1/ai/chat.ts b/src/pages/api/v1/ai/chat.ts index 66ba02b..9a30387 100644 --- a/src/pages/api/v1/ai/chat.ts +++ b/src/pages/api/v1/ai/chat.ts @@ -20,9 +20,7 @@ import AIAgentContextService from 'server/services/ai/context/gatherer'; import AIAgentConversationService from 'server/services/ai/conversation/storage'; import AIAgentService from 'server/services/aiAgent'; import GlobalConfigService from 'server/services/globalConfig'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ filename: 'api/v2/debug/chat' }); +import { getLogger } from 'server/lib/logger/index'; export default async function handler(req: NextApiRequest, res: NextApiResponse) { if (req.method !== 'POST') { @@ -56,8 +54,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) }); } - const logger = rootLogger.child({ filename: 'api/v2/debug/chat', buildUuid }); - const aiAgentContextService = new AIAgentContextService(defaultDb, defaultRedis); const conversationService = new AIAgentConversationService(defaultDb, defaultRedis); const llmService = new AIAgentService(defaultDb, defaultRedis); @@ -69,7 +65,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) await llmService.initialize(); } } catch (error) { - logger.error({ error }, 'Failed to initialize LLM service'); + getLogger({ buildUuid }).error( + { error: error instanceof Error ? error.message : String(error) }, + 'Failed to initialize LLM service' + ); res.write( `data: ${JSON.stringify({ error: error.message, @@ -90,7 +89,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) try { context = await aiAgentContextService.gatherFullContext(buildUuid); } catch (error) { - logger.error({ error, buildUuid }, 'Failed to gather context'); + getLogger({ buildUuid }).error( + { error: error instanceof Error ? error.message : String(error) }, + 'Failed to gather context' + ); res.write( `data: ${JSON.stringify({ error: `Build not found: ${error.message}`, @@ -105,7 +107,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) let totalInvestigationTimeMs = 0; try { const mode = await llmService.classifyUserIntent(message, conversationHistory); - logger.info(`Classified user intent as: ${mode}`); + getLogger({ buildUuid }).info(`Classified user intent as: ${mode}`); const result = await llmService.processQueryStream( message, @@ -168,9 +170,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } } } catch (e) { - // If JSON parsing/validation fails, log error and send plain text fallback - logger.error( - { error: e, responseLength: aiResponse.length }, + getLogger({ buildUuid }).error( + { error: e instanceof Error ? e.message : String(e), responseLength: aiResponse.length }, 'JSON validation failed for investigation response' ); // Convert to plain text message @@ -186,7 +187,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } } } catch (error: any) { - logger.error({ error, errorMessage: error?.message, errorStack: error?.stack }, 'LLM query failed'); + getLogger({ buildUuid }).error( + { error: error instanceof Error ? 
error.message : String(error) }, + 'LLM query failed' + ); // Check if it's a rate limit error if ( @@ -231,8 +235,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) res.write(`data: ${JSON.stringify({ type: 'complete', totalInvestigationTimeMs })}\n\n`); res.end(); } catch (error: any) { - logger.error( - { error, errorMessage: error?.message, errorStack: error?.stack }, + getLogger().error( + { error: error instanceof Error ? error.message : String(error) }, 'Unexpected error in AI agent chat' ); res.write(`data: ${JSON.stringify({ error: error?.message || 'Internal error' })}\n\n`); diff --git a/src/pages/api/v1/ai/models.ts b/src/pages/api/v1/ai/models.ts index 889f009..c87c49a 100644 --- a/src/pages/api/v1/ai/models.ts +++ b/src/pages/api/v1/ai/models.ts @@ -16,9 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next'; import GlobalConfigService from 'server/services/globalConfig'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ filename: 'api/v1/ai/models' }); +import { getLogger } from 'server/lib/logger/index'; export default async function handler(req: NextApiRequest, res: NextApiResponse) { if (req.method !== 'GET') { @@ -34,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } if (!aiAgentConfig.providers || !Array.isArray(aiAgentConfig.providers)) { - logger.warn('aiAgent config missing providers array'); + getLogger().warn('aiAgent config missing providers array'); return res.status(200).json({ models: [] }); } @@ -58,7 +56,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) return res.status(200).json({ models }); } catch (error: any) { - logger.error({ error, errorMessage: error?.message }, 'Failed to fetch available models'); + getLogger().error( + { error: error instanceof Error ? 
error.message : String(error) }, + 'Failed to fetch available models' + ); return res.status(500).json({ error: 'Failed to fetch available models' }); } } diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts index 4f170aa..2c3f8df 100644 --- a/src/pages/api/v1/builds/[uuid]/deploy.ts +++ b/src/pages/api/v1/builds/[uuid]/deploy.ts @@ -15,15 +15,11 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import { Build } from 'server/models'; import { nanoid } from 'nanoid'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'builds/[uuid]/deploy.ts', -}); - /** * @openapi * /api/v1/builds/{uuid}/deploy: @@ -88,36 +84,48 @@ const logger = rootLogger.child({ */ // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { - if (req.method !== 'POST') { - return res.status(405).json({ error: `${req.method} is not allowed` }); - } + const correlationId = `api-redeploy-${Date.now()}-${nanoid(8)}`; + + return withLogContext({ correlationId }, async () => { + if (req.method !== 'POST') { + return res.status(405).json({ error: `${req.method} is not allowed` }); + } - const { uuid } = req.query; + const { uuid } = req.query; - try { - const buildService = new BuildService(); - const build: Build = await buildService.db.models.Build.query() - .findOne({ uuid }) - .withGraphFetched('deploys.deployable'); + try { + getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Redeploy requested for build ${uuid}`); - if (!build) { - logger.info(`Build with UUID ${uuid} not found`); - return res.status(404).json({ error: `Build not found for ${uuid}` }); - } + const buildService = new BuildService(); + const build: Build = await buildService.db.models.Build.query() + .findOne({ uuid }) + .withGraphFetched('deploys.deployable'); - const buildId = build.id; - const runUUID = nanoid(); - await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', { - buildId, - runUUID, - }); + if (!build) { + getLogger({ buildUuid: uuid as string }).debug('Build not found'); + return res.status(404).json({ error: `Build not found for ${uuid}` }); + } - return res.status(200).json({ - status: 'success', - message: `Redeploy for build ${uuid} has been queued`, - }); - } catch (error) { - logger.error(`Unable to proceed with redeploy for build ${uuid}. Error: \n ${error}`); - return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` }); - } + const buildId = build.id; + const runUUID = nanoid(); + await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', { + buildId, + runUUID, + correlationId, + }); + + getLogger({ stage: LogStage.BUILD_QUEUED, buildUuid: build.uuid }).info(`Redeploy queued for build ${uuid}`); + + return res.status(200).json({ + status: 'success', + message: `Redeploy for build ${uuid} has been queued`, + }); + } catch (error) { + getLogger({ stage: LogStage.BUILD_FAILED }).error( + { error: error instanceof Error ? 
error.message : String(error) }, + `Unable to proceed with redeploy for build ${uuid}` + ); + return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` }); + } + }); }; diff --git a/src/pages/api/v1/builds/[uuid]/graph.ts b/src/pages/api/v1/builds/[uuid]/graph.ts index 25a31da..73916d1 100644 --- a/src/pages/api/v1/builds/[uuid]/graph.ts +++ b/src/pages/api/v1/builds/[uuid]/graph.ts @@ -16,14 +16,10 @@ import { NextApiRequest, NextApiResponse } from 'next/types'; import { generateGraph } from 'server/lib/dependencyGraph'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Build } from 'server/models'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'builds/[uuid]/graph.ts', -}); - /** * @openapi * /api/v1/builds/{uuid}/graph: @@ -111,7 +107,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { dependencyGraph: build.dependencyGraph, }); } catch (error) { - logger.error(`Eorror fetching dependency graph for ${uuid}: ${error}`); + getLogger({ buildUuid: uuid as string }).error( + { error: error instanceof Error ? error.message : String(error) }, + 'Error fetching dependency graph' + ); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; diff --git a/src/pages/api/v1/builds/[uuid]/index.ts b/src/pages/api/v1/builds/[uuid]/index.ts index 3df9d70..34c1591 100644 --- a/src/pages/api/v1/builds/[uuid]/index.ts +++ b/src/pages/api/v1/builds/[uuid]/index.ts @@ -16,15 +16,11 @@ import { nanoid } from 'nanoid'; import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import { Build } from 'server/models'; import BuildService from 'server/services/build'; import OverrideService from 'server/services/override'; -const logger = rootLogger.child({ - filename: 'builds/[uuid]/index.ts', -}); - async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) { const { uuid } = req.query; @@ -52,23 +48,26 @@ async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) { ); if (!build) { - logger.info(`Build with UUID ${uuid} not found`); + getLogger({ buildUuid: uuid as string }).debug('Build not found'); return res.status(404).json({ error: 'Build not found' }); } return res.status(200).json(build); } catch (error) { - logger.error(`Error fetching build ${uuid}:`, error); + getLogger({ buildUuid: uuid as string }).error( + { error: error instanceof Error ? 
error.message : String(error) }, + 'Error fetching build' + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } } -async function updateBuild(req: NextApiRequest, res: NextApiResponse) { +async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlationId: string) { const { uuid } = req.query; const { uuid: newUuid } = req.body; if (!newUuid || typeof newUuid !== 'string') { - logger.info(`[${uuid}] Missing or invalid uuid in request body`); + getLogger({ buildUuid: uuid as string }).debug('Missing or invalid uuid in request body'); return res.status(400).json({ error: 'uuid is required' }); } @@ -78,27 +77,29 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse) { const build: Build = await override.db.models.Build.query().findOne({ uuid }).withGraphFetched('pullRequest'); if (!build) { - logger.info(`[${uuid}] Build not found, cannot patch uuid.`); + getLogger({ buildUuid: uuid as string }).debug('Build not found, cannot patch uuid'); return res.status(404).json({ error: 'Build not found' }); } if (newUuid === build.uuid) { - logger.info(`[${uuid}] Attempted to update UUID to same value: ${newUuid}`); + getLogger({ buildUuid: uuid as string }).debug(`Attempted to update UUID to same value: newUuid=${newUuid}`); return res.status(400).json({ error: 'UUID must be different' }); } const validation = await override.validateUuid(newUuid); if (!validation.valid) { - logger.info(`[${uuid}] UUID validation failed on attempt to change: ${validation.error}`); + getLogger({ buildUuid: uuid as string }).debug(`UUID validation failed: error=${validation.error}`); return res.status(400).json({ error: validation.error }); } const result = await override.updateBuildUuid(build, newUuid); if (build.pullRequest?.deployOnUpdate) { + getLogger({ stage: LogStage.BUILD_QUEUED, buildUuid: build.uuid }).info(`Triggering redeploy after UUID update`); await new BuildService().resolveAndDeployBuildQueue.add('resolve-deploy', { buildId: build.id, runUUID: nanoid(), + correlationId, }); } @@ -108,7 +109,10 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse) { }, }); } catch (error) { - logger.error({ error }, `[${uuid}] Error updating UUID to ${newUuid}: ${error}`); + getLogger({ buildUuid: uuid as string }).error( + { error: error instanceof Error ? error.message : String(error) }, + `Error updating UUID to newUuid=${newUuid}` + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } } @@ -346,11 +350,17 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(400).json({ error: 'Invalid UUID' }); } + // Only PATCH needs correlationId for queue operations + if (req.method === 'PATCH') { + const correlationId = `api-build-update-${Date.now()}-${nanoid(8)}`; + return withLogContext({ correlationId }, async () => { + return updateBuild(req, res, correlationId); + }); + } + switch (req.method) { case 'GET': return retrieveBuild(req, res); - case 'PATCH': - return updateBuild(req, res); default: return res.status(405).json({ error: `${req.method} is not allowed` }); } diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts index 6ea5e87..4a2c275 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts @@ -142,14 +142,10 @@ * example: Failed to communicate with Kubernetes. 
*/ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; -const logger = rootLogger.child({ - filename: __filename, -}); - interface K8sEvent { name: string; namespace: string; @@ -170,7 +166,7 @@ interface EventsResponse { events: K8sEvent[]; } -async function getJobEvents(jobName: string, namespace: string): Promise { +async function getJobEvents(jobName: string, namespace: string, buildUuid: string): Promise { const kc = new k8s.KubeConfig(); kc.loadFromDefault(); const coreV1Api = kc.makeApiClient(k8s.CoreV1Api); @@ -220,29 +216,33 @@ async function getJobEvents(jobName: string, namespace: string): Promise { + const { uuid, jobName } = req.query; + const logger = getLogger({ buildUuid: uuid as string }); + if (req.method !== 'GET') { - logger.warn({ method: req.method }, 'Method not allowed'); + logger.warn(`method=${req.method} Method not allowed`); res.setHeader('Allow', ['GET']); return res.status(405).json({ error: `${req.method} is not allowed` }); } - const { uuid, jobName } = req.query; - if (typeof uuid !== 'string' || typeof jobName !== 'string') { - logger.warn({ uuid, jobName }, 'Missing or invalid query parameters'); + logger.warn(`uuid=${uuid} jobName=${jobName} Missing or invalid query parameters`); return res.status(400).json({ error: 'Missing or invalid uuid or jobName parameters' }); } try { const namespace = `env-${uuid}`; - const events = await getJobEvents(jobName, namespace); + const events = await getJobEvents(jobName, namespace, uuid); const response: EventsResponse = { events, @@ -250,7 +250,10 @@ const eventsHandler = async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error({ err: error }, `Error getting events for job ${jobName} in environment ${uuid}.`); + logger.error( + { error: error instanceof Error ? 
error.message : String(error) }, + `jobName=${jobName} Error getting events` + ); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts index ebb62b1..181938f 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts @@ -15,13 +15,9 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import unifiedLogStreamHandler from '../../services/[name]/logs/[jobName]'; -const logger = rootLogger.child({ - filename: __filename, -}); - /** * @openapi * /api/v1/builds/{uuid}/jobs/{jobName}/logs: @@ -101,14 +97,13 @@ const logger = rootLogger.child({ * description: Internal server error */ export default async function handler(req: NextApiRequest, res: NextApiResponse) { - logger.info( - `method=${req.method} jobName=${req.query.jobName} message="Job logs endpoint called, delegating to unified handler"` + const { uuid, jobName } = req.query; + getLogger({ buildUuid: uuid as string }).info( + `method=${req.method} jobName=${jobName} Job logs endpoint called, delegating to unified handler` ); - // Set type to 'webhook' for job logs req.query.type = 'webhook'; - // Set name to undefined since it's not required for webhook jobs req.query.name = undefined; return unifiedLogStreamHandler(req, res); diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts index 450199d..e9371bc 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; import GithubService from 'server/services/github'; import { Build } from 'server/models'; import DeployService from 'server/services/deploy'; @@ -23,10 +23,6 @@ import { DeployStatus } from 'shared/constants'; import { nanoid } from 'nanoid'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'builds/[uuid]/services/[name]/build.ts', -}); - /** * @openapi * /api/v1/builds/{uuid}/services/{name}/build: @@ -102,59 +98,71 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { } const { uuid, name } = req.query; + const correlationId = `api-service-redeploy-${Date.now()}-${nanoid(8)}`; - try { - const githubService = new GithubService(); - const build: Build = await githubService.db.models.Build.query() - .findOne({ - uuid, - }) - .withGraphFetched('deploys.deployable'); + return withLogContext({ correlationId }, async () => { + try { + const githubService = new GithubService(); + const build: Build = await githubService.db.models.Build.query() + .findOne({ + uuid, + }) + .withGraphFetched('deploys.deployable'); - const buildId = build.id; + const buildId = build.id; - if (!build) { - logger.info(`Build with UUID ${uuid} not found`); - return res.status(404).json({ error: `Build not found for ${uuid}` }); - } + if (!build) { + getLogger({ buildUuid: uuid as string }).debug(`Build not found`); + return res.status(404).json({ error: `Build not found for ${uuid}` }); + } - const deploy = build.deploys.find((deploy) => 
deploy.deployable.name === name); + const deploy = build.deploys.find((deploy) => deploy.deployable.name === name); - if (!deploy) { - logger.info(`Deployable ${name} not found in build ${uuid}`); - res.status(404).json({ error: `${name} service is not found in ${uuid} build.` }); - return; - } + if (!deploy) { + getLogger({ buildUuid: uuid as string }).debug(`Deployable not found: service=${name}`); + res.status(404).json({ error: `${name} service is not found in ${uuid} build.` }); + return; + } - const githubRepositoryId = deploy.deployable.repositoryId; + const githubRepositoryId = deploy.deployable.repositoryId; - const runUUID = nanoid(); - const buildService = new BuildService(); - await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', { - buildId, - githubRepositoryId, - runUUID, - }); + const runUUID = nanoid(); + const buildService = new BuildService(); + await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', { + buildId, + githubRepositoryId, + runUUID, + ...extractContextForQueue(), + }); - const deployService = new DeployService(); + getLogger({ stage: LogStage.BUILD_QUEUED, buildUuid: uuid as string }).info( + `Service redeploy queued: service=${name}` + ); - await deploy.$query().patchAndFetch({ - runUUID, - }); + const deployService = new DeployService(); - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.QUEUED, - }, - runUUID - ); - return res.status(200).json({ - status: 'success', - message: `Redeploy for service ${name} in build ${uuid} has been queued`, - }); - } catch (error) { - logger.error(`Unable to proceed with redeploy for services ${name} in build ${uuid}. Error: \n ${error}`); - return res.status(500).json({ error: `Unable to proceed with redeploy for services ${name} in build ${uuid}.` }); - } + await deploy.$query().patchAndFetch({ + runUUID, + }); + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.QUEUED, + }, + runUUID, + githubRepositoryId + ); + return res.status(200).json({ + status: 'success', + message: `Redeploy for service ${name} in build ${uuid} has been queued`, + }); + } catch (error) { + getLogger({ stage: LogStage.BUILD_FAILED }).error( + { error: error instanceof Error ? 
error.message : String(error) }, + `Unable to proceed with redeploy for services ${name} in build ${uuid}` + ); + return res.status(500).json({ error: `Unable to proceed with redeploy for services ${name} in build ${uuid}.` }); + } + }); }; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts index fe281b6..9a7ab3f 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts @@ -15,14 +15,10 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { HttpError } from '@kubernetes/client-node'; import { BuildJobInfo, getNativeBuildJobs } from 'server/lib/kubernetes/getNativeBuildJobs'; -const logger = rootLogger.child({ - filename: __filename, -}); - interface BuildLogsListResponse { builds: BuildJobInfo[]; } @@ -146,16 +142,17 @@ interface BuildLogsListResponse { */ // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { + const { uuid, name } = req.query; + const logger = getLogger({ buildUuid: uuid as string }); + if (req.method !== 'GET') { - logger.warn({ method: req.method }, 'Method not allowed'); + logger.warn(`API: method not allowed method=${req.method}`); res.setHeader('Allow', ['GET']); return res.status(405).json({ error: `${req.method} is not allowed` }); } - const { uuid, name } = req.query; - if (typeof uuid !== 'string' || typeof name !== 'string') { - logger.warn({ uuid, name }, 'Missing or invalid query parameters'); + logger.warn(`API: invalid parameters uuid=${uuid} name=${name}`); return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' }); } @@ -170,7 +167,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error({ err: error }, `Error getting build logs for service ${name} in environment ${uuid}.`); + logger.error({ error }, `API: build logs fetch failed service=${name}`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts index 0c499ff..501049a 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts @@ -15,13 +15,9 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import unifiedLogStreamHandler from '../logs/[jobName]'; -const logger = rootLogger.child({ - filename: 'buildLogs/[jobName].ts', -}); - /** * @openapi * /api/v1/builds/{uuid}/services/{name}/buildLogs/{jobName}: @@ -101,8 +97,9 @@ const logger = rootLogger.child({ * description: Internal server error */ export default async function handler(req: NextApiRequest, res: NextApiResponse) { - logger.info( - `method=${req.method} jobName=${req.query.jobName} message="Build logs endpoint called, delegating to unified handler"` + const { uuid, jobName } = req.query; + getLogger({ buildUuid: uuid as string }).info( + `method=${req.method} jobName=${jobName} Build logs endpoint called, delegating to unified handler` ); req.query.type = 'build'; diff --git 
a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts index ad69c3b..483f67c 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts @@ -15,14 +15,10 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import { HttpError } from '@kubernetes/client-node'; import { DeploymentJobInfo, getDeploymentJobs } from 'server/lib/kubernetes/getDeploymentJobs'; -const logger = rootLogger.child({ - filename: __filename, -}); - interface DeployLogsListResponse { deployments: DeploymentJobInfo[]; } @@ -112,41 +108,43 @@ interface DeployLogsListResponse { * description: Internal server error */ const deployLogsHandler = async (req: NextApiRequest, res: NextApiResponse) => { - if (req.method !== 'GET') { - logger.warn({ method: req.method }, 'Method not allowed'); - res.setHeader('Allow', ['GET']); - return res.status(405).json({ error: `${req.method} is not allowed` }); - } - const { uuid, name } = req.query; - if (typeof uuid !== 'string' || typeof name !== 'string') { - logger.warn({ uuid, name }, 'Missing or invalid query parameters'); - return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' }); - } + return withLogContext({ buildUuid: uuid as string }, async () => { + if (req.method !== 'GET') { + getLogger().warn(`API: method not allowed method=${req.method}`); + res.setHeader('Allow', ['GET']); + return res.status(405).json({ error: `${req.method} is not allowed` }); + } - try { - const namespace = `env-${uuid}`; + if (typeof uuid !== 'string' || typeof name !== 'string') { + getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`); + return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' }); + } - const deployments = await getDeploymentJobs(name, namespace); + try { + const namespace = `env-${uuid}`; - const response: DeployLogsListResponse = { - deployments, - }; + const deployments = await getDeploymentJobs(name, namespace); - return res.status(200).json(response); - } catch (error) { - logger.error({ err: error }, `Error getting deploy logs for service ${name} in environment ${uuid}.`); + const response: DeployLogsListResponse = { + deployments, + }; - if (error instanceof HttpError) { - if (error.response?.statusCode === 404) { - return res.status(404).json({ error: 'Environment or service not found.' }); + return res.status(200).json(response); + } catch (error) { + getLogger().error({ error }, `API: deploy logs fetch failed service=${name}`); + + if (error instanceof HttpError) { + if (error.response?.statusCode === 404) { + return res.status(404).json({ error: 'Environment or service not found.' }); + } + return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' }); } - return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' }); - } - return res.status(500).json({ error: 'Internal server error occurred.' }); - } + return res.status(500).json({ error: 'Internal server error occurred.' 
}); + } + }); }; export default deployLogsHandler; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts index 0750189..d21d04c 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts @@ -131,16 +131,13 @@ * example: Failed to communicate with Kubernetes. */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import unifiedLogStreamHandler from '../logs/[jobName]'; -const logger = rootLogger.child({ - filename: __filename, -}); - const deployLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => { - logger.info( - `method=${req.method} jobName=${req.query.jobName} message="Deploy logs endpoint called, delegating to unified handler"` + const { uuid, jobName } = req.query; + getLogger({ buildUuid: uuid as string }).info( + `method=${req.method} jobName=${jobName} Deploy logs endpoint called, delegating to unified handler` ); req.query.type = 'deploy'; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts index a6b3f96..193793b 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts @@ -15,15 +15,11 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; import { Deploy } from 'server/models'; -const logger = rootLogger.child({ - filename: __filename, -}); - const kc = new k8s.KubeConfig(); kc.loadFromDefault(); @@ -47,12 +43,12 @@ async function getHelmDeploymentDetails(namespace: string, deployUuid: string): try { const secretName = `sh.helm.release.v1.${deployUuid}.v1`; - logger.debug(`Checking for Helm secret: ${secretName} in namespace ${namespace}`); + getLogger({}).debug(`Checking for Helm secret: secretName=${secretName} namespace=${namespace}`); const secret = await coreV1Api.readNamespacedSecret(secretName, namespace); if (!secret.body.data?.release) { - logger.debug(`Helm secret ${secretName} found but no release data`); + getLogger({}).debug(`Helm secret found but no release data: secretName=${secretName}`); return null; } @@ -78,8 +74,8 @@ async function getHelmDeploymentDetails(namespace: string, deployUuid: string): try { release = JSON.parse(releaseData.toString()); } catch (parseError: any) { - logger.warn( - `Failed to parse Helm release data for ${deployUuid}: decompress_error=${decompressError.message} parse_error=${parseError.message}` + getLogger({}).warn( + `Failed to parse Helm release data: deployUuid=${deployUuid} decompress_error=${decompressError.message} parse_error=${parseError.message}` ); return null; } @@ -247,16 +243,16 @@ async function getGitHubDeploymentDetails( * type: string */ const handler = async (req: NextApiRequest, res: NextApiResponse) => { + const { uuid, name } = req.query; + if (req.method !== 'GET') { - logger.warn({ method: req.method }, 'Method not allowed'); + getLogger({ buildUuid: uuid as string }).warn(`Method not allowed: method=${req.method}`); res.setHeader('Allow', ['GET']); return res.status(405).json({ error: `${req.method} is not 
allowed` }); } - const { uuid, name } = req.query; - if (typeof uuid !== 'string' || typeof name !== 'string') { - logger.warn({ uuid, name }, 'Missing or invalid query parameters'); + getLogger({ buildUuid: uuid as string }).warn(`Missing or invalid query parameters: uuid=${uuid} name=${name}`); return res.status(400).json({ error: 'Missing or invalid parameters' }); } @@ -265,24 +261,29 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => { try { const namespace = `env-${uuid}`; - logger.info(`Fetching deployment details: deployUuid=${deployUuid} namespace=${namespace} service=${name}`); + getLogger({ buildUuid: uuid }).debug( + `Fetching deployment details: deployUuid=${deployUuid} namespace=${namespace} service=${name}` + ); const helmDetails = await getHelmDeploymentDetails(namespace, deployUuid); if (helmDetails) { - logger.info(`Found Helm deployment details for ${deployUuid}`); + getLogger({ buildUuid: uuid }).debug(`Found Helm deployment details: deployUuid=${deployUuid}`); return res.status(200).json(helmDetails); } const githubDetails = await getGitHubDeploymentDetails(namespace, deployUuid); if (githubDetails) { - logger.info(`Found GitHub-type deployment details for ${deployUuid}`); + getLogger({ buildUuid: uuid }).debug(`Found GitHub-type deployment details: deployUuid=${deployUuid}`); return res.status(200).json(githubDetails); } - logger.warn(`No deployment details found for ${deployUuid}`); + getLogger({ buildUuid: uuid }).warn(`No deployment details found: deployUuid=${deployUuid}`); return res.status(404).json({ error: 'Deployment not found' }); } catch (error) { - logger.error({ err: error }, `Error getting deployment details for ${deployUuid}`); + getLogger({ buildUuid: uuid }).error( + { error: error instanceof Error ? error.message : String(error) }, + `Error getting deployment details: deployUuid=${deployUuid}` + ); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts index 13dd994..8c6611f 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts @@ -159,44 +159,35 @@ * example: Failed to communicate with Kubernetes. */ import type { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { LogStreamingService } from 'server/services/logStreaming'; import { HttpError } from '@kubernetes/client-node'; -const logger = rootLogger.child({ - filename: __filename, -}); - const unifiedLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => { + const { uuid, name, jobName, type } = req.query; + const logger = getLogger({ buildUuid: uuid as string }); + if (req.method !== 'GET') { - logger.warn(`method=${req.method} message="Method not allowed"`); + logger.warn(`API: method not allowed method=${req.method}`); res.setHeader('Allow', ['GET']); return res.status(405).json({ error: `${req.method} is not allowed` }); } - const { uuid, name, jobName, type } = req.query; - - // 1. 
Request Validation const isWebhookRequest = type === 'webhook'; if (typeof uuid !== 'string' || typeof jobName !== 'string' || (!isWebhookRequest && typeof name !== 'string')) { - logger.warn( - `uuid=${uuid} name=${name} jobName=${jobName} type=${type} message="Missing or invalid query parameters"` - ); + logger.warn(`API: invalid parameters uuid=${uuid} name=${name} jobName=${jobName} type=${type}`); return res.status(400).json({ error: 'Missing or invalid parameters' }); } if (type && (typeof type !== 'string' || !['build', 'deploy', 'webhook'].includes(type))) { - logger.warn(`type=${type} message="Invalid type parameter"`); + logger.warn(`API: invalid type parameter type=${type}`); return res.status(400).json({ error: 'Invalid type parameter. Must be "build", "deploy", or "webhook"' }); } try { - // 2. Call the Service const logService = new LogStreamingService(); - // We cast name and type to strings/undefined safely here because of validation above - const response = await logService.getLogStreamInfo( uuid, jobName, @@ -206,11 +197,8 @@ const unifiedLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse return res.status(200).json(response); } catch (error: any) { - logger.error( - `jobName=${jobName} uuid=${uuid} name=${name} error="${error}" message="Error getting log streaming info"` - ); + logger.error({ error }, `API: log stream info fetch failed jobName=${jobName} service=${name}`); - // 3. Error Mapping if (error.message === 'Build not found') { return res.status(404).json({ error: 'Build not found' }); } diff --git a/src/pages/api/v1/builds/[uuid]/torndown.ts b/src/pages/api/v1/builds/[uuid]/torndown.ts index d9991d7..657dc6c 100644 --- a/src/pages/api/v1/builds/[uuid]/torndown.ts +++ b/src/pages/api/v1/builds/[uuid]/torndown.ts @@ -15,16 +15,12 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Build } from 'server/models'; import { BuildStatus, DeployStatus } from 'shared/constants'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'builds/[uuid]/torndown.ts', -}); - /** * @openapi * /api/v1/builds/{uuid}/torndown: @@ -101,7 +97,7 @@ const logger = rootLogger.child({ // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { if (req.method !== 'PATCH') { - logger.info({ method: req.method }, `[${req.method}] Method not allowed`); + getLogger({}).debug(`Method not allowed: method=${req.method}`); return res.status(405).json({ error: `${req.method} is not allowed` }); } @@ -109,7 +105,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { try { if (!uuid) { - logger.info(`[${uuid}] The uuid is required`); + getLogger({}).debug('The uuid is required'); return res.status(500).json({ error: 'The uuid is required' }); } const buildService = new BuildService(); @@ -121,7 +117,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { .withGraphFetched('[deploys]'); if (build.isStatic || !build) { - logger.info(`[${uuid}] The build doesn't exist or is static environment`); + getLogger({ buildUuid: uuid as string }).debug('Build does not exist or is static environment'); return res.status(404).json({ error: `The build doesn't exist or is static environment` }); } @@ -145,7 +141,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { namespacesUpdated: updatedDeploys, }); 
} catch (error) { - logger.error({ error }, `[${uuid}] Error in cleanup API in`); + getLogger({ buildUuid: uuid as string }).error( + { error: error instanceof Error ? error.message : String(error) }, + 'Error in cleanup API' + ); return res.status(500).json({ error: 'An unexpected error occurred.' }); } }; diff --git a/src/pages/api/v1/builds/[uuid]/webhooks.ts b/src/pages/api/v1/builds/[uuid]/webhooks.ts index 1f7c2e7..9753f5d 100644 --- a/src/pages/api/v1/builds/[uuid]/webhooks.ts +++ b/src/pages/api/v1/builds/[uuid]/webhooks.ts @@ -15,15 +15,12 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { nanoid } from 'nanoid'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import GithubService from 'server/services/github'; import { Build } from 'server/models'; import WebhookService from 'server/services/webhook'; -const logger = rootLogger.child({ - filename: 'builds/[uuid]/webhooks.ts', -}); - /** * @openapi * /api/v1/builds/{uuid}/webhooks: @@ -209,46 +206,62 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - logger.error(`Error handling ${req.method} request for ${uuid}:`, error); + getLogger({ buildUuid: uuid as string }).error( + { error: error instanceof Error ? error.message : String(error) }, + `Error handling ${req.method} request` + ); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) { const { uuid } = req.query; - try { - const githubService = new GithubService(); - const build: Build = await githubService.db.models.Build.query().findOne({ - uuid, - }); + const correlationId = `api-webhook-invoke-${Date.now()}-${nanoid(8)}`; - const buildId = build.id; + return withLogContext({ correlationId }, async () => { + try { + const githubService = new GithubService(); + const build: Build = await githubService.db.models.Build.query().findOne({ + uuid, + }); - if (!build) { - logger.info(`[API ${uuid}] Build not found`); - return res.status(404).json({ error: `Build not found for ${uuid}` }); - } + const buildId = build.id; + + if (!build) { + getLogger({ buildUuid: uuid as string }).debug('Build not found'); + return res.status(404).json({ error: `Build not found for ${uuid}` }); + } - if (!build.webhooksYaml) { - logger.info(`[API ${uuid}] No webhooks found for build`); - return res.status(204).json({ - status: 'no_content', - message: `No webhooks found for build ${uuid}.`, + if (!build.webhooksYaml) { + getLogger({ buildUuid: uuid as string }).debug('No webhooks found for build'); + return res.status(204).json({ + status: 'no_content', + message: `No webhooks found for build ${uuid}.`, + }); + } + + const webhookService = new WebhookService(); + await webhookService.webhookQueue.add('webhook', { + buildId, + correlationId, }); - } - const webhookService = new WebhookService(); - await webhookService.webhookQueue.add('webhook', { - buildId, - }); - return res.status(200).json({ - status: 'success', - message: `Webhook for build ${uuid} has been queued`, - }); - } catch (error) { - logger.error(`Unable to proceed with webook for build ${uuid}. 
Error: \n ${error}`); - return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` }); - } + getLogger({ stage: LogStage.WEBHOOK_PROCESSING, buildUuid: uuid as string }).info( + 'Webhook invocation queued via API' + ); + + return res.status(200).json({ + status: 'success', + message: `Webhook for build ${uuid} has been queued`, + }); + } catch (error) { + getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error( + { error: error instanceof Error ? error.message : String(error) }, + `Unable to proceed with webhook for build ${uuid}` + ); + return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` }); + } + }); } async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) { @@ -284,7 +297,10 @@ async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) { }, }); } catch (error) { - logger.error(`Failed to retrieve webhooks for builds ${uuid}. Error: \n ${error}`); + getLogger({ buildUuid: uuid as string }).error( + { error: error instanceof Error ? error.message : String(error) }, + 'Failed to retrieve webhooks' + ); return res.status(500).json({ error: `Unable to retrieve webhooks for build ${uuid}.` }); } } diff --git a/src/pages/api/v1/builds/index.ts b/src/pages/api/v1/builds/index.ts index 671c6e8..aa206a7 100644 --- a/src/pages/api/v1/builds/index.ts +++ b/src/pages/api/v1/builds/index.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'api/v1/builds/index.ts', -}); - /** * @openapi * /api/v1/builds: @@ -208,7 +204,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error('Error fetching builds:', error); + getLogger({ error: error instanceof Error ? error.message : String(error) }).error('Failed to fetch builds'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/config/cache.ts b/src/pages/api/v1/config/cache.ts index 2eef2b9..3660bcc 100644 --- a/src/pages/api/v1/config/cache.ts +++ b/src/pages/api/v1/config/cache.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import GlobalConfigService from 'server/services/globalConfig'; -const logger = rootLogger.child({ - filename: 'v1/config/cache.ts', -}); - /** * @openapi * /api/v1/config/cache: @@ -111,7 +107,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - logger.error(`Error occurred on config cache operation: \n ${error}`); + getLogger().error( + { error: error instanceof Error ? error.message : String(error) }, + 'Error occurred on config cache operation' + ); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; @@ -122,7 +121,10 @@ async function getCachedConfig(res: NextApiResponse, refresh: boolean = false) { const configs = await configService.getAllConfigs(refresh); return res.status(200).json({ configs }); } catch (error) { - logger.error(`[API] Error occurred retrieving cache config: \n ${error}`); + getLogger().error( + { error: error instanceof Error ? 
error.message : String(error) }, + 'Error occurred retrieving cache config' + ); return res.status(500).json({ error: `Unable to retrieve global config values` }); } } diff --git a/src/pages/api/v1/deploy-summary.ts b/src/pages/api/v1/deploy-summary.ts index 06cc692..de6750d 100644 --- a/src/pages/api/v1/deploy-summary.ts +++ b/src/pages/api/v1/deploy-summary.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'deploy-summary.ts', -}); - /** * @openapi * /api/v1/deploy-summary: @@ -130,7 +126,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const build = await buildService.db.models.Build.query().findById(parsedBuildId).select('id'); if (!build) { - logger.info(`Build with ID ${parsedBuildId} not found`); + getLogger().debug(`Build not found: buildId=${parsedBuildId}`); return res.status(404).json({ error: 'Build not found' }); } @@ -162,7 +158,9 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(result.rows); } catch (error) { - logger.error(`Error fetching deploy summary for build ${parsedBuildId}:`, error); + getLogger({ error: error instanceof Error ? error.message : String(error) }).error( + `Failed to fetch deploy summary: buildId=${parsedBuildId}` + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/deployables.ts b/src/pages/api/v1/deployables.ts index dd4d5a0..f056fa5 100644 --- a/src/pages/api/v1/deployables.ts +++ b/src/pages/api/v1/deployables.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'deployables.ts', -}); - /** * @openapi * /api/v1/deployables: @@ -132,7 +128,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const build = await buildService.db.models.Build.query().findById(parsedBuildId).select('id'); if (!build) { - logger.info(`Build with ID ${parsedBuildId} not found`); + getLogger().debug(`Build not found: buildId=${parsedBuildId}`); return res.status(404).json({ error: 'Build not found' }); } @@ -163,7 +159,9 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(deployables); } catch (error) { - logger.error(`Error fetching deployables for build ${parsedBuildId}:`, error); + getLogger({ error: error instanceof Error ? 
error.message : String(error) }).error( + `Failed to fetch deployables: buildId=${parsedBuildId}` + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/deploys.ts b/src/pages/api/v1/deploys.ts index e122f9c..e487541 100644 --- a/src/pages/api/v1/deploys.ts +++ b/src/pages/api/v1/deploys.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import BuildService from 'server/services/build'; -const logger = rootLogger.child({ - filename: 'api/v1/deploys.ts', -}); - /** * @openapi * /api/v1/deploys: @@ -177,7 +173,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const build = await buildService.db.models.Build.query().findById(parsedBuildId).select('id'); if (!build) { - logger.info(`Build with ID ${parsedBuildId} not found`); + getLogger().debug(`Build not found: buildId=${parsedBuildId}`); return res.status(404).json({ error: 'Build not found' }); } @@ -226,7 +222,9 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(deploys); } catch (error) { - logger.error(`Error fetching deploys for build ${parsedBuildId}:`, error); + getLogger({ error: error instanceof Error ? error.message : String(error) }).error( + `Failed to fetch deploys: buildId=${parsedBuildId}` + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/[id]/builds.ts b/src/pages/api/v1/pull-requests/[id]/builds.ts index 222e521..c3ad473 100644 --- a/src/pages/api/v1/pull-requests/[id]/builds.ts +++ b/src/pages/api/v1/pull-requests/[id]/builds.ts @@ -15,14 +15,10 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import BuildService from 'server/services/build'; import PullRequestService from 'server/services/pullRequest'; -const logger = rootLogger.child({ - filename: 'pull-requests/[id]/builds.ts', -}); - /** * @openapi * /api/v1/pull-requests/{id}/builds: @@ -133,7 +129,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const pullRequest = await pullRequestService.db.models.PullRequest.query().findById(parsedId).select('id'); if (!pullRequest) { - logger.info(`Pull request with ID ${parsedId} not found`); + getLogger().debug(`Pull request not found: id=${parsedId}`); return res.status(404).json({ error: 'Pull request not found' }); } @@ -159,7 +155,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(builds); } catch (error) { - logger.error(`Error fetching builds for pull request ${parsedId}:`, error); + getLogger().error( + { error: error instanceof Error ? 
error.message : String(error) }, + `Failed to fetch builds for pull request: id=${parsedId}` + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/[id]/index.ts b/src/pages/api/v1/pull-requests/[id]/index.ts index e301801..fe52c29 100644 --- a/src/pages/api/v1/pull-requests/[id]/index.ts +++ b/src/pages/api/v1/pull-requests/[id]/index.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import PullRequestService from 'server/services/pullRequest'; -const logger = rootLogger.child({ - filename: 'api/v1/pull-requests/[id].ts', -}); - /** * @openapi * /api/v1/pull-requests/{id}: @@ -142,13 +138,16 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { ); if (!pullRequest) { - logger.info(`Pull request with ID ${parsedId} not found`); + getLogger().debug(`Pull request not found: id=${parsedId}`); return res.status(404).json({ error: 'Pull request not found' }); } return res.status(200).json(pullRequest); } catch (error) { - logger.error(`Error fetching pull request ${parsedId}:`, error); + getLogger().error( + { error: error instanceof Error ? error.message : String(error) }, + `Failed to fetch pull request: id=${parsedId}` + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/index.ts b/src/pages/api/v1/pull-requests/index.ts index 908cf06..57db542 100644 --- a/src/pages/api/v1/pull-requests/index.ts +++ b/src/pages/api/v1/pull-requests/index.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import PullRequestService from 'server/services/pullRequest'; -const logger = rootLogger.child({ - filename: 'api/v1/pull-requests/index.ts', -}); - /** * @openapi * /api/v1/pull-requests: @@ -275,7 +271,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error('Error fetching pull requests:', error); + getLogger().error( + { error: error instanceof Error ? error.message : String(error) }, + 'Failed to fetch pull requests' + ); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/repos/index.ts b/src/pages/api/v1/repos/index.ts index 55d0b79..0f9b8d6 100644 --- a/src/pages/api/v1/repos/index.ts +++ b/src/pages/api/v1/repos/index.ts @@ -15,13 +15,9 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import PullRequestService from 'server/services/pullRequest'; -const logger = rootLogger.child({ - filename: 'api/v1/repos/index.ts', -}); - /** * @openapi * /api/v1/repos: @@ -184,7 +180,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error('Error fetching repos:', error); + getLogger().error({ error: error instanceof Error ? 
error.message : String(error) }, 'Error fetching repos'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/schema/validate.ts b/src/pages/api/v1/schema/validate.ts index 29d08af..b9edebc 100644 --- a/src/pages/api/v1/schema/validate.ts +++ b/src/pages/api/v1/schema/validate.ts @@ -103,14 +103,10 @@ type Response = ValidationResponse | ErrorResponse; import { NextApiRequest, NextApiResponse } from 'next/types'; import { getYamlFileContentFromBranch } from 'server/lib/github'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { YamlConfigParser, ParsingError } from 'server/lib/yamlConfigParser'; import { YamlConfigValidator, ValidationError } from 'server/lib/yamlConfigValidator'; -const logger = rootLogger.child({ - filename: 'v1/schema/validate', -}); - const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse) => { if (req.method !== 'POST') { return res.status(405).json({ error: `${req.method} is not allowed` }); @@ -134,7 +130,10 @@ const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse { return res.status(200).json(response); } catch (error) { - logger.error('Error fetching users:', error); + getLogger().error({ error: error instanceof Error ? error.message : String(error) }, 'Error fetching users'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts index 9bfe94e..68face7 100644 --- a/src/pages/api/webhooks/github.ts +++ b/src/pages/api/webhooks/github.ts @@ -15,32 +15,49 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import rootLogger from 'server/lib/logger'; import * as github from 'server/lib/github'; import { LIFECYCLE_MODE } from 'shared/index'; import { stringify } from 'flatted'; import BootstrapJobs from 'server/jobs/index'; import createAndBindServices from 'server/services'; - -const logger = rootLogger.child({ - filename: 'webhooks/github.ts', -}); +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; const services = createAndBindServices(); /* Only want to listen on web nodes, otherwise no-op for safety */ // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { - const isVerified = github.verifyWebhookSignature(req); - if (!isVerified) throw new Error('Webhook not verified'); - if (!['web', 'all'].includes(LIFECYCLE_MODE)) return; - try { - if (LIFECYCLE_MODE === 'all') BootstrapJobs(services); - const message = stringify({ ...req, ...{ headers: req.headers } }); - await services.GithubService.webhookQueue.add('webhook', { message }); - res.status(200).end(); - } catch (error) { - logger.child({ error }).error(`Github Webhook failure: Error: ${error}`); - res.status(500).end(); - } + const correlationId = (req.headers['x-github-delivery'] as string) || `webhook-${Date.now()}`; + const sender = req.body?.sender?.login; + + return withLogContext({ correlationId, sender }, async () => { + const isVerified = github.verifyWebhookSignature(req); + if (!isVerified) throw new Error('Webhook not verified'); + + getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook received: event=${req.headers['x-github-event']}`); + + if (!['web', 'all'].includes(LIFECYCLE_MODE)) { + getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).info('Skipped: wrong LIFECYCLE_MODE'); + return; + 
} + + try { + if (LIFECYCLE_MODE === 'all') BootstrapJobs(services); + const message = stringify({ ...req, ...{ headers: req.headers } }); + + await services.GithubService.webhookQueue.add('webhook', { + message, + ...extractContextForQueue(), + }); + + getLogger({ stage: LogStage.WEBHOOK_QUEUED }).info('Webhook queued for processing'); + res.status(200).end(); + } catch (error) { + getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).error( + { error: error instanceof Error ? error.message : String(error) }, + 'Webhook failure' + ); + res.status(500).end(); + } + }); }; diff --git a/src/server/jobs/index.ts b/src/server/jobs/index.ts index 1089768..770fb13 100644 --- a/src/server/jobs/index.ts +++ b/src/server/jobs/index.ts @@ -49,22 +49,6 @@ export default function bootstrapJobs(services: IServices) { }, }); - /* Run once per hour */ - services.PullRequest.cleanupClosedPRQueue.add( - 'cleanup', - {}, - { - repeat: { - every: 60000 * 60, // Once an hour - }, - } - ); - - queueManager.registerWorker(QUEUE_NAMES.CLEANUP, services.PullRequest.processCleanupClosedPRs, { - connection: redisClient.getConnection(), - concurrency: 1, - }); - services.GlobalConfig.setupCacheRefreshJob(); queueManager.registerWorker(QUEUE_NAMES.GLOBAL_CONFIG_CACHE_REFRESH, services.GlobalConfig.processCacheRefresh, { @@ -80,8 +64,6 @@ export default function bootstrapJobs(services: IServices) { concurrency: 1, }); - services.PullRequest.cleanupClosedPRQueue.add('cleanup', {}, {}); - queueManager.registerWorker(QUEUE_NAMES.INGRESS_MANIFEST, services.Ingress.createOrUpdateIngressForBuild, { connection: redisClient.getConnection(), concurrency: 1, diff --git a/src/server/lib/buildEnvVariables.ts b/src/server/lib/buildEnvVariables.ts index 79e69d2..7fde122 100644 --- a/src/server/lib/buildEnvVariables.ts +++ b/src/server/lib/buildEnvVariables.ts @@ -17,15 +17,10 @@ import { EnvironmentVariables } from 'server/lib/envVariables'; import { Build, Deploy } from 'server/models'; import { DeployTypes, FeatureFlags } from 'shared/constants'; -import rootLogger from 'server/lib/logger'; -import { LifecycleError } from './errors'; +import { getLogger } from 'server/lib/logger/index'; import { ValidationError } from './yamlConfigValidator'; import * as YamlService from 'server/models/yaml'; -const logger = rootLogger.child({ - filename: 'lib/buildEnvVariables.ts', -}); - export class BuildEnvironmentVariables extends EnvironmentVariables { /** * Retrieve Environment variables. Use lifecycle yaml file while exists; otherwise, falling back to the old LC services table env column. @@ -64,8 +59,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { error.uuid = deploy.uuid; throw error; } else { - logger.warn(error instanceof LifecycleError ? error.getMessage() : `${error}`); - logger.warn(`[${deploy.uuid}]: Failback using database Environment Variables`); + getLogger().warn({ error }, 'Fallback using database Environment Variables'); } } } @@ -112,8 +106,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { error.uuid = deploy.uuid; throw error; } else { - logger.warn(error instanceof LifecycleError ? error.getMessage() : `${error}`); - logger.warn(`[${deploy.uuid}]: Failback using database Init Environment Variables`); + getLogger().warn({ error }, 'Fallback using database Init Environment Variables'); } } } @@ -132,12 +125,16 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { * 2. 
Interpolate env from deploy parent service (via db or yaml definition for specific branch) * 3. Save to deploy * @param build Build model from associated PR + * @param githubRepositoryId Optional filter to only resolve env for deploys from a specific repo * @returns Map of env variables */ - public async resolve(build: Build): Promise> { + public async resolve(build: Build, githubRepositoryId?: number): Promise> { if (build != null) { await build?.$fetchGraph('[services, deploys.[service.[repository], deployable]]'); - const deploys = build?.deploys; + const allDeploys = build?.deploys; + const deploys = githubRepositoryId + ? allDeploys.filter((d) => d.githubRepositoryId === githubRepositoryId) + : allDeploys; const availableEnv = this.cleanup(await this.availableEnvironmentVariablesForBuild(build)); const useDeafulttUUID = @@ -158,7 +155,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { ), }) .catch((error) => { - logger.error(`[DEPLOY ${deploy.uuid}] Problem when preparing env variable: ${error}`); + getLogger().error({ error }, 'Problem when preparing env variable'); }); if (deploy.deployable?.initDockerfilePath || deploy.service?.initDockerfilePath) { @@ -177,7 +174,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { ), }) .catch((error) => { - logger.error(`[DEPLOY ${deploy.uuid}] Problem when preparing init env variable: ${error}`); + getLogger().error({ error }, 'Problem when preparing init env variable'); }); } }); diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts index 25ebafd..931017d 100644 --- a/src/server/lib/cli.ts +++ b/src/server/lib/cli.ts @@ -18,14 +18,10 @@ import { merge } from 'lodash'; import { Build, Deploy, Service, Deployable } from 'server/models'; import { CLIDeployTypes, DeployTypes } from 'shared/constants'; import { shellPromise } from './shell'; -import rootLogger from './logger'; +import { getLogger } from './logger/index'; import GlobalConfigService from 'server/services/globalConfig'; import { DatabaseSettings } from 'server/services/types/globalConfig'; -const logger = rootLogger.child({ - filename: 'lib/cli.ts', -}); - /** * Deploys the build * @param build the build to deploy @@ -65,7 +61,8 @@ export async function cliDeploy(deploy: Deploy) { * @param deploy the deploy to run */ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Service, deployable: Deployable) { - logger.debug(`Invoking the codefresh CLI to deploy this deploy`); + const buildUuid = build?.uuid; + getLogger({ buildUuid }).debug('Invoking the codefresh CLI to deploy this deploy'); const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv); @@ -88,9 +85,9 @@ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Ser const command = `codefresh run ${serviceDeployPipelineId} -b "${deploy.branchName}" ${variables.join( ' ' )} ${deployTrigger} -d`; - logger.debug(`About to run codefresh command: ${command}`); + getLogger({ buildUuid }).debug(`About to run codefresh command: command=${command}`); const output = await shellPromise(command); - logger.debug(`codefresh run output: ${output}`); + getLogger({ buildUuid }).debug(`Codefresh run output: output=${output}`); const id = output.trim(); return id; } @@ -100,7 +97,8 @@ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Ser * @param deploy the deploy to run */ export async function codefreshDestroy(deploy: Deploy) { - logger.debug(`Invoking the codefresh CLI to delete this deploy`); + const 
buildUuid = deploy?.build?.uuid; + getLogger({ buildUuid }).debug('Invoking the codefresh CLI to delete this deploy'); try { /** Reset the SHA so we will re-run the pipelines post destroy */ @@ -111,7 +109,7 @@ export async function codefreshDestroy(deploy: Deploy) { /* Always pass in a BUILD UUID & BUILD SHA as those are critical keys */ const envVariables = merge( { - BUILD_UUID: deploy?.build?.uuid, + BUILD_UUID: buildUuid, BUILD_SHA: deploy?.build?.sha, }, deploy.env || {}, @@ -140,14 +138,12 @@ export async function codefreshDestroy(deploy: Deploy) { const command = `codefresh run ${destroyPipelineId} -b "${serviceBranchName}" ${variables.join( ' ' )} ${destroyTrigger} -d`; - logger.debug('Destroy Command: %s', command); + getLogger({ buildUuid }).debug(`Destroy command: command=${command}`); const output = await shellPromise(command); const id = output?.trim(); return id; } catch (error) { - logger - .child({ error }) - .error(`[BUILD ${deploy?.build?.uuid}][cli][codefreshDestroy] Error destroying Codefresh pipeline`); + getLogger({ buildUuid, error }).error('Error destroying Codefresh pipeline'); throw error; } } @@ -172,6 +168,7 @@ export async function waitForCodefresh(id: string) { * @param build the build to delete CLI services from */ export async function deleteBuild(build: Build) { + const buildUuid = build?.uuid; try { const buildId = build?.id; @@ -188,13 +185,13 @@ export async function deleteBuild(build: Build) { }) .map(async (deploy) => { const serviceType: DeployTypes = build.enableFullYaml ? deploy.deployable.type : deploy.service.type; - logger.info(`[DELETE ${deploy?.uuid}] Deleting CLI deploy`); + getLogger({ buildUuid }).info('Deleting CLI deploy'); return serviceType === DeployTypes.CODEFRESH ? codefreshDestroy(deploy) : deleteDeploy(deploy); }) ); - logger.info(`[DELETE ${build.uuid}] Deleted CLI resources`); + getLogger({ buildUuid }).info('Deleted CLI resources'); } catch (e) { - logger.error(`[DELETE ${build.uuid}] Error deleting CLI resources: ${e}`); + getLogger({ buildUuid, error: e }).error('Error deleting CLI resources'); } } diff --git a/src/server/lib/codefresh/index.ts b/src/server/lib/codefresh/index.ts index ef35403..4407437 100644 --- a/src/server/lib/codefresh/index.ts +++ b/src/server/lib/codefresh/index.ts @@ -15,7 +15,7 @@ */ import { shellPromise } from 'server/lib/shell'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { generateCodefreshCmd, constructEcrTag, getCodefreshPipelineIdFromOutput } from 'server/lib/codefresh/utils'; import { waitUntil } from 'server/lib/utils'; import { ContainerBuildOptions } from 'server/lib/codefresh/types'; @@ -23,23 +23,17 @@ import { Metrics } from 'server/lib/metrics'; import { ENVIRONMENT } from 'shared/config'; import GlobalConfigService from 'server/services/globalConfig'; -const logger = rootLogger.child({ - filename: 'lib/codefresh/codefresh.ts', -}); - export const tagExists = async ({ tag, ecrRepo = 'lifecycle-deployments', uuid = '' }) => { const { lifecycleDefaults } = await GlobalConfigService.getInstance().getAllConfigs(); const repoName = ecrRepo; - // fetch the ecr registry id from ecrDomain value `acctid.dkr.ecr.us-west-2.amazonaws.com`. 
this is useful if registry is in a different account - // if its in the same account as lifecycle app, still passed for clarity here const registryId = (lifecycleDefaults.ecrDomain?.split?.('.') || [])[0] || ''; try { const command = `aws ecr describe-images --repository-name=${repoName} --image-ids=imageTag=${tag} --no-paginate --no-cli-auto-prompt --registry-id ${registryId}`; await shellPromise(command); - logger.info(`[BUILD ${uuid}] Image with tag:${tag} exists in ecr repo ${repoName}`); + getLogger().info(`ECR: tag=${tag} exists in ${repoName}`); return true; } catch (error) { - logger.info(`[BUILD ${uuid}] Image with tag:${tag} does not exist in ecr repo ${repoName}`); + getLogger().debug(`ECR: tag=${tag} not found in ${repoName}`); return false; } }; @@ -47,7 +41,6 @@ export const tagExists = async ({ tag, ecrRepo = 'lifecycle-deployments', uuid = export const buildImage = async (options: ContainerBuildOptions) => { const { repo: repositoryName, branch, uuid, revision: sha, tag } = options; const metrics = new Metrics('build.codefresh.image', { uuid, repositoryName, branch, sha }); - const prefix = uuid ? `[DEPLOY ${uuid}][buildImage]:` : '[DEPLOY][buildImage]:'; const suffix = `${repositoryName}/${branch}:${sha}`; const eventDetails = { title: 'Codefresh Build Image', @@ -62,7 +55,7 @@ export const buildImage = async (options: ContainerBuildOptions) => { metrics .increment('total', { error: 'error_with_cli_output', result: 'error', codefreshBuildId: '' }) .event(eventDetails.title, eventDetails.description); - logger.child({ output }).error(`${prefix}[noCodefreshBuildOutput] no output from Codefresh for ${suffix}`); + getLogger().error({ output }, `buildImage noCodefreshBuildOutput: no output from Codefresh for ${suffix}`); if (!hasOutput) throw Error('no output from Codefresh'); } const codefreshBuildId = getCodefreshPipelineIdFromOutput(output); @@ -77,7 +70,7 @@ export const buildImage = async (options: ContainerBuildOptions) => { .event(eventDetails.title, eventDetails.description); return codefreshBuildId; } catch (error) { - logger.child({ error }).error(`${prefix} failed for ${suffix}`); + getLogger().error({ error }, `buildImage failed for ${suffix}`); throw error; } }; diff --git a/src/server/lib/comment.ts b/src/server/lib/comment.ts index 5c09626..9b3c868 100644 --- a/src/server/lib/comment.ts +++ b/src/server/lib/comment.ts @@ -14,14 +14,10 @@ * limitations under the License. 
*/ -import rootLogger from './logger'; +import { getLogger } from './logger/index'; import { CommentParser } from 'shared/constants'; import { compact, flatten, set } from 'lodash'; -const logger = rootLogger.child({ - filename: 'lib/comment.ts', -}); - export class CommentHelper { public static parseServiceBranches(comment: string): Array<{ active: boolean; @@ -75,7 +71,7 @@ export class CommentHelper { }); const obj = {}; envLines.forEach((line) => { - logger.debug('Parsing line: %s', line); + getLogger().debug(`Parsing environment override line=${line}`); const match = line.match(/ENV:([^:]*):(.*)/m); const key = match[1]; const value = match[2]; diff --git a/src/server/lib/configFileWebhookEnvVariables.ts b/src/server/lib/configFileWebhookEnvVariables.ts index 7c6eff2..41ef63d 100644 --- a/src/server/lib/configFileWebhookEnvVariables.ts +++ b/src/server/lib/configFileWebhookEnvVariables.ts @@ -16,14 +16,10 @@ import { EnvironmentVariables } from 'server/lib/envVariables'; import { Build } from 'server/models'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Webhook } from 'server/models/yaml'; import { FeatureFlags } from 'shared/constants'; -const logger = rootLogger.child({ - filename: 'lib/configFileWebhookEnvVariables.ts', -}); - export class ConfigFileWebhookEnvironmentVariables extends EnvironmentVariables { /** * Use lifecycle yaml file while exists; otherwise, falling back to the old LC services table env column. @@ -58,7 +54,7 @@ export class ConfigFileWebhookEnvironmentVariables extends EnvironmentVariables await build?.$fetchGraph('[services, deploys.service.repository]'); } else { - logger.fatal("Build and Webhook shouldn't be undefined."); + getLogger().fatal("Build and Webhook shouldn't be undefined"); } return result; diff --git a/src/server/lib/deploymentManager/deploymentManager.ts b/src/server/lib/deploymentManager/deploymentManager.ts index d5b0956..00be405 100644 --- a/src/server/lib/deploymentManager/deploymentManager.ts +++ b/src/server/lib/deploymentManager/deploymentManager.ts @@ -20,11 +20,10 @@ import { DeployStatus, DeployTypes, CLIDeployTypes } from 'shared/constants'; import { createKubernetesApplyJob, monitorKubernetesJob } from '../kubernetesApply/applyManifest'; import { nanoid, customAlphabet } from 'nanoid'; import DeployService from 'server/services/deploy'; -import rootLogger from 'server/lib/logger'; +import { getLogger, updateLogContext } from 'server/lib/logger/index'; import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount'; import { waitForDeployPodReady } from '../kubernetes'; -const logger = rootLogger.child({ filename: 'lib/deploymentManager/deploymentManager.ts' }); const generateJobId = customAlphabet('abcdefghijklmnopqrstuvwxyz0123456789', 6); export class DeploymentManager { @@ -78,7 +77,6 @@ export class DeploymentManager { level++; } - // Log final deployment order in a single line const orderSummary = Array.from({ length: this.deploymentLevels.size }, (_, i) => { const services = this.deploymentLevels @@ -88,7 +86,7 @@ export class DeploymentManager { return `L${i}=[${services}]`; }).join(' '); - logger.info(`DeploymentManager: Deployment order calculated levels=${this.deploymentLevels.size} ${orderSummary}`); + getLogger().info(`Deploy: ${this.deploymentLevels.size} levels ${orderSummary}`); } private removeInvalidDependencies(): void { @@ -102,8 +100,6 @@ export class DeploymentManager { } public async deploy(): Promise { - const buildUuid = 
this.deploys.values().next().value?.build?.uuid || 'unknown'; - for (const value of this.deploys.values()) { await value.$query().patch({ status: DeployStatus.QUEUED }); } @@ -116,9 +112,7 @@ export class DeploymentManager { const helmServices = helmDeploys.map((d) => d.deployable.name).join(','); const k8sServices = githubDeploys.map((d) => d.deployable.name).join(','); - logger.info( - `DeploymentManager: Deploying level=${level} buildUuid=${buildUuid} helm=[${helmServices}] k8s=[${k8sServices}]` - ); + getLogger().info(`Deploy: level ${level} helm=[${helmServices}] k8s=[${k8sServices}]`); await Promise.all([ helmDeploys.length > 0 ? deployHelm(helmDeploys) : Promise.resolve(), @@ -140,6 +134,7 @@ export class DeploymentManager { } private async deployManifests(deploy: Deploy): Promise { + updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }); const jobId = generateJobId(); const deployService = new DeployService(); const runUUID = deploy.runUUID || nanoid(); diff --git a/src/server/lib/envVariables.ts b/src/server/lib/envVariables.ts index 63b22fe..55cd144 100644 --- a/src/server/lib/envVariables.ts +++ b/src/server/lib/envVariables.ts @@ -27,15 +27,10 @@ import { NO_DEFAULT_ENV_UUID, } from 'shared/constants'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { LifecycleError } from './errors'; import GlobalConfigService from 'server/services/globalConfig'; -// eslint-disable-next-line no-unused-vars -const logger = rootLogger.child({ - filename: 'lib/envVariables.ts', -}); - const ALLOWED_PROPERTIES = [ 'branchName', 'ipAddress', @@ -379,9 +374,7 @@ export abstract class EnvironmentVariables { /_publicUrl$/, `-${globalConfig.lifecycleDefaults.defaultPublicUrl}` ); - logger.debug( - `[BUILD ${data['buildUUID']}] The publicUrl for ${serviceToUpdate} has been defaulted to ${defaultedPublicUrl} using the global_config table` - ); + getLogger().debug(`publicUrl for ${serviceToUpdate} defaulted to ${defaultedPublicUrl} using global_config`); template = template.replace(fullMatch, defaultedPublicUrl); } } diff --git a/src/server/lib/fastly.ts b/src/server/lib/fastly.ts index f0690d4..78c0afb 100644 --- a/src/server/lib/fastly.ts +++ b/src/server/lib/fastly.ts @@ -15,7 +15,7 @@ */ import * as FastlyInstance from 'fastly/dist/index.js'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Redis } from 'ioredis'; import { FASTLY_TOKEN } from 'shared/config'; import GlobalConfigService from 'server/services/globalConfig'; @@ -24,10 +24,6 @@ FastlyInstance.ApiClient.instance.authenticate(FASTLY_TOKEN); const fastlyService = new FastlyInstance.ServiceApi(); const fastlyPurge = new FastlyInstance.PurgeApi(); -const logger = rootLogger.child({ - filename: 'lib/fastly.ts', -}); - class Fastly { redis: Redis; @@ -68,7 +64,6 @@ class Fastly { const serviceName = `${fastlyServiceType}-${uuid}`; const FASTLY_URL = await this.getFastlyUrl(); const name = `${serviceName}.${FASTLY_URL}`; - const text = `[BUILD ${uuid}][fastly][refresh][serviceName ${name}]`; try { if (!name) throw new Error('Service name is missing'); const service = await fastlyService.searchService({ name }); @@ -84,7 +79,7 @@ class Fastly { this.redis.expire(cacheKey, 86400); return id; } catch (error) { - logger.child({ error }).warn(`${text} There is an issue to retrieve Fastly service id from Fastly`); + getLogger().warn({ error }, `Fastly lookup failed: serviceName=${name}`); } } @@ -108,16 
+103,11 @@ class Fastly { * @param serviceId Fastly Service ID */ async purgeAllServiceCache(serviceId: string, uuid: string, fastlyServiceType: string) { - const text = `[BUILD ${uuid}][fastly][purgeAllServiceCache]`; try { if (!serviceId) throw new Error('Service ID is missing'); await fastlyPurge.purgeAll({ service_id: serviceId }); } catch (error) { - logger - .child({ error }) - .info( - `${text}[serviceid ${serviceId}] has an error with the ${fastlyServiceType} service with ${serviceId} service id` - ); + getLogger().info({ error }, `Fastly cache purge failed: serviceId=${serviceId} serviceType=${fastlyServiceType}`); } } diff --git a/src/server/lib/github/__tests__/index.test.ts b/src/server/lib/github/__tests__/index.test.ts index 3ff7c73..a2ab110 100644 --- a/src/server/lib/github/__tests__/index.test.ts +++ b/src/server/lib/github/__tests__/index.test.ts @@ -45,7 +45,15 @@ jest.mock('server/services/globalConfig', () => { jest.mock('axios'); jest.mock('server/lib/github/client'); jest.mock('server/lib/github/utils'); -jest.mock('server/lib/logger'); +jest.mock('server/lib/logger/index', () => ({ + getLogger: jest.fn().mockReturnValue({ + info: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + warn: jest.fn(), + }), +})); +import { getLogger } from 'server/lib/logger/index'; import logger from 'server/lib/logger'; test('createOrUpdatePullRequestComment success', async () => { @@ -138,7 +146,7 @@ test('getSHAForBranch failure', async () => { const mockError = new Error('error'); (utils.getRefForBranchName as jest.Mock).mockRejectedValue(mockError); await expect(getSHAForBranch('main', 'foo', 'bar')).rejects.toThrow('error'); - expect(logger.child).toHaveBeenCalledWith({ error: mockError }); + expect(getLogger).toHaveBeenCalledWith({ error: mockError, repo: 'foo/bar', branch: 'main' }); }); test('checkIfCommentExists to return true', async () => { diff --git a/src/server/lib/github/client.ts b/src/server/lib/github/client.ts index 3ab3ea3..7e52b68 100644 --- a/src/server/lib/github/client.ts +++ b/src/server/lib/github/client.ts @@ -18,13 +18,8 @@ import PQueue from 'p-queue'; import { constructOctokitClient, constructClientRequestData } from 'server/lib/github/utils'; import { CreateOctokitClientOptions } from 'server/lib/github/types'; import GlobalConfigService from 'server/services/globalConfig'; -import rootLogger from 'server/lib/logger'; import { Metrics } from 'server/lib/metrics'; -const initialLogger = rootLogger.child({ - filename: 'lib/github/client.ts', -}); - const queue = new PQueue({ concurrency: 100, intervalCap: 40, @@ -32,12 +27,7 @@ const queue = new PQueue({ carryoverConcurrencyCount: true, }); -export const createOctokitClient = async ({ - accessToken, - // eslint-disable-next-line no-unused-vars - logger = initialLogger, - caller = '', -}: CreateOctokitClientOptions = {}) => { +export const createOctokitClient = async ({ accessToken, caller = '' }: CreateOctokitClientOptions = {}) => { let token: string | undefined = await GlobalConfigService.getInstance().getGithubClientToken(); if (!token) token = accessToken; const octokit = constructOctokitClient({ token }); diff --git a/src/server/lib/github/deployments.ts b/src/server/lib/github/deployments.ts index a2f3f41..add150b 100644 --- a/src/server/lib/github/deployments.ts +++ b/src/server/lib/github/deployments.ts @@ -18,11 +18,7 @@ import { Deploy } from 'server/models'; import { cacheRequest } from 'server/lib/github/cacheRequest'; import { getPullRequest } from 'server/lib/github/index'; import { 
DeployStatus } from 'shared/constants'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ - filename: 'github/deployments.ts', -}); +import { getLogger } from 'server/lib/logger/index'; const githubDeploymentStatuses = { deployed: 'success', @@ -43,12 +39,9 @@ function lifecycleToGithubStatus(status: string) { } export async function createOrUpdateGithubDeployment(deploy: Deploy) { - const uuid = deploy.uuid; - const text = `[DEPLOY ${uuid}][createOrUpdateGithubDeployment]`; - const suffix = 'creating or updating github deployment'; - logger.debug(`${text} ${suffix}`); + getLogger().debug('Creating or updating github deployment'); try { - logger.child({ deploy }).info(`${text}[deploymentStatus] deploy status`); + getLogger().info('Deploy status update'); await deploy.$fetchGraph('build.pullRequest.repository'); const githubDeploymentId = deploy?.githubDeploymentId; const build = deploy?.build; @@ -63,9 +56,6 @@ export async function createOrUpdateGithubDeployment(deploy: Deploy) { if (hasDeployment) { const deploymentResp = await getDeployment(deploy); const deploymentSha = deploymentResp?.data?.sha; - /** - * @note If the last commit is different than the deploy sha, delete the deployment, time for a new deployment - **/ if (lastCommit !== deploymentSha) { await deleteGithubDeploymentAndEnvironment(deploy); } else { @@ -74,14 +64,11 @@ export async function createOrUpdateGithubDeployment(deploy: Deploy) { } } await createGithubDeployment(deploy, lastCommit); - /** - * @note this captures a redeployed deployment; sometimes it happens immediately - */ if (build?.status === 'deployed') { await updateDeploymentStatus(deploy, githubDeploymentId); } } catch (error) { - logger.child({ error }).error(`${text} error ${suffix}`); + getLogger({ error }).error('Error creating or updating github deployment'); throw error; } } @@ -95,7 +82,6 @@ export async function deleteGithubDeploymentAndEnvironment(deploy: Deploy) { export async function createGithubDeployment(deploy: Deploy, ref: string) { const environment = deploy.uuid; - const text = `[DEPLOY ${environment}][createGithubDeployment]`; const pullRequest = deploy?.build?.pullRequest; const repository = pullRequest?.repository; const fullName = repository?.fullName; @@ -115,13 +101,16 @@ export async function createGithubDeployment(deploy: Deploy, ref: string) { await deploy.$query().patch({ githubDeploymentId }); return resp; } catch (error) { - logger.child({ error }).error(`${text} Error creating github deployment`); + getLogger({ + error, + repo: fullName, + }).error('Error creating github deployment'); throw error; } } export async function deleteGithubDeployment(deploy: Deploy) { - logger.debug(`[DEPLOY ${deploy.uuid}] Deleting github deployment for deploy ${deploy.uuid}`); + getLogger().debug('Deleting github deployment'); if (!deploy?.build) await deploy.$fetchGraph('build.pullRequest.repository'); const resp = await cacheRequest( `DELETE /repos/${deploy.build.pullRequest.repository.fullName}/deployments/${deploy.githubDeploymentId}` @@ -133,13 +122,12 @@ export async function deleteGithubDeployment(deploy: Deploy) { } export async function deleteGithubEnvironment(deploy: Deploy) { - logger.debug(`[DEPLOY ${deploy.uuid}] Deleting github environment for deploy ${deploy.uuid}`); + getLogger().debug('Deleting github environment'); if (!deploy?.build) await deploy.$fetchGraph('build.pullRequest.repository'); const repository = deploy.build.pullRequest.repository; try { await cacheRequest(`DELETE 
/repos/${repository.fullName}/environments/${deploy.uuid}`); } catch (e) { - // If the environment doesn't exist, we don't care if (e.status !== 404) { throw e; } @@ -147,7 +135,7 @@ export async function deleteGithubEnvironment(deploy: Deploy) { } export async function updateDeploymentStatus(deploy: Deploy, deploymentId: number) { - logger.debug(`[DEPLOY ${deploy.uuid}] Updating github deployment status for deploy ${deploy.uuid}`); + getLogger().debug('Updating github deployment status'); const repository = deploy.build.pullRequest.repository; let buildStatus = determineStatus(deploy); const resp = await cacheRequest(`POST /repos/${repository.fullName}/deployments/${deploymentId}/statuses`, { diff --git a/src/server/lib/github/index.ts b/src/server/lib/github/index.ts index 21e6f00..e189855 100644 --- a/src/server/lib/github/index.ts +++ b/src/server/lib/github/index.ts @@ -19,7 +19,7 @@ import crypto from 'crypto'; import { NextApiRequest } from 'next'; import { GITHUB_WEBHOOK_SECRET } from 'shared/config'; import { LifecycleError } from 'server/lib/errors'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { createOctokitClient } from 'server/lib/github/client'; import { cacheRequest } from 'server/lib/github/cacheRequest'; import { LIFECYCLE_FILE_NAME_REGEX } from 'server/lib/github/constants'; @@ -28,10 +28,6 @@ import { getRefForBranchName } from 'server/lib/github/utils'; import { Deploy } from 'server/models'; import { LifecycleYamlConfigOptions } from 'server/models/yaml/types'; -export const initialLogger = rootLogger.child({ - filename: 'lib/github/index.ts', -}); - export async function createOrUpdatePullRequestComment({ installationId, pullRequestNumber, @@ -51,7 +47,11 @@ export async function createOrUpdatePullRequestComment({ }); } catch (error) { const msg = 'Unable to create or update pull request comment'; - initialLogger.child({ error }).error(`[GITHUB ${fullName}/${pullRequestNumber}] ${msg} - original error: ${error}`); + getLogger({ + error, + repo: fullName, + pr: pullRequestNumber, + }).error(msg); throw new Error(error?.message || msg); } } @@ -74,41 +74,40 @@ export async function updatePullRequestLabels({ data: { labels }, }); } catch (error) { - initialLogger - .child({ error }) - .error( - `[GITHUB ${fullName}/${pullRequestNumber}] Unable to update pull request with '${labels.toString()}': ${error}` - ); + getLogger({ + error, + repo: fullName, + pr: pullRequestNumber, + labels: labels.toString(), + }).error('Unable to update pull request labels'); throw error; } } -export async function getPullRequest( - owner: string, - name: string, - pullRequestNumber: number, - _installationId: number, - logger = initialLogger -) { +export async function getPullRequest(owner: string, name: string, pullRequestNumber: number, _installationId: number) { try { return await cacheRequest(`GET /repos/${owner}/${name}/pulls/${pullRequestNumber}`); } catch (error) { const msg = 'Unable to retrieve pull request'; - logger.error(`[GITHUB ${owner}/${name}/pulls/${pullRequestNumber}] ${msg}: ${error}`); + getLogger({ + error, + repo: `${owner}/${name}`, + pr: pullRequestNumber, + }).error(msg); throw new Error(error?.message || msg); } } -export async function getPullRequestByRepositoryFullName( - fullName: string, - pullRequestNumber: number, - logger = initialLogger -) { +export async function getPullRequestByRepositoryFullName(fullName: string, pullRequestNumber: number) { try { return await cacheRequest(`GET 
/repos/${fullName}/pulls/${pullRequestNumber}`); } catch (error) { const msg = 'Unable to retrieve pull request'; - logger.error(`[GITHUB ${fullName}/pulls/${pullRequestNumber}] ${msg}: ${error}`); + getLogger({ + error, + repo: fullName, + pr: pullRequestNumber, + }).error(msg); throw new Error(error?.message || msg); } } @@ -134,12 +133,16 @@ export async function getPullRequestLabels({ const response = await client.request(`GET /repos/${fullName}/issues/${pullRequestNumber}`); return response.data.labels.map((label: any) => label.name); } catch (error) { - initialLogger.error(`[GITHUB ${fullName}/${pullRequestNumber}] Unable to fetch labels: ${error}`); + getLogger({ + error, + repo: fullName, + pr: pullRequestNumber, + }).error('Unable to fetch labels'); throw error; } } -export async function createDeploy({ owner, name, branch, installationId, logger = initialLogger }: RepoOptions) { +export async function createDeploy({ owner, name, branch, installationId }: RepoOptions) { try { const octokit = await createOctokitClient({ installationId, caller: 'createDeploy' }); return await octokit.request(`POST /repos/${owner}/${name}/builds`, { @@ -150,7 +153,11 @@ export async function createDeploy({ owner, name, branch, installationId, logger }); } catch (error) { const msg = 'Unable to create deploy'; - logger.child({ error }).error(`[GITHUB ${owner}/${name}/${branch}] ${msg}`); + getLogger({ + error, + repo: `${owner}/${name}`, + branch, + }).error(msg); throw new Error(error?.message || msg); } } @@ -187,24 +194,22 @@ export async function getShaForDeploy(deploy: Deploy) { } } -export async function getSHAForBranch( - branchName: string, - owner: string, - name: string, - logger = initialLogger -): Promise { +export async function getSHAForBranch(branchName: string, owner: string, name: string): Promise { try { const ref = await getRefForBranchName(owner, name, branchName); return ref?.data?.object?.sha; } catch (error) { const msg = 'Unable to retrieve SHA from branch'; - logger.child({ error }).warn(`[GITHUB ${owner}/${name}/${branchName}] ${msg}`); + getLogger({ + error, + repo: `${owner}/${name}`, + branch: branchName, + }).warn(msg); throw new Error(error?.message || msg); } } -export async function getYamlFileContent({ fullName, branch = '', sha = '', isJSON = false, logger = initialLogger }) { - const text = `[${fullName}:${branch}][getYamlFileContent]`; +export async function getYamlFileContent({ fullName, branch = '', sha = '', isJSON = false }) { try { const identifier = sha?.length > 0 ? 
sha : branch; const treeResp = await cacheRequest(`GET /repos/${fullName}/git/trees/${identifier}`); @@ -238,18 +243,13 @@ export async function getYamlFileContent({ fullName, branch = '', sha = '', isJS return configData; } catch (error) { - const msg = 'warning: no lifecycle yaml found or parsed'; - logger.child({ error }).warn(`${text}${msg}`); + const msg = 'No lifecycle yaml found or parsed'; + getLogger({ error, repo: fullName, branch }).warn(msg); throw new ConfigFileNotFound(error?.message || msg); } } -export async function getYamlFileContentFromPullRequest( - fullName: string, - pullRequestNumber: number, - logger = initialLogger -) { - const [owner, repo] = fullName.split('/'); +export async function getYamlFileContentFromPullRequest(fullName: string, pullRequestNumber: number) { try { const pullRequestResp = await getPullRequestByRepositoryFullName(fullName, pullRequestNumber); const branch = pullRequestResp?.data?.head?.ref; @@ -259,23 +259,29 @@ export async function getYamlFileContentFromPullRequest( return config; } catch (error) { const msg = 'Unable to retrieve YAML file content from pull request'; - logger.child({ error }).warn(`[GITHUB ${owner}/${repo}/pulls/${pullRequestNumber}] ${msg}`); + getLogger({ + error, + repo: fullName, + pr: pullRequestNumber, + }).warn(msg); throw new ConfigFileNotFound(error?.message || msg); } } export async function getYamlFileContentFromBranch( fullName: string, - branchName: string, - logger = initialLogger + branchName: string ): Promise { - const [owner, repo] = fullName.split('/'); try { const config = await getYamlFileContent({ fullName, branch: branchName }); return config; } catch (error) { const msg = 'Unable to retrieve YAML file content from branch'; - logger.child({ error }).warn(`[GITHUB ${owner}/${repo}/${branchName}] ${msg}`); + getLogger({ + error, + repo: fullName, + branch: branchName, + }).warn(msg); throw new ConfigFileNotFound(error?.message || msg); } } @@ -284,7 +290,6 @@ export async function checkIfCommentExists({ fullName, pullRequestNumber, commentIdentifier, - logger = initialLogger, }: CheckIfCommentExistsOptions) { try { const resp = await cacheRequest(`GET /repos/${fullName}/issues/${pullRequestNumber}/comments`); @@ -292,8 +297,11 @@ export async function checkIfCommentExists({ const isExistingComment = comments.find(({ body }) => body?.includes(commentIdentifier)) || false; return isExistingComment; } catch (error) { - const msg = 'Unable check for coments'; - logger.child({ error }).error(`[GITHUB ${fullName}][checkIfCommentExists] ${msg}`); + getLogger({ + error, + repo: fullName, + pr: pullRequestNumber, + }).error('Unable to check for comments'); return false; } } diff --git a/src/server/lib/github/types.ts b/src/server/lib/github/types.ts index 2bd7173..d86e41a 100644 --- a/src/server/lib/github/types.ts +++ b/src/server/lib/github/types.ts @@ -26,7 +26,6 @@ export interface RepoOptions { owner?: string; name?: string; githubPullRequestId?: number; - logger?: Logger; } export type DeployState = 'error' | 'failure' | 'inactive' | 'in_progress' | 'queued' | 'pending' | 'success'; @@ -40,7 +39,6 @@ export type GetAppTokenOptions = { export type CreateOctokitClientOptions = { accessToken?: string; installationId?: number; - logger?: Logger; caller?: string; cache?: typeof Redis; }; @@ -118,7 +116,6 @@ export interface CheckIfCommentExistsOptions { fullName: string; pullRequestNumber: number; commentIdentifier: string; - logger?: Logger; } export interface DetermineIfQueueIsNeededOptions { diff --git 
a/src/server/lib/helm/helm.ts b/src/server/lib/helm/helm.ts index b069382..2ca0eac 100644 --- a/src/server/lib/helm/helm.ts +++ b/src/server/lib/helm/helm.ts @@ -20,7 +20,7 @@ import Deploy from 'server/models/Deploy'; import GlobalConfigService from 'server/services/globalConfig'; import { TMP_PATH } from 'shared/config'; import { DeployStatus } from 'shared/constants'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { shellPromise } from 'server/lib/shell'; import { kubeContextStep } from 'server/lib/codefresh'; import Build from 'server/models/Build'; @@ -36,10 +36,6 @@ import { const CODEFRESH_PATH = `${TMP_PATH}/codefresh`; -const logger = rootLogger.child({ - filename: 'lib/helm/helm.ts', -}); - /** * Generates codefresh deployment step for public Helm charts. * We are manily using the `helm` column from deployable table. @@ -250,7 +246,7 @@ export async function deployHelm(deploys: Deploy[]) { */ export async function fetchUntilSuccess(url, retries, deploy, namespace) { - logger.info(`[Number of maxRetries: ${retries}] Trying to fetch the url: ${url}`); + getLogger().debug(`Helm: waiting for pods url=${url} maxRetries=${retries}`); for (let i = 0; i < retries; i++) { const pods = await shellPromise( `kubectl get deploy ${deploy} -n ${namespace} -o jsonpath='{.status.availableReplicas}'` @@ -258,14 +254,14 @@ export async function fetchUntilSuccess(url, retries, deploy, namespace) { try { const response = await fetch(url); if (1 <= parseInt(pods, 10)) { - logger.info(` [ On Deploy ${deploy} ] There's ${pods} pods available for deployment`); + getLogger().debug(`Pods: available deploy=${deploy} pods=${pods}`); return; } else { - logger.info(` [ On Deploy ${deploy} ] There's 0 pods available for deployment`); - logger.error(`[ REQUEST TO ${url}] Request failed and Status code number: ${response.status}`); + getLogger().debug(`Pods: unavailable deploy=${deploy}`); + getLogger().error(`Helm: request failed url=${url} status=${response.status}`); } } catch (error) { - logger.error(`[ Error function fetchUntilSuccess : ${error.message}`); + getLogger().error({ error }, `Helm: fetch failed url=${url}`); } await new Promise((resolve) => setTimeout(resolve, 10000)); } @@ -291,7 +287,7 @@ export async function generateCodefreshRunCommand(deploy: Deploy): Promise { deploy?.build.isStatic != undefined ) { ingressValues.push(`ingress.backendService=${deploy.uuid}-external-service`, 'ingress.port=8080'); - logger.info(`[INGRESS] Redirect ingress request to Keda proxy`); + getLogger().debug(`Helm: redirecting ingress to KEDA proxy`); } return ingressValues; @@ -547,9 +543,7 @@ export const constructHelmDeploysBuildMetaData = async (deploys: Deploy[]) => { error: '', }; } catch (error) { - logger - .child({ error }) - .error(`[BUILD][constructHelmDeploysBuildMetaData] Failed to construct Helm deploy metadata: ${error?.message}`); + getLogger().error({ error }, `Helm: metadata construction failed`); return { uuid: '', branchName: '', diff --git a/src/server/lib/kubernetes.ts b/src/server/lib/kubernetes.ts index b4d4312..527062e 100644 --- a/src/server/lib/kubernetes.ts +++ b/src/server/lib/kubernetes.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import rootLogger from './logger'; +import { getLogger } from './logger/index'; import yaml from 'js-yaml'; import _ from 'lodash'; import { Build, Deploy, Deployable, Service } from 'server/models'; @@ -31,10 +31,6 @@ import GlobalConfigService from 'server/services/globalConfig'; import { setupServiceAccountWithRBAC } from './kubernetes/rbac'; import { staticEnvTolerations } from './helm/constants'; -const logger = rootLogger.child({ - filename: 'lib/kubernetes.ts', -}); - interface VOLUME { name: string; emptyDir?: {}; @@ -51,7 +47,7 @@ async function namespaceExists(client: k8s.CoreV1Api, name: string): Promise { - // Check if this is a legacy deployment (has build.manifest) if (!build.manifest || build.manifest.trim().length === 0) { - // New deployments are handled by DeploymentManager - logger.info(`[Build ${build.uuid}] No build manifest found, using new deployment pattern via DeploymentManager`); + getLogger().info('Deploying via DeploymentManager'); return []; } - // Legacy deployment path - apply manifest directly - logger.info(`[Build ${build.uuid}] Using legacy deployment pattern with build.manifest`); + getLogger().info('Deploying via legacy manifest'); const kc = new k8s.KubeConfig(); kc.loadFromDefault(); @@ -383,12 +370,14 @@ export async function applyManifests(build: Build): Promise 0) { - logger.info(`${buildTaxonomy} Pods created ${gitTaxonomy}`); + getLogger(logCtx).info('Pods: created'); break; } else if (retries < 60) { - // wait for 5 minutes for pods to be created retries += 1; await new Promise((r) => setTimeout(r, 5000)); } else { - logger.warn(`${buildTaxonomy} No pods found within 5 minutes ${gitTaxonomy}. `); + getLogger(logCtx).warn('No pods found within 5 minutes'); break; } } retries = 0; - logger.info(`${buildTaxonomy} Waiting 15 minutes for pods to be ready ${gitTaxonomy}`); + getLogger(logCtx).info('Pods: waiting for ready state'); // eslint-disable-next-line no-constant-condition while (true) { let isReady = false; try { const pods = await getPods({ uuid, namespace }); - // only check pods that are not managed by Helm const matches = pods?.filter( (pod) => @@ -553,18 +545,20 @@ export async function waitForPodReady(build: Build) { return conditions.some((condition) => condition?.type === 'Ready' && condition?.status === 'True'); }); } catch (error) { - logger.child({ error, isReady }).warn(`${buildTaxonomy} error checking pod readiness ${gitTaxonomy}`); + getLogger({ ...logCtx, error, isReady }).warn('Error checking pod readiness'); } if (isReady) { - logger.info(`${buildTaxonomy} Pods are ready ${gitTaxonomy}`); + getLogger(logCtx).info('Pods: ready'); return true; } if (retries < 180) { retries += 1; await new Promise((r) => setTimeout(r, 5000)); } else { - throw new Error(`${buildTaxonomy} Pods for build not ready after 15 minutes ${gitTaxonomy}`); + throw new Error( + `Pods for build not ready after 15 minutes buildUuid=${uuid} repo=${fullName} branch=${branchName}` + ); } } } @@ -578,9 +572,12 @@ export async function deleteBuild(build: Build) { await shellPromise( `kubectl delete all,pvc,mapping,Httpscaledobjects -l lc_uuid=${build.uuid} --namespace ${build.namespace}` ); - logger.info(`[DELETE ${build.uuid}] Deleted kubernetes resources`); + getLogger({ namespace: build.namespace }).info('Kubernetes: resources deleted'); } catch (e) { - logger.error(`[DELETE ${build.uuid}] Error deleting kubernetes resources: ${e}`); + getLogger({ + namespace: build.namespace, + error: e, + }).error('Error deleting kubernetes resources'); } } @@ -589,21 
+586,16 @@ export async function deleteBuild(build: Build) { * @param name namespace to delete */ export async function deleteNamespace(name: string) { - // this is a final safety check to only delete namespaces that start with `env-` if (!name.startsWith('env-')) return; try { - // Native helm now uses namespace-scoped RBAC (Role/RoleBinding) which gets deleted with the namespace - // No need for manual cleanup of cluster-level resources - - // adding a grace-period to make sure resources and finalizers are gone before we delete the namespace await shellPromise(`kubectl delete ns ${name} --grace-period 120`); - logger.info(`[DELETE ${name}] Deleted namespace`); + getLogger({ namespace: name }).info('Namespace: deleted'); } catch (e) { if (e.includes('Error from server (NotFound): namespaces')) { - logger.info(`[DELETE ${name}] Namespace not found, skipping deletion.`); + getLogger({ namespace: name }).info('Namespace: not found, skipping'); } else { - logger.error(`[DELETE ${name}] Error deleting namespace: ${e}`); + getLogger({ namespace: name, error: e }).error('Error deleting namespace'); } } } @@ -657,7 +649,7 @@ export function generateManifest({ const manifest = `${disks}---\n${builds}---\n${nodePorts}---\n${grpcMappings}---\n${loadBalancers}---\n${externalNameServices}`; const isDev = APP_ENV?.includes('dev') ?? false; if (!isDev) { - logger.child({ manifest }).info(`[BUILD ${build.uuid}][lifecycleConfigLog][kubernetesManifest] Generated manifest`); + getLogger({ manifest }).info('Generated kubernetes manifest'); } return manifest; } @@ -1120,13 +1112,13 @@ export function generateDeployManifests( }); break; default: - logger.warn(`Unknown disk medium type: ${disk.medium}`); + getLogger({ medium: disk.medium }).warn('Unknown disk medium type'); } }); } } } else { - logger.debug('Service disks: %j', service.serviceDisks); + getLogger({ serviceDisks: service.serviceDisks }).debug('Processing service disks'); if (service.serviceDisks && service.serviceDisks.length > 0) { strategy = { // @ts-ignore @@ -1199,7 +1191,7 @@ export function generateDeployManifests( 'tags.datadoghq.com/version': buildUUID, }; - if (build.isStatic) logger.info(`${buildUUID} building static environment`); + if (build.isStatic) getLogger().info('Building static environment'); const yamlManifest = yaml.dump( { @@ -1484,13 +1476,13 @@ export function generateExternalNameManifests(deploys: Deploy[], buildUUID: stri return deploys .filter((deploy) => { if (deploy.active) { - logger.debug(`Deploy ${deploy.id} ${deploy.cname}`); + getLogger({ deployId: deploy.id, cname: deploy.cname }).debug('Checking deploy for external service'); return deploy.cname !== undefined && deploy.cname !== null; } }) .map((deploy) => { const name = deploy.uuid; - logger.debug(`Creating external service for ${name}`); + getLogger().debug('Creating external service'); return yaml.dump( { apiVersion: 'v1', @@ -1575,7 +1567,7 @@ export async function checkKubernetesStatus(build: Build) { try { status += (await shellPromise(command)) + '\n'; } catch (err) { - logger.debug(`[${build.uuid}] ${command} ==> ${err}`); + getLogger({ command, error: err }).debug('Error executing kubectl command'); } return status; @@ -1598,7 +1590,7 @@ async function getExistingIngress(ingressName: string, namespace: string): Promi const response = await k8sApi.readNamespacedIngress(ingressName, namespace); return response.body; } catch (error) { - logger.warn(`Failed to get existing ingress ${ingressName}: ${error}`); + getLogger({ ingressName, namespace, error 
}).warn('Failed to get existing ingress'); return null; } } @@ -1647,9 +1639,9 @@ export async function patchIngress(ingressName: string, bannerSnippet: any, name `kubectl patch ingress ${ingressName} --namespace ${namespace} --type merge --patch-file ${localPath}` ); - logger.info(`Successfully patched ingress ${ingressName}`); + getLogger({ ingressName, namespace }).info('Successfully patched ingress'); } catch (error) { - logger.warn(`Unable to patch ingress ${ingressName}, banner might not work: ${error}`); + getLogger({ ingressName, namespace, error }).warn('Unable to patch ingress, banner might not work'); throw error; } } @@ -1677,7 +1669,7 @@ export async function updateSecret(secretName: string, secretData: Record { const { namespace } = build; const deployableName = deploy.deployable?.name || deploy.service?.name || 'unknown'; + const logCtx = { deployUuid: uuid, service: deployableName, namespace }; + let retries = 0; - logger.info(`[DEPLOY ${uuid}] Waiting for pods service=${deployableName} namespace=${namespace}`); + getLogger(logCtx).info('Waiting for pods'); - // Wait up to 5 minutes for pods to be created while (retries < 60) { const k8sApi = getK8sApi(); const resp = await k8sApi?.listNamespacedPod( @@ -2180,7 +2173,6 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { `deploy_uuid=${uuid}` ); const allPods = resp?.body?.items || []; - // Filter out job pods - we only want deployment/statefulset pods const pods = allPods.filter((pod) => !pod.metadata?.name?.includes('-deploy-')); if (pods.length > 0) { @@ -2192,13 +2184,12 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { } if (retries >= 60) { - logger.warn(`[DEPLOY ${uuid}] No pods found within 5 minutes service=${deployableName}`); + getLogger(logCtx).warn('No pods found within 5 minutes'); return false; } retries = 0; - // Wait up to 15 minutes for pods to be ready while (retries < 180) { const k8sApi = getK8sApi(); const resp = await k8sApi?.listNamespacedPod( @@ -2210,11 +2201,10 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { `deploy_uuid=${uuid}` ); const allPods = resp?.body?.items || []; - // Filter out job pods - we only want deployment/statefulset pods const pods = allPods.filter((pod) => !pod.metadata?.name?.includes('-deploy-')); if (pods.length === 0) { - logger.warn(`[DEPLOY ${uuid}] No deployment pods found service=${deployableName}`); + getLogger(logCtx).warn('No deployment pods found'); return false; } @@ -2225,7 +2215,7 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { }); if (allReady) { - logger.info(`[DEPLOY ${uuid}] Pods ready service=${deployableName} count=${pods.length}`); + getLogger({ ...logCtx, podCount: pods.length }).info('Pods ready'); return true; } @@ -2233,6 +2223,6 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { await new Promise((r) => setTimeout(r, 5000)); } - logger.warn(`[DEPLOY ${uuid}] Pods not ready within 15 minutes service=${deployableName}`); + getLogger(logCtx).warn('Pods not ready within 15 minutes'); return false; } diff --git a/src/server/lib/kubernetes/JobMonitor.ts b/src/server/lib/kubernetes/JobMonitor.ts index 213c93c..52a3fe3 100644 --- a/src/server/lib/kubernetes/JobMonitor.ts +++ b/src/server/lib/kubernetes/JobMonitor.ts @@ -15,7 +15,7 @@ */ import { shellPromise } from '../shell'; -import logger from '../logger'; +import { getLogger } from '../logger/index'; export interface JobStatus { logs: string; @@ -70,7 +70,7 @@ export class JobMonitor { 
status, }; } catch (error) { - logger.error(`Error monitoring job ${this.jobName}: ${error.message}`); + getLogger().error(`Error monitoring job ${this.jobName}: ${error.message}`); return { logs: logs || `Job monitoring failed: ${error.message}`, success: false, @@ -146,12 +146,12 @@ export class JobMonitor { ); logs += `\n=== Init Container Logs (${initName}) ===\n${initLogs}\n`; } catch (err: any) { - logger.debug(`Could not get logs for init container ${initName}: ${err.message || 'Unknown error'}`); + getLogger().debug(`Could not get logs for init container ${initName}: ${err.message || 'Unknown error'}`); } } } } catch (error: any) { - logger.debug(`No init containers found for pod ${podName}: ${error.message || 'Unknown error'}`); + getLogger().debug(`No init containers found for pod ${podName}: ${error.message || 'Unknown error'}`); } return logs; @@ -175,7 +175,7 @@ export class JobMonitor { if (!allContainersReady) { const waiting = statuses.find((s: any) => s.state.waiting); if (waiting && waiting.state.waiting.reason) { - logger.info( + getLogger().info( `Container ${waiting.name} is waiting: ${waiting.state.waiting.reason} - ${ waiting.state.waiting.message || 'no message' }` @@ -209,7 +209,7 @@ export class JobMonitor { containerNames = containerNames.filter((name) => containerFilters.includes(name)); } } catch (error) { - logger.warn(`Could not get container names: ${error}`); + getLogger().warn(`Could not get container names: ${error}`); } for (const containerName of containerNames) { @@ -223,7 +223,7 @@ export class JobMonitor { logs += `\n=== Container Logs (${containerName}) ===\n${containerLog}\n`; } } catch (error: any) { - logger.warn(`Error getting logs from container ${containerName}: ${error.message}`); + getLogger().warn(`Error getting logs from container ${containerName}: ${error.message}`); logs += `\n=== Container Logs (${containerName}) ===\nError retrieving logs: ${error.message}\n`; } } @@ -252,7 +252,9 @@ export class JobMonitor { await this.sleep(JobMonitor.POLL_INTERVAL); } } catch (error: any) { - logger.debug(`Job status check failed for ${this.jobName}, will retry: ${error.message || 'Unknown error'}`); + getLogger().debug( + `Job status check failed for ${this.jobName}, will retry: ${error.message || 'Unknown error'}` + ); await this.sleep(JobMonitor.POLL_INTERVAL); } } @@ -276,7 +278,7 @@ export class JobMonitor { ); if (failedStatus.trim() === 'True') { - logger.error(`Job ${this.jobName} failed`); + getLogger().error(`Job ${this.jobName} failed`); // Check if job was superseded try { @@ -286,12 +288,12 @@ export class JobMonitor { ); if (annotations === 'superseded-by-retry') { - logger.info(`${logPrefix || ''} Job ${this.jobName} superseded by newer deployment`); + getLogger().info(`${logPrefix || ''} Job ${this.jobName} superseded by newer deployment`); success = true; status = 'superseded'; } } catch (annotationError: any) { - logger.debug( + getLogger().debug( `Could not check supersession annotation for job ${this.jobName}: ${ annotationError.message || 'Unknown error' }` @@ -302,7 +304,7 @@ export class JobMonitor { status = 'succeeded'; } } catch (error) { - logger.error(`Failed to check job status for ${this.jobName}:`, error); + getLogger().error(`Failed to check job status for ${this.jobName}:`, error); } return { success, status }; diff --git a/src/server/lib/kubernetes/getDeploymentJobs.ts b/src/server/lib/kubernetes/getDeploymentJobs.ts index 48294d1..0f65b2e 100644 --- a/src/server/lib/kubernetes/getDeploymentJobs.ts +++ 
b/src/server/lib/kubernetes/getDeploymentJobs.ts @@ -1,5 +1,5 @@ import * as k8s from '@kubernetes/client-node'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; export interface DeploymentJobInfo { jobName: string; @@ -14,10 +14,6 @@ export interface DeploymentJobInfo { deploymentType: 'helm' | 'github'; } -const logger = rootLogger.child({ - filename: __filename, -}); - export async function getDeploymentJobs(serviceName: string, namespace: string): Promise { const kc = new k8s.KubeConfig(); kc.loadFromDefault(); @@ -111,7 +107,7 @@ export async function getDeploymentJobs(serviceName: string, namespace: string): } } } catch (podError) { - logger.warn(`Failed to get pods for job ${jobName}:`, podError); + getLogger().warn({ error: podError }, `K8s: failed to get pods jobName=${jobName}`); } } @@ -137,7 +133,7 @@ export async function getDeploymentJobs(serviceName: string, namespace: string): return deploymentJobs; } catch (error) { - logger.error(`Error listing deployment jobs for service ${serviceName}:`, error); + getLogger().error({ error }, `K8s: failed to list deployment jobs service=${serviceName}`); throw error; } } diff --git a/src/server/lib/kubernetes/rbac.ts b/src/server/lib/kubernetes/rbac.ts index 6f8d6ab..f9e86ec 100644 --- a/src/server/lib/kubernetes/rbac.ts +++ b/src/server/lib/kubernetes/rbac.ts @@ -16,7 +16,7 @@ import { V1ServiceAccount, V1Role, V1RoleBinding } from '@kubernetes/client-node'; import * as k8s from '@kubernetes/client-node'; -import logger from '../logger'; +import { getLogger } from '../logger/index'; export interface RBACConfig { namespace: string; @@ -77,7 +77,7 @@ export async function setupServiceAccountWithRBAC(config: RBACConfig): Promise setTimeout(resolve, 5000)); attempts++; } catch (error) { - logger.error(`Error monitoring job ${jobName}: ${error}`); + getLogger().error({ error }, `Error monitoring job: jobName=${jobName}`); throw error; } } diff --git a/src/server/lib/kubernetesApply/logs.ts b/src/server/lib/kubernetesApply/logs.ts index 882512f..4a7b20d 100644 --- a/src/server/lib/kubernetesApply/logs.ts +++ b/src/server/lib/kubernetesApply/logs.ts @@ -16,9 +16,7 @@ import * as k8s from '@kubernetes/client-node'; import { Deploy } from 'server/models'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ filename: 'lib/kubernetesApply/logs.ts' }); +import { getLogger } from 'server/lib/logger/index'; /** * Fetches logs from a Kubernetes apply job for a deploy @@ -105,14 +103,14 @@ export async function getKubernetesApplyLogs(deploy: Deploy, tail?: number): Pro allLogs.push(`=== Logs from pod ${podName} ===\n${podLogs.body}`); } } catch (podError) { - logger.error(`Failed to fetch logs from pod ${podName}: ${podError}`); + getLogger({ error: podError }).error(`Failed to fetch logs from pod: podName=${podName}`); allLogs.push(`=== Error fetching logs from pod ${podName} ===\n${(podError as Error).message || podError}`); } } return allLogs.join('\n\n') || 'No logs available'; } catch (error) { - logger.error(`Failed to fetch logs for deploy ${deploy.uuid}: ${error}`); + getLogger({ error }).error('Failed to fetch logs'); return `Failed to fetch logs: ${(error as Error).message || error}`; } } @@ -245,7 +243,7 @@ export async function streamKubernetesApplyLogs( onClose(); } } catch (error) { - logger.error(`Error polling logs for deploy ${deploy.uuid}: ${error}`); + getLogger({ error }).error('Error polling logs'); if ((error as any).response?.statusCode === 404) { // Pod 
was deleted, stop polling isActive = false; @@ -263,7 +261,7 @@ export async function streamKubernetesApplyLogs( clearInterval(pollInterval); }; } catch (error) { - logger.error(`Failed to start log stream for deploy ${deploy.uuid}: ${error}`); + getLogger({ error }).error('Failed to start log stream'); onError(error as Error); onClose(); return () => {}; diff --git a/src/server/lib/logger.ts b/src/server/lib/logger.ts index fca83df..7216527 100644 --- a/src/server/lib/logger.ts +++ b/src/server/lib/logger.ts @@ -31,9 +31,26 @@ const transport = { }, }; +const serializers = { + error: (value: unknown): string => { + if (value instanceof Error) { + return value.message; + } + if (typeof value === 'object' && value !== null) { + try { + return JSON.stringify(value); + } catch { + return '[Unserializable Object]'; + } + } + return String(value); + }, +}; + let rootLogger = pino({ level, enabled, + serializers, ...(pinoPretty ? transport : {}), }); diff --git a/src/server/lib/logger/__tests__/context.test.ts b/src/server/lib/logger/__tests__/context.test.ts new file mode 100644 index 0000000..1271f7a --- /dev/null +++ b/src/server/lib/logger/__tests__/context.test.ts @@ -0,0 +1,148 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { getLogContext, withLogContext, updateLogContext, extractContextForQueue } from '../context'; + +describe('Logger Context', () => { + describe('getLogContext', () => { + it('should return empty object when no context is set', () => { + const context = getLogContext(); + expect(context).toEqual({}); + }); + }); + + describe('withLogContext', () => { + it('should set context and make it available inside the callback', async () => { + const correlationId = 'test-correlation-id'; + + await withLogContext({ correlationId }, async () => { + const context = getLogContext(); + expect(context.correlationId).toBe(correlationId); + }); + }); + + it('should merge parent context with new context', async () => { + const parentCorrelationId = 'parent-id'; + const buildUuid = 'build-123'; + + await withLogContext({ correlationId: parentCorrelationId }, async () => { + await withLogContext({ buildUuid }, async () => { + const context = getLogContext(); + expect(context.correlationId).toBe(parentCorrelationId); + expect(context.buildUuid).toBe(buildUuid); + }); + }); + }); + + it('should use child correlationId if provided', async () => { + const parentCorrelationId = 'parent-id'; + const childCorrelationId = 'child-id'; + + await withLogContext({ correlationId: parentCorrelationId }, async () => { + await withLogContext({ correlationId: childCorrelationId }, async () => { + const context = getLogContext(); + expect(context.correlationId).toBe(childCorrelationId); + }); + }); + }); + + it('should default to "unknown" correlationId if none provided', async () => { + await withLogContext({}, async () => { + const context = getLogContext(); + expect(context.correlationId).toBe('unknown'); + }); + }); + + it('should work with synchronous functions', () => { + const correlationId = 'sync-test'; + + const result = withLogContext({ correlationId }, () => { + const context = getLogContext(); + expect(context.correlationId).toBe(correlationId); + return 'sync-result'; + }); + + expect(result).toBe('sync-result'); + }); + + it('should return value from async callback', async () => { + const result = await withLogContext({ correlationId: 'test' }, async () => { + return 'async-result'; + }); + + expect(result).toBe('async-result'); + }); + }); + + describe('updateLogContext', () => { + it('should update context within withLogContext', async () => { + await withLogContext({ correlationId: 'initial' }, async () => { + updateLogContext({ buildUuid: 'new-build' }); + + const context = getLogContext(); + expect(context.correlationId).toBe('initial'); + expect(context.buildUuid).toBe('new-build'); + }); + }); + + it('should not throw when called outside withLogContext', () => { + expect(() => { + updateLogContext({ buildUuid: 'test' }); + }).not.toThrow(); + }); + }); + + describe('extractContextForQueue', () => { + it('should extract only queue-relevant fields', async () => { + await withLogContext( + { + correlationId: 'corr-123', + buildUuid: 'build-456', + deployUuid: 'deploy-789', + service: 'my-service', + stage: 'webhook.received', + repo: 'owner/repo', + pr: 42, + branch: 'feature-branch', + sha: 'abc1234', + }, + async () => { + const queueData = extractContextForQueue(); + + expect(queueData).toEqual({ + correlationId: 'corr-123', + buildUuid: 'build-456', + deployUuid: 'deploy-789', + repo: 'owner/repo', + pr: 42, + branch: 'feature-branch', + sha: 'abc1234', + }); + + expect(queueData).not.toHaveProperty('service'); + expect(queueData).not.toHaveProperty('stage'); + } + ); + }); + + it('should 
return undefined values for missing fields', () => { + const queueData = extractContextForQueue(); + + expect(queueData.correlationId).toBeUndefined(); + expect(queueData.buildUuid).toBeUndefined(); + }); + }); +}); diff --git a/src/server/lib/logger/__tests__/contextLogger.test.ts b/src/server/lib/logger/__tests__/contextLogger.test.ts new file mode 100644 index 0000000..a6cd452 --- /dev/null +++ b/src/server/lib/logger/__tests__/contextLogger.test.ts @@ -0,0 +1,170 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { withLogContext } from '../context'; + +const mockChild = jest.fn().mockReturnValue({ + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), +}); + +jest.mock('../../logger', () => ({ + __esModule: true, + default: { + child: (...args: unknown[]) => mockChild(...args), + }, +})); + +jest.mock('dd-trace', () => ({ + scope: jest.fn(() => ({ + active: jest.fn(() => null), + })), +})); + +import { getLogger } from '../contextLogger'; + +describe('contextLogger', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('getLogger', () => { + it('should pass AsyncLocalStorage context to logger.child()', async () => { + await withLogContext( + { + correlationId: 'test-corr-id', + buildUuid: 'build-123', + deployUuid: 'deploy-456', + repo: 'owner/repo', + pr: 42, + branch: 'feature-branch', + }, + async () => { + getLogger(); + + expect(mockChild).toHaveBeenCalledWith( + expect.objectContaining({ + correlationId: 'test-corr-id', + buildUuid: 'build-123', + deployUuid: 'deploy-456', + repo: 'owner/repo', + pr: 42, + branch: 'feature-branch', + }) + ); + } + ); + }); + + it('should merge extra params with async context', async () => { + await withLogContext( + { + correlationId: 'test-corr-id', + buildUuid: 'build-123', + }, + async () => { + getLogger({ stage: 'webhook.received', customField: 'custom-value' }); + + expect(mockChild).toHaveBeenCalledWith( + expect.objectContaining({ + correlationId: 'test-corr-id', + buildUuid: 'build-123', + stage: 'webhook.received', + customField: 'custom-value', + }) + ); + } + ); + }); + + it('should allow extra params to override async context stage', async () => { + await withLogContext( + { + correlationId: 'test-corr-id', + stage: 'original-stage', + }, + async () => { + getLogger({ stage: 'overridden-stage' }); + + expect(mockChild).toHaveBeenCalledWith( + expect.objectContaining({ + stage: 'overridden-stage', + }) + ); + } + ); + }); + + it('should filter out undefined values from context', async () => { + await withLogContext( + { + correlationId: 'test-corr-id', + }, + async () => { + getLogger(); + + const passedContext = mockChild.mock.calls[0][0]; + + expect(passedContext).toHaveProperty('correlationId', 'test-corr-id'); + expect(passedContext).not.toHaveProperty('buildUuid'); + expect(passedContext).not.toHaveProperty('deployUuid'); + expect(passedContext).not.toHaveProperty('service'); + } + ); + }); + + it('should work outside of 
withLogContext with minimal context', () => { + getLogger({ stage: 'test-stage' }); + + const passedContext = mockChild.mock.calls[0][0]; + + expect(passedContext).toHaveProperty('stage', 'test-stage'); + expect(passedContext).not.toHaveProperty('correlationId'); + }); + + it('should include dd-trace context when span is active', async () => { + const tracer = require('dd-trace'); + tracer.scope.mockReturnValueOnce({ + active: jest.fn(() => ({ + context: () => ({ + toTraceId: () => 'trace-123', + toSpanId: () => 'span-456', + }), + })), + }); + + getLogger(); + + expect(mockChild).toHaveBeenCalledWith( + expect.objectContaining({ + 'dd.trace_id': 'trace-123', + 'dd.span_id': 'span-456', + }) + ); + }); + + it('should not include dd-trace context when no span is active', () => { + getLogger(); + + const passedContext = mockChild.mock.calls[0][0]; + + expect(passedContext).not.toHaveProperty('dd.trace_id'); + expect(passedContext).not.toHaveProperty('dd.span_id'); + }); + }); +}); diff --git a/src/server/lib/logger/__tests__/spans.test.ts b/src/server/lib/logger/__tests__/spans.test.ts new file mode 100644 index 0000000..c6c297b --- /dev/null +++ b/src/server/lib/logger/__tests__/spans.test.ts @@ -0,0 +1,112 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { withSpan } from '../spans'; +import { withLogContext } from '../context'; + +const mockSetTag = jest.fn(); +const mockSpan = { + setTag: mockSetTag, +}; + +jest.mock('dd-trace', () => ({ + trace: jest.fn((_name, _options, fn) => fn(mockSpan)), +})); + +describe('spans', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('withSpan', () => { + it('should execute the function and return its result', async () => { + const result = await withSpan('test.operation', async () => { + return 'test-result'; + }); + + expect(result).toBe('test-result'); + }); + + it('should set success tag on successful completion', async () => { + await withSpan('test.operation', async () => { + return 'success'; + }); + + expect(mockSetTag).toHaveBeenCalledWith('lifecycle.success', true); + }); + + it('should set error tags on failure and rethrow', async () => { + const testError = new Error('Test error'); + + await expect( + withSpan('test.operation', async () => { + throw testError; + }) + ).rejects.toThrow('Test error'); + + expect(mockSetTag).toHaveBeenCalledWith('error', true); + expect(mockSetTag).toHaveBeenCalledWith('lifecycle.success', false); + expect(mockSetTag).toHaveBeenCalledWith('error.message', 'Test error'); + }); + + it('should include context from AsyncLocalStorage', async () => { + const tracer = require('dd-trace'); + + await withLogContext( + { + correlationId: 'corr-123', + buildUuid: 'build-456', + repo: 'owner/repo', + }, + async () => { + await withSpan('test.operation', async () => 'result'); + } + ); + + expect(tracer.trace).toHaveBeenCalledWith( + 'test.operation', + expect.objectContaining({ + tags: expect.objectContaining({ + 'lifecycle.correlation_id': 'corr-123', + 'lifecycle.build_uuid': 'build-456', + 'lifecycle.repo': 'owner/repo', + }), + }), + expect.any(Function) + ); + }); + + it('should accept custom resource and tags', async () => { + const tracer = require('dd-trace'); + + await withSpan('test.operation', async () => 'result', { + resource: 'custom-resource', + tags: { customTag: 'customValue' }, + }); + + expect(tracer.trace).toHaveBeenCalledWith( + 'test.operation', + expect.objectContaining({ + resource: 'custom-resource', + tags: expect.objectContaining({ + customTag: 'customValue', + }), + }), + expect.any(Function) + ); + }); + }); +}); diff --git a/src/server/lib/logger/__tests__/stages.test.ts b/src/server/lib/logger/__tests__/stages.test.ts new file mode 100644 index 0000000..d57f0c4 --- /dev/null +++ b/src/server/lib/logger/__tests__/stages.test.ts @@ -0,0 +1,118 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { LogStage, LogStageType } from '../stages'; + +describe('LogStage', () => { + it('should export all required webhook stages', () => { + const webhookStages = ['WEBHOOK_RECEIVED', 'WEBHOOK_QUEUED', 'WEBHOOK_PROCESSING', 'WEBHOOK_SKIPPED']; + for (const stage of webhookStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required build stages', () => { + const buildStages = [ + 'BUILD_CREATED', + 'BUILD_QUEUED', + 'BUILD_STARTING', + 'BUILD_IMAGE_BUILDING', + 'BUILD_IMAGE_PUSHING', + 'BUILD_COMPLETE', + 'BUILD_FAILED', + ]; + for (const stage of buildStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required deploy stages', () => { + const deployStages = [ + 'DEPLOY_QUEUED', + 'DEPLOY_STARTING', + 'DEPLOY_HELM_INSTALLING', + 'DEPLOY_HELM_COMPLETE', + 'DEPLOY_COMPLETE', + 'DEPLOY_FAILED', + ]; + for (const stage of deployStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required cleanup stages', () => { + const cleanupStages = ['CLEANUP_STARTING', 'CLEANUP_COMPLETE', 'CLEANUP_FAILED']; + for (const stage of cleanupStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required label stages', () => { + const labelStages = ['LABEL_PROCESSING', 'LABEL_COMPLETE', 'LABEL_FAILED']; + for (const stage of labelStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required comment stages', () => { + const commentStages = ['COMMENT_PROCESSING', 'COMMENT_COMPLETE', 'COMMENT_FAILED']; + for (const stage of commentStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required config stages', () => { + const configStages = ['CONFIG_REFRESH', 'CONFIG_FAILED']; + for (const stage of configStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should export all required ingress stages', () => { + const ingressStages = ['INGRESS_PROCESSING', 'INGRESS_COMPLETE', 'INGRESS_FAILED']; + for (const stage of ingressStages) { + expect(LogStage).toHaveProperty(stage); + expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string'); + } + }); + + it('should have stage values following dot-notation convention', () => { + const allValues = Object.values(LogStage); + + for (const value of allValues) { + expect(value).toMatch(/^[a-z]+\.[a-z.]+$/); + } + }); + + it('should allow LogStageType to accept any LogStage value', () => { + const assignStage = (stage: LogStageType): string => stage; + + expect(assignStage(LogStage.WEBHOOK_RECEIVED)).toBe('webhook.received'); + expect(assignStage(LogStage.BUILD_COMPLETE)).toBe('build.complete'); + expect(assignStage(LogStage.DEPLOY_FAILED)).toBe('deploy.failed'); + expect(assignStage(LogStage.LABEL_PROCESSING)).toBe('label.processing'); + expect(assignStage(LogStage.COMMENT_COMPLETE)).toBe('comment.complete'); + expect(assignStage(LogStage.CONFIG_REFRESH)).toBe('config.refresh'); + 
expect(assignStage(LogStage.INGRESS_COMPLETE)).toBe('ingress.complete'); + }); +}); diff --git a/src/server/lib/logger/context.ts b/src/server/lib/logger/context.ts new file mode 100644 index 0000000..0d61f79 --- /dev/null +++ b/src/server/lib/logger/context.ts @@ -0,0 +1,102 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// eslint-disable-next-line @typescript-eslint/no-var-requires +const { AsyncLocalStorage } = require('async_hooks') as { + AsyncLocalStorage: new () => { + getStore(): T | undefined; + run(store: T, callback: () => R): R; + }; +}; +import type { LogContext, JobDataWithContext } from './types'; +import tracer from 'dd-trace'; + +const asyncLocalStorage = new AsyncLocalStorage(); + +export function getLogContext(): Partial { + return asyncLocalStorage.getStore() || {}; +} + +type ContextWithTrace = Partial & { _ddTraceContext?: Record }; + +export function withLogContext(context: ContextWithTrace, fn: () => T | Promise): T | Promise { + const parentContext = getLogContext(); + const mergedContext: LogContext = { + ...parentContext, + ...context, + correlationId: context.correlationId || parentContext.correlationId || 'unknown', + }; + + const runWithContext = () => asyncLocalStorage.run(mergedContext, fn); + + if ( + context._ddTraceContext && + Object.keys(context._ddTraceContext).length > 0 && + typeof tracer?.scope === 'function' + ) { + const parentSpanContext = tracer.extract('text_map', context._ddTraceContext); + if (parentSpanContext) { + const span = tracer.startSpan('queue.process', { childOf: parentSpanContext }); + span.setTag('correlationId', mergedContext.correlationId); + if (mergedContext.buildUuid) span.setTag('buildUuid', mergedContext.buildUuid); + if (mergedContext.deployUuid) span.setTag('deployUuid', mergedContext.deployUuid); + + return tracer.scope().activate(span, () => { + const result = runWithContext(); + if (result instanceof Promise) { + return result.finally(() => span.finish()) as T | Promise; + } + span.finish(); + return result; + }); + } + } + + return runWithContext(); +} + +export function updateLogContext(updates: Partial): void { + const current = asyncLocalStorage.getStore(); + if (current) { + Object.assign(current, updates); + } +} + +export function extractContextForQueue(): JobDataWithContext { + const ctx = getLogContext(); + + let traceContext: Record | undefined; + if (typeof tracer?.scope === 'function') { + const activeSpan = tracer.scope().active(); + if (activeSpan) { + traceContext = {}; + tracer.inject(activeSpan, 'text_map', traceContext); + } + } + + return { + correlationId: ctx.correlationId, + buildUuid: ctx.buildUuid, + deployUuid: ctx.deployUuid, + serviceName: ctx.serviceName, + sender: ctx.sender, + repo: ctx.repo, + pr: ctx.pr, + branch: ctx.branch, + sha: ctx.sha, + _ddTraceContext: traceContext, + }; +} diff --git a/src/server/lib/logger/contextLogger.ts b/src/server/lib/logger/contextLogger.ts new file mode 100644 index 0000000..8ec1737 --- /dev/null 
+++ b/src/server/lib/logger/contextLogger.ts @@ -0,0 +1,55 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import tracer from 'dd-trace'; +import rootLogger from '../logger'; +import { getLogContext } from './context'; +import type { LogContext } from './types'; + +function getTraceContext(): { traceId?: string; spanId?: string } { + if (typeof tracer?.scope !== 'function') return {}; + const span = tracer.scope()?.active(); + if (!span) return {}; + const context = span.context(); + return { + traceId: context.toTraceId(), + spanId: context.toSpanId(), + }; +} + +export function getLogger(extra?: Partial & Record) { + const asyncContext = getLogContext(); + const traceContext = getTraceContext(); + + const fullContext: Record = { + correlationId: asyncContext.correlationId, + buildUuid: asyncContext.buildUuid, + deployUuid: asyncContext.deployUuid, + serviceName: asyncContext.serviceName, + sender: asyncContext.sender, + stage: extra?.stage || asyncContext.stage, + repo: asyncContext.repo, + pr: asyncContext.pr, + branch: asyncContext.branch, + 'dd.trace_id': traceContext.traceId, + 'dd.span_id': traceContext.spanId, + ...extra, + }; + + const cleanContext = Object.fromEntries(Object.entries(fullContext).filter(([_, v]) => v !== undefined)); + + return rootLogger.child(cleanContext); +} diff --git a/src/server/lib/logger/index.ts b/src/server/lib/logger/index.ts new file mode 100644 index 0000000..d4ea7be --- /dev/null +++ b/src/server/lib/logger/index.ts @@ -0,0 +1,23 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export { default as rootLogger } from '../logger'; +export { getLogContext, withLogContext, updateLogContext, extractContextForQueue } from './context'; +export { getLogger } from './contextLogger'; +export { withSpan } from './spans'; +export { LogStage } from './stages'; +export type { LogContext, JobDataWithContext } from './types'; +export type { LogStageType } from './stages'; diff --git a/src/server/lib/logger/spans.ts b/src/server/lib/logger/spans.ts new file mode 100644 index 0000000..20d7623 --- /dev/null +++ b/src/server/lib/logger/spans.ts @@ -0,0 +1,58 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
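// A minimal usage sketch of the context-aware logging pattern introduced by the
// modules above: withLogContext() seeds an AsyncLocalStorage scope, getLogger()
// returns a pino child logger that automatically carries that context (plus any
// active dd-trace ids), and updateLogContext() enriches the scope as identifiers
// become known. The handler name and the createBuildForPullRequest() helper are
// illustrative assumptions, not code from this repository.
import { randomUUID } from 'crypto';
import { getLogger, withLogContext, updateLogContext, LogStage } from 'server/lib/logger';

async function handlePullRequestOpened(repo: string, pr: number, branch: string) {
  // Every await inside this callback shares the same context, so log lines do
  // not need correlationId/repo/pr threaded through as arguments.
  return withLogContext({ correlationId: randomUUID(), repo, pr, branch }, async () => {
    getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info('Webhook: received');

    const buildUuid = await createBuildForPullRequest(repo, pr); // hypothetical helper
    updateLogContext({ buildUuid }); // later logs in this async chain include buildUuid

    getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: queued');
  });
}

// Hypothetical helper, included only to keep the sketch self-contained.
async function createBuildForPullRequest(_repo: string, _pr: number): Promise<string> {
  return 'build-uuid-placeholder';
}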
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import tracer from 'dd-trace'; +import { getLogContext } from './context'; + +export interface SpanOptions { + resource?: string; + tags?: Record; +} + +export async function withSpan(operationName: string, fn: () => Promise, options: SpanOptions = {}): Promise { + if (typeof tracer?.trace !== 'function') { + return fn(); + } + + const context = getLogContext(); + + return tracer.trace( + operationName, + { + resource: options.resource, + tags: { + 'lifecycle.correlation_id': context.correlationId, + 'lifecycle.build_uuid': context.buildUuid, + 'lifecycle.deploy_uuid': context.deployUuid, + 'lifecycle.repo': context.repo, + 'lifecycle.pr': context.pr, + ...options.tags, + }, + }, + async (span) => { + try { + const result = await fn(); + span?.setTag('lifecycle.success', true); + return result; + } catch (error) { + span?.setTag('error', true); + span?.setTag('lifecycle.success', false); + span?.setTag('error.message', error instanceof Error ? error.message : String(error)); + throw error; + } + } + ); +} diff --git a/src/server/lib/logger/stages.ts b/src/server/lib/logger/stages.ts new file mode 100644 index 0000000..6491d59 --- /dev/null +++ b/src/server/lib/logger/stages.ts @@ -0,0 +1,59 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +export const LogStage = { + WEBHOOK_RECEIVED: 'webhook.received', + WEBHOOK_QUEUED: 'webhook.queued', + WEBHOOK_PROCESSING: 'webhook.processing', + WEBHOOK_COMPLETE: 'webhook.complete', + WEBHOOK_SKIPPED: 'webhook.skipped', + + BUILD_CREATED: 'build.created', + BUILD_QUEUED: 'build.queued', + BUILD_STARTING: 'build.starting', + BUILD_IMAGE_BUILDING: 'build.image.building', + BUILD_IMAGE_PUSHING: 'build.image.pushing', + BUILD_COMPLETE: 'build.complete', + BUILD_FAILED: 'build.failed', + + DEPLOY_QUEUED: 'deploy.queued', + DEPLOY_STARTING: 'deploy.starting', + DEPLOY_HELM_INSTALLING: 'deploy.helm.installing', + DEPLOY_HELM_COMPLETE: 'deploy.helm.complete', + DEPLOY_COMPLETE: 'deploy.complete', + DEPLOY_FAILED: 'deploy.failed', + + CLEANUP_STARTING: 'cleanup.starting', + CLEANUP_COMPLETE: 'cleanup.complete', + CLEANUP_FAILED: 'cleanup.failed', + + LABEL_PROCESSING: 'label.processing', + LABEL_COMPLETE: 'label.complete', + LABEL_FAILED: 'label.failed', + + COMMENT_PROCESSING: 'comment.processing', + COMMENT_COMPLETE: 'comment.complete', + COMMENT_FAILED: 'comment.failed', + + CONFIG_REFRESH: 'config.refresh', + CONFIG_FAILED: 'config.failed', + + INGRESS_PROCESSING: 'ingress.processing', + INGRESS_COMPLETE: 'ingress.complete', + INGRESS_FAILED: 'ingress.failed', +} as const; + +export type LogStageType = (typeof LogStage)[keyof typeof LogStage]; diff --git a/src/server/lib/logger/types.ts b/src/server/lib/logger/types.ts new file mode 100644 index 0000000..936ee18 --- /dev/null +++ b/src/server/lib/logger/types.ts @@ -0,0 +1,41 @@ +/** + * Copyright 2025 GoodRx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
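// A minimal sketch of pairing withSpan() with the LogStage constants above: the
// wrapper creates a Datadog span tagged from the active log context and records
// lifecycle.success (or error tags on throw), mirroring how this patch wraps
// native builds and helm deploys. The operation name 'lifecycle.example.deploy'
// and the doWork() placeholder are illustrative assumptions.
import { getLogger, withSpan, LogStage } from 'server/lib/logger';

async function runExampleDeploy(deployUuid: string): Promise<void> {
  await withSpan(
    'lifecycle.example.deploy',
    async () => {
      getLogger({ stage: LogStage.DEPLOY_STARTING }).info('Deploy: starting');
      await doWork(); // hypothetical unit of deployment work
      getLogger({ stage: LogStage.DEPLOY_COMPLETE }).info('Deploy: complete');
    },
    { resource: deployUuid, tags: { 'deploy.uuid': deployUuid } }
  );
}

async function doWork(): Promise<void> {
  // placeholder so the sketch stands on its own
}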
+ */ + +export interface LogContext { + correlationId: string; + buildUuid?: string; + deployUuid?: string; + serviceName?: string; + sender?: string; + stage?: string; + repo?: string; + pr?: number; + branch?: string; + sha?: string; +} + +export interface JobDataWithContext { + correlationId?: string; + buildUuid?: string; + deployUuid?: string; + serviceName?: string; + sender?: string; + repo?: string; + pr?: number; + branch?: string; + sha?: string; + _ddTraceContext?: Record; +} diff --git a/src/server/lib/nativeBuild/__tests__/buildkit.test.ts b/src/server/lib/nativeBuild/__tests__/buildkit.test.ts index 16d0f80..9c5c745 100644 --- a/src/server/lib/nativeBuild/__tests__/buildkit.test.ts +++ b/src/server/lib/nativeBuild/__tests__/buildkit.test.ts @@ -59,6 +59,7 @@ jest.mock('../../logger', () => { return { __esModule: true, default: mockLogger, + getLogger: jest.fn(() => mockLogger), }; }); diff --git a/src/server/lib/nativeBuild/engines.ts b/src/server/lib/nativeBuild/engines.ts index e752590..161ef1d 100644 --- a/src/server/lib/nativeBuild/engines.ts +++ b/src/server/lib/nativeBuild/engines.ts @@ -16,7 +16,7 @@ import { Deploy } from '../../models'; import { shellPromise } from '../shell'; -import logger from '../logger'; +import { getLogger } from '../logger/index'; import GlobalConfigService from '../../services/globalConfig'; import { waitForJobAndGetLogs, @@ -256,11 +256,7 @@ export async function buildWithEngine( const jobName = `${options.deployUuid}-build-${jobId}-${shortSha}`.substring(0, 63); const contextPath = `/workspace/repo-${shortRepoName}`; - logger.info( - `[${engine.name}] Building image(s) for ${options.deployUuid}: dockerfilePath=${ - options.dockerfilePath - }, initDockerfilePath=${options.initDockerfilePath || 'none'}, repo=${options.repo}` - ); + getLogger().debug(`Build: preparing ${engine.name} job dockerfile=${options.dockerfilePath}`); const githubToken = await getGitHubToken(); const gitUsername = 'x-access-token'; @@ -360,7 +356,7 @@ export async function buildWithEngine( options.ecrDomain ) ); - logger.info(`[${engine.name}] Job ${jobName} will build both main and init images in parallel`); + getLogger().debug('Build: including init image'); } await deploy.$fetchGraph('build'); @@ -394,16 +390,16 @@ export async function buildWithEngine( }); const jobYaml = yaml.dump(job, { quotingType: '"', forceQuotes: true }); - const applyResult = await shellPromise(`cat <<'EOF' | kubectl apply -f - + await shellPromise(`cat <<'EOF' | kubectl apply -f - ${jobYaml} EOF`); - logger.info(`Created ${engineName} job ${jobName} in namespace ${options.namespace}`, { applyResult }); + getLogger().debug(`Job: created ${jobName}`); try { const { logs, success } = await waitForJobAndGetLogs(jobName, options.namespace, jobTimeout); return { success, logs, jobName }; } catch (error) { - logger.error(`Error getting logs for ${engineName} job ${jobName}`, { error }); + getLogger().error(`Job: log retrieval failed job=${jobName} error=${error.message}`); try { const jobStatus = await shellPromise( @@ -412,11 +408,11 @@ EOF`); const jobSucceeded = jobStatus.trim() === 'True'; if (jobSucceeded) { - logger.info(`Job ${jobName} completed successfully despite log retrieval error`); + getLogger().debug(`Job: completed (logs unavailable) job=${jobName}`); return { success: true, logs: 'Log retrieval failed but job completed successfully', jobName }; } } catch (statusError) { - logger.error(`Failed to check job status for ${jobName}`, { statusError }); + getLogger().error(`Job: status 
check failed job=${jobName} error=${statusError.message}`); } return { success: false, logs: `Build failed: ${error.message}`, jobName }; diff --git a/src/server/lib/nativeBuild/index.ts b/src/server/lib/nativeBuild/index.ts index a5508ed..27e542e 100644 --- a/src/server/lib/nativeBuild/index.ts +++ b/src/server/lib/nativeBuild/index.ts @@ -15,7 +15,7 @@ */ import { Deploy } from '../../models'; -import logger from '../logger'; +import { getLogger, withSpan, updateLogContext } from '../logger/index'; import { ensureNamespaceExists } from './utils'; import { buildWithEngine, NativeBuildOptions } from './engines'; import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount'; @@ -29,47 +29,51 @@ export interface NativeBuildResult { } export async function buildWithNative(deploy: Deploy, options: NativeBuildOptions): Promise { - const startTime = Date.now(); - logger.info(`[Native Build] Starting build for ${options.deployUuid} in namespace ${options.namespace}`); + return withSpan( + 'lifecycle.build.image', + async () => { + updateLogContext({ deployUuid: options.deployUuid, serviceName: deploy.deployable?.name }); + const startTime = Date.now(); + getLogger().info('Build: starting (native)'); - try { - await ensureNamespaceExists(options.namespace); + try { + await ensureNamespaceExists(options.namespace); - const serviceAccountName = await ensureServiceAccountForJob(options.namespace, 'build'); + const serviceAccountName = await ensureServiceAccountForJob(options.namespace, 'build'); - const buildOptions = { - ...options, - serviceAccount: serviceAccountName, - }; + const buildOptions = { + ...options, + serviceAccount: serviceAccountName, + }; - await deploy.$fetchGraph('[deployable]'); - const builderEngine = deploy.deployable?.builder?.engine; + await deploy.$fetchGraph('[deployable]'); + updateLogContext({ serviceName: deploy.deployable?.name }); + const builderEngine = deploy.deployable?.builder?.engine; - let result: NativeBuildResult; + let result: NativeBuildResult; - if (builderEngine === 'buildkit' || builderEngine === 'kaniko') { - logger.info(`[Native Build] Using ${builderEngine} engine for ${options.deployUuid}`); - result = await buildWithEngine(deploy, buildOptions, builderEngine); - } else { - throw new Error(`Unsupported builder engine: ${builderEngine}`); - } + if (builderEngine === 'buildkit' || builderEngine === 'kaniko') { + getLogger().debug(`Build: using ${builderEngine} engine`); + result = await buildWithEngine(deploy, buildOptions, builderEngine); + } else { + throw new Error(`Unsupported builder engine: ${builderEngine}`); + } - const duration = Date.now() - startTime; - logger.info( - `[Native Build] Build completed for ${options.deployUuid}: jobName=${result.jobName}, success=${result.success}, duration=${duration}ms, namespace=${options.namespace}` - ); + const duration = Date.now() - startTime; + getLogger().info(`Build: completed success=${result.success} duration=${duration}ms`); - return result; - } catch (error) { - const duration = Date.now() - startTime; - logger.error( - `[Native Build] Build failed for ${options.deployUuid}: error=${error.message}, duration=${duration}ms, namespace=${options.namespace}` - ); + return result; + } catch (error) { + const duration = Date.now() - startTime; + getLogger().error(`Build: failed error=${error.message} duration=${duration}ms`); - return { - success: false, - logs: `Build error: ${error.message}`, - jobName: '', - }; - } + return { + success: false, + logs: `Build error: ${error.message}`, 
+ jobName: '', + }; + } + }, + { resource: options.deployUuid } + ); } diff --git a/src/server/lib/nativeBuild/utils.ts b/src/server/lib/nativeBuild/utils.ts index 1ce9712..7af4ff5 100644 --- a/src/server/lib/nativeBuild/utils.ts +++ b/src/server/lib/nativeBuild/utils.ts @@ -16,7 +16,7 @@ import { V1Job } from '@kubernetes/client-node'; import { shellPromise } from '../shell'; -import logger from '../logger'; +import { getLogger } from '../logger/index'; import * as k8s from '@kubernetes/client-node'; import GlobalConfigService from '../../services/globalConfig'; import { createBuildJob } from '../kubernetes/jobFactory'; @@ -30,10 +30,10 @@ export async function ensureNamespaceExists(namespace: string): Promise { try { await coreV1Api.readNamespace(namespace); - logger.info(`Namespace ${namespace} already exists`); + getLogger().debug('Namespace: exists'); } catch (error) { if (error?.response?.statusCode === 404) { - logger.info(`Creating namespace ${namespace}`); + getLogger().debug('Namespace: creating'); await coreV1Api.createNamespace({ metadata: { name: namespace, diff --git a/src/server/lib/nativeHelm/helm.ts b/src/server/lib/nativeHelm/helm.ts index 8a6f1ac..c7181c2 100644 --- a/src/server/lib/nativeHelm/helm.ts +++ b/src/server/lib/nativeHelm/helm.ts @@ -18,7 +18,7 @@ import yaml from 'js-yaml'; import fs from 'fs'; import Deploy from 'server/models/Deploy'; import GlobalConfigService from 'server/services/globalConfig'; -import rootLogger from 'server/lib/logger'; +import { getLogger, withSpan, updateLogContext } from 'server/lib/logger/index'; import { shellPromise } from 'server/lib/shell'; import { randomAlphanumeric } from 'server/lib/random'; import { nanoid } from 'nanoid'; @@ -53,10 +53,6 @@ import { import { createHelmJob as createHelmJobFromFactory } from 'server/lib/kubernetes/jobFactory'; import { ensureServiceAccountForJob } from 'server/lib/kubernetes/common/serviceAccount'; -const logger = rootLogger.child({ - filename: 'lib/nativeHelm/helm.ts', -}); - export interface JobResult { completed: boolean; logs: string; @@ -250,10 +246,10 @@ export async function shouldUseNativeHelm(deploy: Deploy): Promise { } export async function deployNativeHelm(deploy: Deploy): Promise { - logger.info(`[HELM ${deploy.uuid}] Starting native helm deployment`); - const { deployable, build } = deploy; + getLogger().info('Helm: deploying (native)'); + if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) { await applyHttpScaleObjectManifestYaml(deploy, build.namespace); await applyExternalServiceManifestYaml(deploy, build.namespace); @@ -279,7 +275,12 @@ export async function deployNativeHelm(deploy: Deploy): Promise { await patchIngress(deploy.uuid, ingressBannerSnippet(deploy), build.namespace); } } catch (error) { - logger.warn(`[DEPLOY ${deploy.uuid}] Unable to patch ingress: ${error}`); + getLogger().warn( + { + error, + }, + 'Unable to patch ingress' + ); } if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) { @@ -310,7 +311,7 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService, const deployPipelineId = getCodefreshPipelineIdFromOutput(output); const statusMessage = 'Starting deployment via Helm'; - logger.info(`[DEPLOY ${deploy.uuid}] Deploying via codefresh build: ${deployPipelineId}`); + getLogger().info(`Helm: deploying (Codefresh) pipelineId=${deployPipelineId}`); await deployService.patchAndUpdateActivityFeed( deploy, @@ -330,7 +331,12 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService, 
await patchIngress(deploy.uuid, ingressBannerSnippet(deploy), build.namespace); } } catch (error) { - logger.warn(`[DEPLOY ${deploy.uuid}] Unable to patch ingress: ${error}`); + getLogger().warn( + { + error, + }, + 'Unable to patch ingress' + ); } if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) { @@ -345,61 +351,68 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService, } export async function deployHelm(deploys: Deploy[]): Promise { - logger.info(`[DEPLOY ${deploys.map((d) => d.uuid).join(', ')}] Deploying with helm`); - if (deploys?.length === 0) return; + getLogger().info(`Helm: deploying services=${deploys.map((d) => d.deployable?.name || d.uuid).join(',')}`); + await Promise.all( deploys.map(async (deploy) => { - const startTime = Date.now(); - const runUUID = deploy.runUUID ?? nanoid(); - const deployService = new DeployService(); - - try { - const useNative = await shouldUseNativeHelm(deploy); - const method = useNative ? 'Native Helm' : 'Codefresh Helm'; - - logger.info(`[DEPLOY ${deploy.uuid}] Using ${method} deployment`); - - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOYING, - statusMessage: `Deploying via ${method}`, - }, - runUUID - ); - - if (useNative) { - await deployNativeHelm(deploy); - } else { - await deployCodefreshHelm(deploy, deployService, runUUID); - } - - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.READY, - statusMessage: `Successfully deployed via ${method}`, - }, - runUUID - ); - - await trackHelmDeploymentMetrics(deploy, 'success', Date.now() - startTime); - } catch (error) { - await trackHelmDeploymentMetrics(deploy, 'failure', Date.now() - startTime, error.message); - - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOY_FAILED, - statusMessage: `Helm deployment failed: ${error.message}`, - }, - runUUID - ); - - throw error; - } + return withSpan( + 'lifecycle.helm.deploy', + async () => { + updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }); + const startTime = Date.now(); + const runUUID = deploy.runUUID ?? nanoid(); + const deployService = new DeployService(); + + try { + const useNative = await shouldUseNativeHelm(deploy); + const method = useNative ? 
'Native Helm' : 'Codefresh Helm'; + + getLogger().debug(`Using ${method}`); + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.DEPLOYING, + statusMessage: `Deploying via ${method}`, + }, + runUUID + ); + + if (useNative) { + await deployNativeHelm(deploy); + } else { + await deployCodefreshHelm(deploy, deployService, runUUID); + } + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.READY, + statusMessage: `Successfully deployed via ${method}`, + }, + runUUID + ); + + await trackHelmDeploymentMetrics(deploy, 'success', Date.now() - startTime); + } catch (error) { + await trackHelmDeploymentMetrics(deploy, 'failure', Date.now() - startTime, error.message); + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.DEPLOY_FAILED, + statusMessage: `Helm deployment failed: ${error.message}`, + }, + runUUID + ); + + throw error; + } + }, + { resource: deploy.uuid, tags: { 'deploy.uuid': deploy.uuid } } + ); }) ); } diff --git a/src/server/lib/nativeHelm/utils.ts b/src/server/lib/nativeHelm/utils.ts index 5791320..cfbd61e 100644 --- a/src/server/lib/nativeHelm/utils.ts +++ b/src/server/lib/nativeHelm/utils.ts @@ -26,14 +26,10 @@ import { setupDeployServiceAccountInNamespace, } from 'server/lib/kubernetes/rbac'; import { HelmConfigBuilder } from 'server/lib/config/ConfigBuilder'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { shellPromise } from 'server/lib/shell'; import { normalizeKubernetesLabelValue } from 'server/lib/kubernetes/utils'; -const logger = rootLogger.child({ - filename: 'lib/nativeHelm/utils.ts', -}); - export interface HelmReleaseState { status: 'deployed' | 'pending-install' | 'pending-upgrade' | 'pending-rollback' | 'failed' | 'unknown'; revision: number; @@ -54,7 +50,7 @@ export async function getHelmReleaseStatus(releaseName: string, namespace: strin if (error.message?.includes('release: not found')) { return null; } - logger.warn(`[HELM] Failed to get status for release ${releaseName}: ${error.message}`); + getLogger().warn({ error }, `Failed to get status for release: releaseName=${releaseName}`); return null; } } @@ -67,14 +63,14 @@ export async function isReleaseBlocked(releaseState: HelmReleaseState | null): P } export async function uninstallHelmRelease(releaseName: string, namespace: string): Promise { - logger.info(`[HELM] Uninstalling release ${releaseName} in namespace ${namespace}`); + getLogger().debug(`Helm: uninstalling release namespace=${namespace}`); try { await shellPromise(`helm uninstall ${releaseName} -n ${namespace} --wait --timeout 5m`); - logger.info(`[HELM] Successfully uninstalled release ${releaseName}`); + getLogger().debug('Helm: release uninstalled'); } catch (error) { if (error.message?.includes('release: not found')) { - logger.info(`[HELM] Release ${releaseName} not found, nothing to uninstall`); + getLogger().debug('Helm: release not found, skipping uninstall'); return; } throw error; @@ -82,7 +78,8 @@ export async function uninstallHelmRelease(releaseName: string, namespace: strin } export async function killHelmJobsAndPods(releaseName: string, namespace: string): Promise { - logger.info(`[HELM ${releaseName}] Checking for existing helm jobs`); + const log = getLogger(); + log.debug('Helm: checking existing jobs'); try { const existingJobs = await shellPromise( @@ -91,7 +88,7 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string const jobsData = 
JSON.parse(existingJobs); if (jobsData.items && jobsData.items.length > 0) { - logger.warn(`[HELM ${releaseName}] Found ${jobsData.items.length} existing job(s), terminating`); + log.warn(`Found ${jobsData.items.length} existing job(s), terminating`); for (const job of jobsData.items) { const jobName = job.metadata.name; @@ -104,7 +101,7 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string `--overwrite` ); } catch (annotateError) { - logger.warn(`[HELM ${releaseName}] Failed to annotate job ${jobName}: ${annotateError.message}`); + log.warn({ error: annotateError }, `Failed to annotate job: jobName=${jobName}`); } const podsOutput = await shellPromise(`kubectl get pods -n ${namespace} -l job-name=${jobName} -o json`); @@ -116,7 +113,7 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string try { await shellPromise(`kubectl delete pod ${podName} -n ${namespace} --force --grace-period=0`); } catch (podError) { - logger.warn(`[HELM ${releaseName}] Failed to delete pod ${podName}: ${podError.message}`); + log.warn({ error: podError }, `Failed to delete pod: podName=${podName}`); } } } @@ -124,17 +121,18 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string try { await shellPromise(`kubectl delete job ${jobName} -n ${namespace} --force --grace-period=0`); } catch (jobError) { - logger.warn(`[HELM ${releaseName}] Failed to delete job ${jobName}: ${jobError.message}`); + log.warn({ error: jobError }, `Failed to delete job: jobName=${jobName}`); } } } } catch (error) { - logger.warn(`[HELM ${releaseName}] Error checking for existing jobs: ${error.message}`); + log.warn({ error }, 'Error checking for existing jobs'); } } export async function resolveHelmReleaseConflicts(releaseName: string, namespace: string): Promise { - logger.info(`[HELM ${releaseName}] Resolving release conflicts`); + const log = getLogger(); + log.debug('Helm: resolving conflicts'); await killHelmJobsAndPods(releaseName, namespace); @@ -147,7 +145,7 @@ export async function resolveHelmReleaseConflicts(releaseName: string, namespace } if (await isReleaseBlocked(releaseState)) { - logger.warn(`[HELM ${releaseName}] Release blocked (${releaseState.status}), uninstalling`); + log.warn(`Release blocked: status=${releaseState.status}, uninstalling`); await uninstallHelmRelease(releaseName, namespace); @@ -177,7 +175,7 @@ export async function checkIfJobWasSuperseded(jobName: string, namespace: string return annotations === 'superseded-by-retry'; } catch (error) { - logger.debug(`Could not check job supersession status for ${jobName}: ${error.message}`); + getLogger().debug({ error }, `Could not check job supersession status: jobName=${jobName}`); return false; } } @@ -442,7 +440,7 @@ export async function setupServiceAccountInNamespace( ): Promise { await createServiceAccountUsingExistingFunction(namespace, serviceAccountName, role); await setupDeployServiceAccountInNamespace(namespace, serviceAccountName, role); - logger.info(`[RBAC] Setup complete for '${serviceAccountName}' in ${namespace}`); + getLogger().debug(`RBAC: configured serviceAccount=${serviceAccountName} namespace=${namespace}`); } export async function createNamespacedRoleAndBinding(namespace: string, serviceAccountName: string): Promise { @@ -499,8 +497,10 @@ export async function createNamespacedRoleAndBinding(namespace: string, serviceA }, }; + const log = getLogger(); + try { - logger.info(`[NS ${namespace}] Creating Role and RoleBinding for: ${serviceAccountName}`); + 
log.debug(`RBAC: creating role and binding namespace=${namespace} serviceAccount=${serviceAccountName}`); try { await rbacApi.readNamespacedRole(roleName, namespace); @@ -528,24 +528,24 @@ export async function createNamespacedRoleAndBinding(namespace: string, serviceA await rbacApi.readNamespacedRole(roleName, namespace); await rbacApi.readNamespacedRoleBinding(roleBindingName, namespace); } catch (verifyError) { - logger.error(`[NS ${namespace}] Failed to verify RBAC resources:`, verifyError.message); + log.error({ error: verifyError }, `Failed to verify RBAC resources: namespace=${namespace}`); } } catch (error) { - logger.warn(error); - logger.error(`[NS ${namespace}] Error creating namespace-scoped RBAC:`, { - error, - statusCode: error?.response?.statusCode, - statusMessage: error?.response?.statusMessage, - body: error?.response?.body, - serviceAccountName, - namespace, - roleName, - roleBindingName, - }); - - logger.warn( - `[NS ${namespace}] ⚠️ RBAC setup failed, helm deployment may have permission issues. Consider updating lifecycle-app service account permissions to allow Role/RoleBinding creation.` + log.warn({ error }, `Error creating namespace-scoped RBAC: namespace=${namespace}`); + log.error( + { + error, + statusCode: error?.response?.statusCode, + statusMessage: error?.response?.statusMessage, + serviceAccountName, + namespace, + roleName, + roleBindingName, + }, + `RBAC creation failed: namespace=${namespace}` ); + + log.warn(`RBAC setup failed, helm deployment may have permission issues: namespace=${namespace}`); } } diff --git a/src/server/lib/queueManager.ts b/src/server/lib/queueManager.ts index f541d53..62e6c98 100644 --- a/src/server/lib/queueManager.ts +++ b/src/server/lib/queueManager.ts @@ -16,11 +16,7 @@ import { Queue, Worker, QueueOptions, WorkerOptions, Processor } from 'bullmq'; import { Redis } from 'ioredis'; -import rootLogger from './logger'; - -const logger = rootLogger.child({ - filename: 'lib/queueManager.ts', -}); +import { getLogger } from 'server/lib/logger/index'; interface RegisteredQueue { queue: Queue; @@ -52,7 +48,7 @@ export default class QueueManager { return existing.queue; } - logger.debug(`Registering queue ${queueName}`); + getLogger().debug(`Registering queue: queueName=${queueName}`); const queue = new Queue(queueName, { connection: options.connection.duplicate ? options.connection.duplicate() : options.connection, @@ -76,7 +72,7 @@ export default class QueueManager { }; } ): Worker { - logger.debug(`Registering worker for queue ${queueName}`); + getLogger().debug(`Registering worker: queueName=${queueName}`); const workerConnection = options.connection.duplicate ? 
options.connection.duplicate() : options.connection;
     // ensure maxRetriesPerRequest is null for workers
@@ -109,23 +105,23 @@ public async emptyAndCloseAllQueues(): Promise<void> {
     for (const { queue, worker } of this.registeredQueues) {
       if (worker) {
-        logger.debug(`Closing worker for queue: ${worker.name}`);
+        getLogger().debug(`Closing worker: queueName=${worker.name}`);
         try {
           await worker.close();
         } catch (error) {
-          logger.warn(`⚠️ Error closing worker for queue ${worker.name}:`, error.message);
+          getLogger().warn({ error }, `Error closing worker: queueName=${worker.name}`);
         }
       }
       if (queue) {
-        logger.debug(`Closing queue: ${queue.name}`);
+        getLogger().debug(`Closing queue: queueName=${queue.name}`);
         try {
           await queue.close();
         } catch (error) {
-          logger.warn(`⚠️ Error closing queue ${queue.name}:`, error.message);
+          getLogger().warn({ error }, `Error closing queue: queueName=${queue.name}`);
         }
       }
     }
-    logger.info('✅ All queues have been closed successfully.');
+    getLogger().info('All queues closed successfully');
   }
 }
diff --git a/src/server/lib/redisClient.ts b/src/server/lib/redisClient.ts
index 560b961..a0e71ff 100644
--- a/src/server/lib/redisClient.ts
+++ b/src/server/lib/redisClient.ts
@@ -17,11 +17,7 @@
 import Redis from 'ioredis';
 import Redlock from 'redlock';
 import { REDIS_URL, APP_REDIS_HOST, APP_REDIS_PORT, APP_REDIS_PASSWORD, APP_REDIS_TLS } from 'shared/config';
-import rootLogger from './logger';
-
-const logger = rootLogger.child({
-  filename: 'lib/redisClient.ts',
-});
+import { getLogger } from 'server/lib/logger/index';
 export class RedisClient {
   private static instance: RedisClient;
@@ -97,9 +93,9 @@ public async close(): Promise<void> {
     try {
       await Promise.all([this.redis.quit(), this.subscriber.quit(), this.bullConn.quit()]);
-      logger.info(' ✅All Redis connections closed successfully.');
+      getLogger().info('All Redis connections closed successfully');
     } catch (error) {
-      logger.warn(' ⚠️Error closing Redis connections. Forcing disconnect.', error);
+      getLogger().warn({ error }, 'Error closing Redis connections, forcing disconnect');
       this.redis.disconnect();
       this.subscriber.disconnect();
       this.bullConn.disconnect();
diff --git a/src/server/lib/shell.ts b/src/server/lib/shell.ts
index 0669f1a..a7b61ef 100644
--- a/src/server/lib/shell.ts
+++ b/src/server/lib/shell.ts
@@ -14,13 +14,9 @@
  * limitations under the License.
  */
-import rootLogger from './logger';
+import { getLogger } from 'server/lib/logger/index';
 import shell, { ExecOptions } from 'shelljs';
-const logger = rootLogger.child({
-  filename: 'lib/shell.ts',
-});
-
 interface Options extends ExecOptions {
   debug?: boolean;
 }
@@ -39,7 +35,7 @@ export async function shellPromise(cmd: string, options: Options = {}): Promise<
   shell.exec(cmd, opts, (code, stdout, stderr) => {
     if (code !== 0) {
       if (stderr.length > 0) {
-        logger.debug(`Shell command failed: ${cmd} => ${stderr}`);
+        getLogger().debug(`Shell command failed: cmd=${cmd} stderr=${stderr}`);
       }
       const options = opts ?
JSON.stringify(opts) : ''; reject( diff --git a/src/server/lib/tracer/index.ts b/src/server/lib/tracer/index.ts index 50e8b6c..2b088ac 100644 --- a/src/server/lib/tracer/index.ts +++ b/src/server/lib/tracer/index.ts @@ -51,10 +51,16 @@ export class Tracer { this.updateTags(tags); } else { this.tags = { name, ...tags }; - const span = tracer.startSpan(name, { tags: this.tags }); - tracer.scope().activate(span, () => { - span.finish(); - }); + if (typeof tracer?.startSpan === 'function') { + const span = tracer.startSpan(name, { tags: this.tags }); + if (typeof tracer?.scope === 'function') { + tracer.scope().activate(span, () => { + span.finish(); + }); + } else { + span.finish(); + } + } this.isInitialized = true; } return this; @@ -65,16 +71,19 @@ export class Tracer { } public wrap(name, fn, tags: TracerTags = {}): Function { + if (typeof tracer?.wrap !== 'function') return fn; const updatedTags = { ...this.tags, ...tags }; return tracer.wrap(name, updatedTags, fn); } public trace(name: string, fn, tags: TracerTags = {}): Function { + if (typeof tracer?.trace !== 'function') return fn; const updatedTags = { ...this.tags, ...tags }; return tracer.trace(name, updatedTags, fn); } - public startSpan(name: string, tags: TracerTags = {}): Span { + public startSpan(name: string, tags: TracerTags = {}): Span | undefined { + if (typeof tracer?.startSpan !== 'function') return undefined; const updatedTags = { ...this.tags, ...tags }; return tracer.startSpan(name, { tags: updatedTags }); } @@ -88,8 +97,7 @@ export class Tracer { const originalMethod = descriptor?.value; const profiler = Tracer.getInstance(); descriptor.value = function (...args: any[]) { - if (!profiler.isInitialized) { - logger.error(`[Tracer][Trace] Tracer not initialized`); + if (!profiler.isInitialized || typeof tracer?.trace !== 'function') { return originalMethod.apply(this, args); } const spanOptions = { tags: { ...profiler.tags, decorator: 'Trace' } }; @@ -97,7 +105,9 @@ export class Tracer { try { return originalMethod.apply(this, args); } catch (error) { - tracer.scope().active()?.setTag('error', true); + if (typeof tracer?.scope === 'function') { + tracer.scope().active()?.setTag('error', true); + } logger .child({ target, descriptor, error }) .error(`[Tracer][Trace] error decorating ${propertyKey.toString()}`); diff --git a/src/server/lib/webhook/index.ts b/src/server/lib/webhook/index.ts index 0f984dd..7ce4a62 100644 --- a/src/server/lib/webhook/index.ts +++ b/src/server/lib/webhook/index.ts @@ -22,13 +22,9 @@ import { createWebhookJob, WebhookJobConfig } from 'server/lib/kubernetes/webhoo import { shellPromise } from 'server/lib/shell'; import { waitForJobAndGetLogs } from 'server/lib/nativeBuild/utils'; import { ensureServiceAccountForJob } from 'server/lib/kubernetes/common/serviceAccount'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { nanoid } from 'nanoid'; -const logger = rootLogger.child({ - filename: 'lib/webhook/index.ts', -}); - const MANIFEST_PATH = process.env.MANIFEST_PATH || '/tmp/lifecycle/manifests'; export interface WebhookExecutionResult { @@ -102,12 +98,9 @@ export async function executeCommandWebhook( async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Promise { const executionId = nanoid(); - logger.info(`[WEBHOOK ${build.uuid}] Starting ${jobConfig.webhookType} webhook: ${jobConfig.webhookName}`, { - buildUuid: build.uuid, - webhookName: jobConfig.webhookName, - webhookType: jobConfig.webhookType, - executionId, 
- }); + getLogger().info( + `Starting ${jobConfig.webhookType} webhook: webhookName=${jobConfig.webhookName} executionId=${executionId}` + ); try { const job = createWebhookJob(jobConfig); @@ -121,12 +114,9 @@ async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Pro const jobResult = await waitForJobAndGetLogs(job.metadata.name, jobConfig.namespace, `[WEBHOOK ${build.uuid}]`); - logger.info(`[WEBHOOK ${build.uuid}] Webhook execution completed`, { - buildUuid: build.uuid, - webhookName: jobConfig.webhookName, - success: jobResult.success, - status: jobResult.status, - }); + getLogger().info( + `Webhook execution completed: webhookName=${jobConfig.webhookName} success=${jobResult.success} status=${jobResult.status}` + ); return { success: jobResult.success, @@ -136,19 +126,16 @@ async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Pro metadata: {}, }; } catch (error) { - logger.error(`[WEBHOOK ${build.uuid}] Webhook execution failed`, { - buildUuid: build.uuid, - webhookName: jobConfig.webhookName, - error: error.message, - }); + getLogger().error({ error }, `Webhook execution failed: webhookName=${jobConfig.webhookName}`); + const errorMessage = error instanceof Error ? error.message : String(error); return { success: false, jobName: '', - logs: error.message, + logs: errorMessage, status: 'failed', metadata: { - error: error.message, + error: errorMessage, }, }; } diff --git a/src/server/lib/yamlConfigValidator.ts b/src/server/lib/yamlConfigValidator.ts index e6748ae..a5839a4 100644 --- a/src/server/lib/yamlConfigValidator.ts +++ b/src/server/lib/yamlConfigValidator.ts @@ -19,11 +19,7 @@ import { LifecycleError } from './errors'; import JsonSchema from 'jsonschema'; import { BuildStatus, CAPACITY_TYPE, DiskAccessMode } from 'shared/constants'; import { schema_1_0_0 } from './yamlSchemas'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ - filename: 'models/yaml/YamlService.ts', -}); +import { getLogger } from 'server/lib/logger/index'; export class ValidationError extends LifecycleError { constructor(msg: string, uuid: string = null, service: string = null) { @@ -89,7 +85,7 @@ export class YamlConfigValidator { throw new ValidationError('Config file is empty.'); } - logger.debug(`Validating config file with version: ${version}`); + getLogger().debug(`Validating config file with version: ${version}`); switch (version.toLowerCase()) { case '1.0.0': case 'latest': diff --git a/src/server/models/config/index.ts b/src/server/models/config/index.ts index 7cb628a..43947d2 100644 --- a/src/server/models/config/index.ts +++ b/src/server/models/config/index.ts @@ -24,11 +24,7 @@ import { } from 'server/models/config/utils'; import { LifecycleConfig, Service } from 'server/models/config/types'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ - filename: 'models/config/index.ts', -}); +import { getLogger } from 'server/lib/logger/index'; export const isGithubServiceDockerConfig = (obj) => isInObj(obj, 'dockerfilePath'); export const isDockerServiceConfig = (obj) => isInObj(obj, 'dockerImage'); @@ -127,7 +123,10 @@ export const fetchLifecycleConfig = async (repositoryName: string, branchName: s const config = await fetchLifecycleConfigByRepository(repository, branchName); return config; } catch (err) { - logger.error(`Unable to fetch configuration from ${repositoryName}/${branchName}: ${err}`); + getLogger().error( + { error: err instanceof Error ? 
err.message : String(err) }, + `Failed to fetch config: repository=${repositoryName} branch=${branchName}` + ); } }; diff --git a/src/server/models/config/utils.ts b/src/server/models/config/utils.ts index 8558664..4fe4339 100644 --- a/src/server/models/config/utils.ts +++ b/src/server/models/config/utils.ts @@ -21,11 +21,7 @@ import { YamlConfigParser } from 'server/lib/yamlConfigParser'; import Repository from 'server/models/Repository'; import { Service } from 'server/models/yaml/types'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ - filename: 'models/yaml/utils.ts', -}); +import { getLogger } from 'server/lib/logger/index'; export const isInObj = (obj, key) => (!obj ? false : key in obj); @@ -40,7 +36,7 @@ export const resolveRepository = async (repositoryFullName: string) => { const repositories = await Repository.query() .where(raw('LOWER(??)', [key]), '=', name) .catch((error) => { - logger.error(`Unable to find ${repositoryFullName} from Lifecycle Database: ${error}`); + getLogger().error({ error }, `Unable to find ${repositoryFullName} from Lifecycle Database`); return null; }); if (!repositories || repositories?.length === 0) { @@ -48,7 +44,7 @@ export const resolveRepository = async (repositoryFullName: string) => { } return repositories[0]; } catch (err) { - logger.error(`There was a problem resolving the repository ${repositoryFullName} \n Error: ${err}`); + getLogger().error({ error: err }, `Problem resolving repository ${repositoryFullName}`); } }; @@ -65,9 +61,7 @@ export const fetchLifecycleConfigByRepository = async (repository: Repository, b const validator = new YamlConfigValidator(); const isConfigValid = validator.validate(configVersion, config); if (!isConfigValid) { - logger.error( - `YAML Config validation failed for ${name}/${branchName} using version Lifecyle Yaml version=${configVersion}` - ); + getLogger().error(`YAML Config validation failed for ${name}/${branchName} version=${configVersion}`); // TODO: This is a temporary fix to allow the UI to display the config // throw new Error( // `YAML Config validation failed for ${name}/${branchName} using version Lifecyle Yaml version=${configVersion}` @@ -75,7 +69,7 @@ export const fetchLifecycleConfigByRepository = async (repository: Repository, b } return config; } catch (err) { - logger.error(`fetchLifecycleConfigByRepository error: ${err}`); + getLogger().error({ error: err }, `fetchLifecycleConfigByRepository failed`); return null; } }; diff --git a/src/server/models/yaml/Config.ts b/src/server/models/yaml/Config.ts index e4c8494..2f05505 100644 --- a/src/server/models/yaml/Config.ts +++ b/src/server/models/yaml/Config.ts @@ -21,11 +21,7 @@ import { ValidationError, YamlConfigValidator } from 'server/lib/yamlConfigValid import Repository from '../Repository'; import { Environment } from './YamlEnvironment'; import { Service, Service001 } from './YamlService'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ - filename: 'models/yaml/Config.ts', -}); +import { getLogger } from 'server/lib/logger/index'; export interface LifecycleConfig { readonly version: string; @@ -70,7 +66,10 @@ export async function fetchLifecycleConfigByRepository( try { config = await new YamlConfigParser().parseYamlConfigFromBranch(repository.fullName, branchName); } catch (error) { - logger.warn(`Unable to fetch configuration from ${repository.fullName}/${branchName}: ${error}`); + getLogger({ repository: repository.fullName, branch: branchName }).warn( + { error }, + 
'Unable to fetch configuration' + ); if (error instanceof EmptyFileError) { config = null; @@ -80,12 +79,12 @@ export async function fetchLifecycleConfigByRepository( } if (config != null) { - // The YAML config file could be syntax correctly but the schema could be wrong. try { new YamlConfigValidator().validate(config.version, config); } catch (error) { - logger.error( - `YAML Config validation failed for ${repository.fullName}/${branchName} using version=${config.version}: ${error}` + getLogger({ repository: repository.fullName, branch: branchName, version: config.version }).error( + { error }, + 'YAML config validation failed' ); throw new ValidationError(error); } @@ -110,9 +109,7 @@ export function getDeployingServicesByName(config: LifecycleConfig, serviceName: } } } catch (error) { - logger - .child({ error }) - .error(`There was a problem getting the service by its name while searching for ${serviceName} service`); + getLogger({ serviceName }).error({ error }, 'Failed to get service by name'); throw error; } @@ -132,9 +129,7 @@ export async function resolveRepository(repositoryFullName: string): Promise; @@ -507,13 +503,14 @@ export async function getHelmConfigFromYaml(service: Service): Promise { if (DeployTypes.HELM === getDeployType(service)) { const helmService = (service as unknown as HelmService).helm; - // First check for chart-specific configuration if (!globalConfig[helmService?.chart?.name]) { if (globalConfig?.publicChart?.block) throw new Error( `Unspported Chart: helmChart with name: ${helmService?.chart?.name} is not currently supported` ); - logger.warn(`[helmChart with name: ${helmService?.chart?.name} is not currently supported, proceed with caution`); + getLogger({ chartName: helmService?.chart?.name }).warn( + 'Helm chart not currently supported, proceed with caution' + ); } // Merge in priority order: @@ -579,13 +576,7 @@ export function getRepositoryName(service: Service): string { break; } } catch (error) { - logger.error( - `There was a problem getting the repository name for service name: ${JSON.stringify( - service, - null, - 2 - )} \n ${error}` - ); + getLogger({ serviceName: service?.name }).error({ error }, 'Failed to get repository name for service'); throw error; } diff --git a/src/server/services/__tests__/globalConfig.test.ts b/src/server/services/__tests__/globalConfig.test.ts index 4be891e..5af4d86 100644 --- a/src/server/services/__tests__/globalConfig.test.ts +++ b/src/server/services/__tests__/globalConfig.test.ts @@ -45,6 +45,7 @@ describe('GlobalConfigService', () => { beforeEach(() => { service = GlobalConfigService.getInstance(); + service.clearMemoryCache(); }); describe('getAllConfigs', () => { diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts index 3f3f766..421d83e 100644 --- a/src/server/services/activityStream.ts +++ b/src/server/services/activityStream.ts @@ -15,7 +15,7 @@ */ import BaseService from './_service'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; import { Build, PullRequest, Deploy, Repository } from 'server/models'; import * as github from 'server/lib/github'; import { APP_HOST, QUEUE_NAMES } from 'shared/config'; @@ -50,10 +50,6 @@ import GlobalConfigService from './globalConfig'; import { ChartType, determineChartType } from 'server/lib/nativeHelm'; import { shouldUseNativeHelm } from 'server/lib/nativeHelm'; -const logger = rootLogger.child({ - filename: 
'services/activityStream.ts', -}); - const createDeployMessage = async () => { const deployLabel = await getDeployLabel(); const disabledLabel = await getDisabledLabel(); @@ -74,29 +70,40 @@ export default class ActivityStream extends BaseService { }); processComments = async (job) => { - try { - const pullRequest: PullRequest = await this.db.models.PullRequest.findOne({ - id: job.data, - }); - await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]'); - const { build, repository } = pullRequest; - if (!build) { - logger.warn(`[BUILD] Build id not found for pull request with id: ${job.data}`); - return; + const { id, sender, correlationId, _ddTraceContext, targetGithubRepositoryId } = job.data; + + return withLogContext({ correlationId, sender, _ddTraceContext }, async () => { + try { + getLogger({ stage: LogStage.COMMENT_PROCESSING }).debug(`Processing comment update for PR ${id}`); + + const pullRequest: PullRequest = await this.db.models.PullRequest.findOne({ + id, + }); + await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]'); + const { build } = pullRequest; + if (!build) { + getLogger({ stage: LogStage.COMMENT_FAILED }).warn(`Build id not found for pull request with id: ${id}`); + return; + } + + const { repository } = pullRequest; + await this.db.services.ActivityStream.updatePullRequestActivityStream( + build, + build.deploys, + pullRequest, + repository, + true, + true, + null, + false, + targetGithubRepositoryId + ); + + getLogger({ stage: LogStage.COMMENT_COMPLETE }).debug(`Comment updated for PR ${id}`); + } catch (error) { + getLogger({ stage: LogStage.COMMENT_FAILED }).error({ error }, `Error processing comment for PR ${id}`); } - await this.db.services.ActivityStream.updatePullRequestActivityStream( - build, - build.deploys, - pullRequest, - repository, - true, - true, - null, - false - ); - } catch (error) { - logger.error(`Error processing comment for PR ${job.data}:`, error); - } + }); }; /** @@ -133,11 +140,11 @@ export default class ActivityStream extends BaseService { try { if (isRedeployRequested) { - // if redeploy from comment, add to build queue and return - logger.info(`[BUILD ${buildUuid}] Redeploy triggered from comment edit`); + getLogger().info('Redeploy triggered from comment edit'); await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId, runUUID: runUuid, + ...extractContextForQueue(), }); return; } @@ -163,7 +170,7 @@ export default class ActivityStream extends BaseService { null, true ).catch((error) => { - logger.warn(`[BUILD ${buildUuid}] Failed to update the activity feed for comment edit: ${error}`); + getLogger().warn({ error }, 'Failed to update the activity feed for comment edit'); }); } } @@ -182,7 +189,7 @@ export default class ActivityStream extends BaseService { runUuid: string; }) { if (!build.id) { - logger.error(`[BUILD ${build.uuid}] No build provided to apply overrides from comment edit!`); + getLogger().error('No build provided to apply overrides from comment edit'); return; } @@ -191,7 +198,7 @@ export default class ActivityStream extends BaseService { const envOverrides = CommentHelper.parseEnvironmentOverrides(commentBody); const redeployOnPush = CommentHelper.parseRedeployOnPushes(commentBody); - logger.debug(`[BUILD ${build.uuid}] Parsed environment overrides: ${JSON.stringify(envOverrides)}`); + getLogger().debug(`Parsed environment overrides: ${JSON.stringify(envOverrides)}`); await build.$query().patch({ commentInitEnv: envOverrides, @@ 
-199,7 +206,7 @@ export default class ActivityStream extends BaseService { trackDefaultBranches: redeployOnPush, }); - logger.debug(`[BUILD ${build.uuid}] Service overrides: %j`, serviceOverrides); + getLogger().debug(`Service overrides: ${JSON.stringify(serviceOverrides)}`); await Promise.all(serviceOverrides.map((override) => this.patchServiceOverride(build, deploys, override))); @@ -214,21 +221,20 @@ export default class ActivityStream extends BaseService { await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId: build.id, runUUID: runUuid, + ...extractContextForQueue(), }); } } private async patchServiceOverride(build: Build, deploys: Deploy[], { active, serviceName, branchOrExternalUrl }) { - logger.debug( - `[BUILD ${build.uuid}] Patching service: ${serviceName}, active: ${active}, branch/url: ${branchOrExternalUrl}` - ); + getLogger().debug(`Patching service: ${serviceName} active=${active} branch/url=${branchOrExternalUrl}`); const deploy: Deploy = build.enableFullYaml ? deploys.find((d) => d.deployable.name === serviceName) : deploys.find((d) => d.service.name === serviceName); if (!deploy) { - logger.warn(`[BUILD ${build.uuid}] No deploy found for service: ${serviceName}`); + getLogger().warn(`No deploy found for service: ${serviceName}`); return; } @@ -246,22 +252,15 @@ export default class ActivityStream extends BaseService { active, }) .catch((error) => { - logger.error( - `[BUILD ${build.uuid}] [SERVICE ${serviceName}] Failed to patch deploy with external URL: ${error}` - ); + getLogger().error({ error }, `Failed to patch deploy for service=${serviceName} with external URL`); }); } else { - // Branch override - logger.debug( - `[BUILD ${build.uuid}] Setting branch override: ${branchOrExternalUrl} for deployable: ${deployable?.name}` - ); + getLogger().debug(`Setting branch override: ${branchOrExternalUrl} for deployable: ${deployable?.name}`); await deploy.deployable .$query() .patch({ commentBranchName: branchOrExternalUrl }) .catch((error) => { - logger.error( - `[BUILD ${build.uuid}] [SERVICE ${serviceName}] Failed to patch deployable with branch: ${error}` - ); + getLogger().error({ error }, `Failed to patch deployable for service=${serviceName} with branch`); }); await deploy @@ -274,7 +273,7 @@ export default class ActivityStream extends BaseService { active, }) .catch((error) => { - logger.error(`[BUILD ${build.uuid}] [SERVICE ${serviceName}] Failed to patch deploy with branch: ${error}`); + getLogger().error({ error }, `Failed to patch deploy for service=${serviceName} with branch`); }); } @@ -310,8 +309,7 @@ export default class ActivityStream extends BaseService { }); if (hasGithubMissionControlComment && !pullRequest?.commentId) { - const msg = `[BUILD ${build?.uuid}][activityStream][updateMissionControlComment] Status comment already exists but no mission control comment ID found!`; - logger.child({ pullRequest }).error(msg); + getLogger().error('Status comment already exists but no mission control comment ID found'); return; } @@ -331,9 +329,7 @@ export default class ActivityStream extends BaseService { const commentId = response?.data?.id; await pullRequest.$query().patch({ commentId, etag }); } catch (error) { - logger.error( - `[BUILD ${build?.uuid}] Failed to update Github mission control comment for ${fullName}/${branchName} - error: ${error}` - ); + getLogger().error({ error }, `Failed to update Github mission control comment for ${fullName}/${branchName}`); } } @@ -351,8 +347,7 @@ export default class ActivityStream 
extends BaseService { }); if (hasStatusComment && !commentId) { - const msg = `[BUILD ${build?.uuid}][activityStream][updateStatusComment] Status comment already exists but no status comment ID found!`; - logger.child({ pullRequest }).warn(msg); + getLogger().warn('Status comment already exists but no status comment ID found'); return; } const message = await this.generateStatusCommentForBuild(build, deploys, pullRequest); @@ -383,22 +378,21 @@ export default class ActivityStream extends BaseService { updateMissionControl: boolean, updateStatus: boolean, error: Error = null, - queue: boolean = true + queue: boolean = true, + targetGithubRepositoryId?: number ) { const buildId = build?.id; const uuid = build?.uuid; const isFullYaml = build?.enableFullYaml; const fullName = pullRequest?.fullName; const branchName = pullRequest?.branchName; - const prefix = `[BUILD ${uuid}]`; - const suffix = `for ${fullName}/${branchName}`; const isStatic = build?.isStatic ?? false; const labels = pullRequest?.labels || []; const hasStatusComment = await hasStatusCommentLabel(labels); const isDefaultStatusEnabled = await isDefaultStatusCommentsEnabled(); const isShowingStatusComment = isStatic || hasStatusComment || isDefaultStatusEnabled; if (!buildId) { - logger.error(`${prefix}[buidIdError] No build ID found ${suffix}`); + getLogger().error(`No build ID found for ${fullName}/${branchName}`); throw new Error('No build ID found for this build!'); } const resource = `build.${buildId}`; @@ -407,56 +401,70 @@ export default class ActivityStream extends BaseService { try { lock = await this.redlock.lock(resource, 9000); if (queue && !error) { - await this.commentQueue.add('comment', pullRequest.id, { - jobId: `pr-${pullRequest.id}`, - removeOnComplete: true, - removeOnFail: true, - }); + await this.commentQueue.add( + 'comment', + { id: pullRequest.id, targetGithubRepositoryId, ...extractContextForQueue() }, + { + jobId: `pr-${pullRequest.id}`, + removeOnComplete: true, + removeOnFail: true, + } + ); return; } if (updateStatus || updateMissionControl) { - await this.manageDeployments(build, deploys); + const deploysForGithubDeployment = targetGithubRepositoryId + ? 
deploys.filter((d) => d.githubRepositoryId === targetGithubRepositoryId) + : deploys; + + if (targetGithubRepositoryId) { + getLogger().info( + `Repo-filtered GitHub deployments: processing ${deploysForGithubDeployment.length}/${deploys.length} deploys` + ); + } + + await this.manageDeployments(build, deploysForGithubDeployment); const isControlEnabled = await isControlCommentsEnabled(); if (isControlEnabled) { await this.updateMissionControlComment(build, deploys, pullRequest, repository).catch((error) => { - logger - .child({ error }) - .warn( - `${prefix} (Full YAML: ${isFullYaml}) Unable to update ${queued} mission control comment ${suffix}` - ); + getLogger().warn( + { error }, + `Unable to update ${queued} mission control comment fullYaml=${isFullYaml} for ${fullName}/${branchName}` + ); }); } else { - logger.info(`${prefix} Mission control comments are disabled by configuration`); + getLogger().debug('Mission control comments are disabled'); } } if (updateStatus && isShowingStatusComment) { await this.updateStatusComment(build, deploys, pullRequest, repository).catch((error) => { - logger.warn( - `${prefix} (Full YAML: ${isFullYaml}) Unable to update ${queued} status comment ${suffix}: ${error}` + getLogger().warn( + { error }, + `Unable to update ${queued} status comment fullYaml=${isFullYaml} for ${fullName}/${branchName}` ); }); } } catch (error) { - logger.error(`${prefix} Failed to update the activity feed ${suffix}: ${error}`); + getLogger().error({ error }, `Failed to update the activity feed for ${fullName}/${branchName}`); } finally { if (lock) { try { await lock.unlock(); } catch (error) { - await this.forceUnlock(resource, prefix, suffix); + await this.forceUnlock(resource, uuid, fullName, branchName); } } } } - private async forceUnlock(resource: string, prefix: string, suffix: string) { + private async forceUnlock(resource: string, buildUuid: string, fullName: string, branchName: string) { try { await this.redis.del(resource); } catch (error) { - logger.child({ error }).error(`${prefix}[redlock] failed to forcefully unlock ${resource} ${suffix}`); + getLogger().error({ error }, `Failed to forcefully unlock ${resource} for ${fullName}/${branchName}`); } } @@ -518,9 +526,7 @@ export default class ActivityStream extends BaseService { break; } } else { - logger.debug( - `[BUILD ${build.uuid}] Skipping ${deploy.deployable.name} because it is an internal dependency.` - ); + getLogger().debug(`Skipping ${deploy.deployable.name} because it is an internal dependency`); } }); @@ -656,9 +662,9 @@ export default class ActivityStream extends BaseService { const isDeployedWithActiveErrors = isDeployed && hasErroringActiveDeploys; if (isDeployedWithActiveErrors) { const deployStatuses = deploys.map(({ branchName, uuid, status }) => ({ branchName, uuid, status })); - logger - .child({ deployStatuses, buildStatus }) - .info(`[BUILD ${uuid}][generateMissionControlComment] deployed build has erroring deploys`); + getLogger().info( + `Deployed build has erroring deploys: ${JSON.stringify(deployStatuses)} buildStatus=${buildStatus}` + ); metrics .increment('deployWithErrors') .event('Deploy Finished with Erroring Deploys', `${eventDetails.description} with erroring deploys`); @@ -692,9 +698,7 @@ export default class ActivityStream extends BaseService { } message += await this.editCommentForBuild(build, deploys).catch((error) => { - logger.error( - `[BUILD ${build.uuid}][generateMissionControlComment] (Full YAML Support: ${build.enableFullYaml}) Unable to generate mission control: ${error}` 
- ); + getLogger().error({ error }, `Unable to generate mission control fullYaml=${build.enableFullYaml}`); return ''; }); @@ -702,9 +706,7 @@ export default class ActivityStream extends BaseService { message += '\n---\n\n'; message += `## 📦 Deployments\n\n`; message += await this.environmentBlock(build).catch((error) => { - logger.error( - `[BUILD ${build.uuid}][generateMissionControlComment] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${error}` - ); + getLogger().error({ error }, `Unable to generate environment comment block fullYaml=${build.enableFullYaml}`); return ''; }); } @@ -712,21 +714,7 @@ export default class ActivityStream extends BaseService { message += `\n\nmission control ${isStaging() ? 'stg ' : ''}comment: enabled \n`; return message; } catch (error) { - logger - .child({ - error, - uuid, - branchName, - fullName, - status, - isOpen, - sha, - labels, - buildStatus, - }) - .error( - `[BUILD ${uuid}][generateMissionControlComment] Failed to generate mission control comment for ${fullName}/${branchName}` - ); + getLogger().error({ error }, `Failed to generate mission control comment for ${fullName}/${branchName}`); return message; } } @@ -818,19 +806,13 @@ export default class ActivityStream extends BaseService { message += 'We are busy building your code...\n'; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - logger - .child({ build, deploys, error }) - .error(`[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status`); + getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); return ''; }); message += `\nHere's where you can find your services after they're deployed:\n`; message += await this.environmentBlock(build).catch((error) => { - logger - .child({ build, error }) - .error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block` - ); + getLogger().error({ error }, `Unable to generate environment comment block fullYaml=${build.enableFullYaml}`); return ''; }); @@ -844,31 +826,26 @@ export default class ActivityStream extends BaseService { message += `We're deploying your code. Please stand by....\n\n`; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - logger - .child({ build, deploys, error }) - .error(`[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status`); + getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); return ''; }); message += `\nHere's where you can find your services after they're deployed:\n`; message += await this.environmentBlock(build).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${e}` + getLogger().error( + { error: e }, + `Unable to generate environment comment block fullYaml=${build.enableFullYaml}` ); return ''; }); message += await this.dashboardBlock(build, deploys).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate dashboard: ${e}` - ); + getLogger().error({ error: e }, `Unable to generate dashboard fullYaml=${build.enableFullYaml}`); return ''; }); } else if (isReadyToDeployBuild) { message += '## 🚀 Ready to deploy\n'; message += `Your code is built. 
We're ready to deploy whenever you are.\n`; message += await this.deployingBlock(build).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate deployment status: ${e}` - ); + getLogger().error({ error: e }, `Unable to generate deployment status fullYaml=${build.enableFullYaml}`); return ''; }); message += await createDeployMessage(); @@ -879,23 +856,18 @@ export default class ActivityStream extends BaseService { message += `There was a problem deploying your code. Some services may have not rolled out successfully. Here are the URLs for your services:\n\n`; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - logger - .child({ build, deploys, error }) - .error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status` - ); + getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); return ''; }); message += await this.environmentBlock(build).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${e}` + getLogger().error( + { error: e }, + `Unable to generate environment comment block fullYaml=${build.enableFullYaml}` ); return ''; }); message += await this.dashboardBlock(build, deploys).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate dashboard: ${e}` - ); + getLogger().error({ error: e }, `Unable to generate dashboard fullYaml=${build.enableFullYaml}`); return ''; }); } else if (build.status === BuildStatus.CONFIG_ERROR) { @@ -905,24 +877,19 @@ export default class ActivityStream extends BaseService { message += '## ✅ Deployed\n'; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - logger - .child({ build, deploys, error }) - .error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status` - ); + getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); return ''; }); message += `\nWe've deployed your code. 
Here's where you can find your services:\n`; message += await this.environmentBlock(build).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${e}` + getLogger().error( + { error: e }, + `Unable to generate environment comment block fullYaml=${build.enableFullYaml}` ); return ''; }); message += await this.dashboardBlock(build, deploys).catch((e) => { - logger.error( - `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate dashboard: ${e}` - ); + getLogger().error({ error: e }, `Unable to generate dashboard fullYaml=${build.enableFullYaml}`); return ''; }); } else { @@ -1170,7 +1137,6 @@ export default class ActivityStream extends BaseService { } private async manageDeployments(build, deploys) { - const uuid = build?.uuid; const isGithubDeployments = build?.githubDeployments; if (!isGithubDeployments) return; const isFullYaml = build?.enableFullYaml; @@ -1192,45 +1158,45 @@ export default class ActivityStream extends BaseService { ); const isDeployment = isActiveAndPublic && isDeploymentType; if (!isDeployment) { - logger.debug(`Skipping deployment ${deploy?.name}`); + getLogger().debug(`Skipping deployment ${deploy?.name}`); return; } await this.db.services.GithubService.githubDeploymentQueue - .add('deployment', { deployId, action: 'create' }, { delay: 10000, jobId: `deploy-${deployId}` }) - .catch((error) => - logger.child({ error }).warn(`[BUILD ${uuid}][manageDeployments] error with ${deployId}`) - ); + .add( + 'deployment', + { deployId, action: 'create', ...extractContextForQueue() }, + { delay: 10000, jobId: `deploy-${deployId}` } + ) + .catch((error) => getLogger().warn({ error }, `manageDeployments error with deployId=${deployId}`)); }) ); } catch (error) { - logger.child({ error }).debug(`[BUILD ${uuid}][manageDeployments] error`); + getLogger().debug({ error }, 'manageDeployments error'); } } private async purgeFastlyServiceCache(uuid: string) { try { const computeShieldServiceId = await this.fastly.getFastlyServiceId(uuid, 'compute-shield'); - logger.child({ computeShieldServiceId }).debug(`[BUILD ${uuid}][activityStream][fastly] computeShieldServiceId`); + getLogger().debug(`Fastly computeShieldServiceId=${computeShieldServiceId}`); if (computeShieldServiceId) { await this.fastly.purgeAllServiceCache(computeShieldServiceId, uuid, 'fastly'); } const optimizelyServiceId = await this.fastly.getFastlyServiceId(uuid, 'optimizely'); - logger.child({ optimizelyServiceId }).debug(`[BUILD ${uuid}][activityStream][fastly] optimizelyServiceId`); + getLogger().debug(`Fastly optimizelyServiceId=${optimizelyServiceId}`); if (optimizelyServiceId) { await this.fastly.purgeAllServiceCache(optimizelyServiceId, uuid, 'optimizely'); } const fastlyServiceId = await this.fastly.getFastlyServiceId(uuid, 'fastly'); - logger.child({ fastlyServiceId }).debug(`[BUILD ${uuid}][activityStream][fastly] fastlyServiceId`); + getLogger().debug(`Fastly fastlyServiceId=${fastlyServiceId}`); if (fastlyServiceId) { await this.fastly.purgeAllServiceCache(fastlyServiceId, uuid, 'fastly'); } - logger - .child({ fastlyServiceId }) - .info(`[BUILD ${uuid}][activityStream][fastly][purgeFastlyServiceCache] success`); + getLogger().info(`Fastly purgeFastlyServiceCache success fastlyServiceId=${fastlyServiceId}`); } catch (error) { - logger.child({ error }).info(`[BUILD ${uuid}][activityStream][fastly][purgeFastlyServiceCache] error`); + getLogger().error({ error }, 'Fastly 
purgeFastlyServiceCache error'); } } } diff --git a/src/server/services/build.ts b/src/server/services/build.ts index d748aae..f62b3f1 100644 --- a/src/server/services/build.ts +++ b/src/server/services/build.ts @@ -30,7 +30,7 @@ import BaseService from './_service'; import _ from 'lodash'; import { QUEUE_NAMES } from 'shared/config'; import { LifecycleError } from 'server/lib/errors'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, extractContextForQueue, LogStage, updateLogContext } from 'server/lib/logger/index'; import { ParsingError, YamlConfigParser } from 'server/lib/yamlConfigParser'; import { ValidationError, YamlConfigValidator } from 'server/lib/yamlConfigValidator'; @@ -45,10 +45,6 @@ import GlobalConfigService from './globalConfig'; import { paginate, PaginationMetadata, PaginationParams } from 'server/lib/paginate'; import { getYamlFileContentFromBranch } from 'server/lib/github'; -const logger = rootLogger.child({ - filename: 'services/build.ts', -}); - const tracer = Tracer.getInstance(); tracer.initialize('build-service'); export interface IngressConfiguration { @@ -85,14 +81,14 @@ export default class BuildService extends BaseService { // Enqueue a deletion job const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][cleanupBuilds][buidIdError] No build ID found for this build!`); + getLogger().error('No build ID found for cleanup'); } - logger.info(`[BUILD ${build?.uuid}] Queuing build for deletion`); - await this.db.services.BuildService.deleteQueue.add('delete', { buildId }); + getLogger().info('Queuing build for deletion'); + await this.db.services.BuildService.deleteQueue.add('delete', { buildId, ...extractContextForQueue() }); } } } catch (e) { - logger.error(`[BUILD ${build.uuid}] Can't cleanup build: ${e}`); + getLogger().error({ error: e }, 'Cleanup build failed'); } } } @@ -256,7 +252,7 @@ export default class BuildService extends BaseService { (deploy.service.type === DeployTypes.DOCKER || deploy.service.type === DeployTypes.GITHUB) ) .map((deploy) => { - logger.debug(`${deploy.uuid}: active = ${deploy.active}`); + getLogger().debug(`Deploy active status: deployUuid=${deploy.uuid} active=${deploy.active}`); return this.ingressConfigurationForDeploy(deploy); }) ) @@ -385,7 +381,7 @@ export default class BuildService extends BaseService { const environments = await this.getEnvironmentsToBuild(environmentId, repositoryId); if (!environments.length) { - logger.debug('No matching environments'); + getLogger().debug('No matching environments'); return; } @@ -404,7 +400,7 @@ export default class BuildService extends BaseService { }); await Promise.all(promises); } catch (err) { - logger.fatal(`Failed to create and deploy build due to fatal error: ${err}`); + getLogger().fatal({ error: err }, 'Failed to create and deploy build'); } } @@ -417,17 +413,15 @@ export default class BuildService extends BaseService { await this.db.services.Webhook.upsertWebhooksWithYaml(build, build.pullRequest); } catch (error) { if (error instanceof ParsingError) { - logger.error(`[BUILD ${build.uuid}] Invalid Lifecycle Config File: ${error}`); + getLogger().error({ error }, 'Invalid Lifecycle Config File (parsing error)'); throw error; } else if (error instanceof ValidationError) { - logger.error(`[BUILD ${build.uuid}] Invalid Lifecycle Config File: ${error}`); + getLogger().error({ error }, 'Invalid Lifecycle Config File (validation error)'); throw error; } else { - // Temporary warps around the new implementation so it won't F up 
production if i did something stupid. - // This code has no use in production yet but will start collecting data to validate if implementation works or not. - logger.warn(`[BUILD ${build.uuid}] No worry. Nothing is bombed. Can ignore this error: ${error}`); + getLogger().warn({ error }, 'Non-critical error during YAML config import'); } } } @@ -440,6 +434,10 @@ export default class BuildService extends BaseService { try { const build = await this.findOrCreateBuild(environment, options, lifecycleConfig); + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } + // After a build is susccessfully created or retrieved, // we need to create or update the deployables to be used for build and deploy. if (build && options != null) { @@ -459,8 +457,8 @@ export default class BuildService extends BaseService { } if (options.repositoryId && options.repositoryBranchName) { - logger.debug( - `[BUILD ${build.uuid}] Setting up default build services for repositoryID:${options.repositoryId} branch:${options.repositoryBranchName}` + getLogger().debug( + `Setting up default build services: repositoryId=${options.repositoryId} branch=${options.repositoryBranchName}` ); await this.setupDefaultBuildServiceOverrides( @@ -492,7 +490,7 @@ export default class BuildService extends BaseService { throw new Error('Missing build or deployment options from environment.'); } } catch (error) { - logger.fatal(`Failed to create build and deploys due to fatal error: ${error}`); + getLogger().fatal({ error }, 'Failed to create build and deploys'); } } @@ -508,6 +506,10 @@ export default class BuildService extends BaseService { const runUUID = nanoid(); /* We now own the build for as long as we see this UUID */ const uuid = build?.uuid; + + if (uuid) { + updateLogContext({ buildUuid: uuid }); + } const pullRequest = build?.pullRequest; const fullName = pullRequest?.fullName; const branchName = pullRequest?.branchName; @@ -522,10 +524,10 @@ export default class BuildService extends BaseService { if (!latestCommit) { latestCommit = await github.getSHAForBranch(branchName, owner, name); } - const deploys = await this.db.services.Deploy.findOrCreateDeploys(environment, build); + const deploys = await this.db.services.Deploy.findOrCreateDeploys(environment, build, githubRepositoryId); build?.$setRelated('deploys', deploys); await build?.$fetchGraph('pullRequest'); - await new BuildEnvironmentVariables(this.db).resolve(build); + await new BuildEnvironmentVariables(this.db).resolve(build, githubRepositoryId); await this.markConfigurationsAsBuilt(build); await this.updateStatusAndComment(build, BuildStatus.BUILDING, runUUID, true, true); const pullRequest = build?.pullRequest; @@ -537,8 +539,7 @@ export default class BuildService extends BaseService { dependencyGraph, }); } catch (error) { - // do nothing - logger.warn(`Unable to generate dependecy graph for ${build.uuid}`, error); + getLogger().warn({ error }, 'Unable to generate dependency graph'); } // Build Docker Images & Deploy CLI Based Infra At the Same Time @@ -546,7 +547,7 @@ export default class BuildService extends BaseService { this.buildImages(build, githubRepositoryId), this.deployCLIServices(build, githubRepositoryId), ]); - logger.debug(`[BUILD ${uuid}] Build results: buildImages=${results[0]}, deployCLIServices=${results[1]}`); + getLogger().debug(`Build results: buildImages=${results[0]} deployCLIServices=${results[1]}`); const success = _.every(results); /* Verify that all deploys are successfully built that are active */ if (success) { @@ -567,15 
+568,13 @@ export default class BuildService extends BaseService { } } } else { - // If it's in an error state, then update the build to an error state, - // update the activity feed, and return. - logger.warn( - `[BUILD ${uuid}][resolveAndDeployBuild] Build is in an errored state. Not commencing with rollout for ${fullName}/${branchName}:${latestCommit}` + getLogger().warn( + `Build in errored state, not commencing rollout: fullName=${fullName} branchName=${branchName} latestCommit=${latestCommit}` ); await this.updateStatusAndComment(build, BuildStatus.ERROR, runUUID, true, true); } } catch (error) { - logger.child({ error }).error(`[BUILD ${uuid}][resolveAndDeployBuild][ERROR] Failed to deploy build: ${error}`); + getLogger().error({ error }, 'Failed to deploy build'); await this.updateStatusAndComment(build, BuildStatus.ERROR, runUUID, true, true, error); } @@ -620,7 +619,7 @@ export default class BuildService extends BaseService { githubDeployments, namespace: `env-${uuid}`, })); - logger.info(`[BUILD ${build.uuid}] Created build for pull request branch: ${options.repositoryBranchName}`); + getLogger().info(`Created build for pull request: branch=${options.repositoryBranchName}`); return build; } @@ -661,13 +660,11 @@ export default class BuildService extends BaseService { ): Promise { const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][createBuildServiceOverride][buidIdError] No build ID found for this build!`); + getLogger().error('No build ID found for createBuildServiceOverride'); } const serviceId = service?.id; if (!serviceId) { - logger.error( - `[BUILD ${build?.uuid}][createBuildServiceOverride][serviceIdError] No service ID found for this service!` - ); + getLogger().error('No service ID found for createBuildServiceOverride'); } const buildServiceOverride = (await this.db.models.BuildServiceOverride.findOne({ @@ -689,13 +686,17 @@ export default class BuildService extends BaseService { await build.reload(); await build?.$fetchGraph('[services, deploys.[service, build]]'); - logger.debug(`[DELETE ${build?.uuid}] Triggering cleanup`); + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } + + getLogger().debug('Triggering cleanup'); await this.updateStatusAndComment(build, BuildStatus.TEARING_DOWN, build.runUUID, true, true).catch((error) => { - logger.warn(`[BUILD: ${build.uuid}] Failed to update status to ${BuildStatus.TEARING_DOWN}: ${error}`); + getLogger().warn({ error }, `Failed to update status to ${BuildStatus.TEARING_DOWN}`); }); await Promise.all([k8s.deleteBuild(build), cli.deleteBuild(build), uninstallHelmReleases(build)]).catch( - (error) => logger.child({ build, error }).error(`[DELETE ${build?.uuid}] Failed to cleanup build`) + (error) => getLogger().error({ error }, 'Failed to cleanup build') ); await Promise.all( @@ -705,6 +706,7 @@ export default class BuildService extends BaseService { await this.db.services.GithubService.githubDeploymentQueue.add('deployment', { deployId: deploy.id, action: 'delete', + ...extractContextForQueue(), }); }) ); @@ -712,15 +714,14 @@ export default class BuildService extends BaseService { await k8s.deleteNamespace(build.namespace); await this.db.services.Ingress.ingressCleanupQueue.add('cleanup', { buildId: build.id, + ...extractContextForQueue(), }); - logger.info(`[DELETE ${build?.uuid}] Deleted build`); + getLogger().info('Deleted build'); await this.updateStatusAndComment(build, BuildStatus.TORN_DOWN, build.runUUID, true, true).catch((error) => { - logger.warn(`[BUILD: 
${build.uuid}] Failed to update status to ${BuildStatus.TORN_DOWN}: ${error}`); + getLogger().warn({ error }, `Failed to update status to ${BuildStatus.TORN_DOWN}`); }); } catch (e) { - logger.error( - `[DELETE ${build.uuid}] Error deleting build: ${e instanceof LifecycleError ? e.getMessage() : e}` - ); + getLogger().error({ error: e instanceof LifecycleError ? e.getMessage() : e }, 'Error deleting build'); } } } @@ -766,7 +767,7 @@ export default class BuildService extends BaseService { dashboardLinks = insertBuildLink(dashboardLinks, 'Fastly Dashboard', fastlyDashboardUrl.href); } } catch (err) { - logger.error(`[BUILD ${build.uuid}] Unable to get Fastly dashboard URL: ${err}`); + getLogger().error({ error: err }, 'Unable to get Fastly dashboard URL'); } } await build.$query().patch({ dashboardLinks }); @@ -780,15 +781,13 @@ export default class BuildService extends BaseService { updateStatus, error ).catch((e) => { - logger.error(`[BUILD ${build.uuid}] Unable to update pull request activity stream: ${e}`); + getLogger().error({ error: e }, 'Unable to update pull request activity stream'); }); } } finally { - // Even S**T happen, we still try to fire the LC webhooks no matter what - // Pull webhooks for this environment, and run them - logger.debug(`[BUILD ${build.uuid}] Build status changed to ${build.status}.`); + getLogger().debug(`Build status changed: status=${build.status}`); - await this.db.services.Webhook.webhookQueue.add('webhook', { buildId: build.id }); + await this.db.services.Webhook.webhookQueue.add('webhook', { buildId: build.id, ...extractContextForQueue() }); } } @@ -813,9 +812,9 @@ export default class BuildService extends BaseService { await deploy.$query().patch({ status: DeployStatus.BUILT }); } const configUUIDs = configDeploys.map((deploy) => deploy?.uuid).join(','); - logger.info(`[BUILD ${build.uuid}] Updated configuration type deploy ${configUUIDs} as built`); + getLogger().info(`Config deploys marked built: ${configUUIDs}`); } catch (error) { - logger.error(`[BUILD ${build.uuid}] Failed to update configuration type deploy as built: ${error}`); + getLogger().error({ error }, 'Failed to update configuration type deploy as built'); } } @@ -828,7 +827,7 @@ export default class BuildService extends BaseService { }); const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][deployCLIServices][buidIdError] No build ID found for this build!`); + getLogger().error('No build ID found for deployCLIServices'); } const deploys = await Deploy.query() .where({ buildId, ...(githubRepositoryId ? { githubRepositoryId } : {}) }) @@ -842,17 +841,14 @@ export default class BuildService extends BaseService { .filter((d) => d.active && CLIDeployTypes.has(d.deployable.type)) .map(async (deploy) => { if (!deploy) { - logger.debug( - `[BUILD ${build?.uuid}][deployCLIServices] This deploy is undefined. 
Deploys: %j`, - deploys - ); + getLogger().debug(`Deploy is undefined in deployCLIServices: deploysLength=${deploys.length}`); return false; } try { const result = await this.db.services.Deploy.deployCLI(deploy); return result; } catch (err) { - logger.error(`[BUILD ${build?.uuid}][DEPLOY ${deploy?.uuid}][deployCLIServices] Error: ${err}`); + getLogger().error({ error: err }, `CLI deploy failed: deployUuid=${deploy?.uuid}`); return false; } }) @@ -865,25 +861,21 @@ export default class BuildService extends BaseService { .filter((d) => d.active && CLIDeployTypes.has(d.service.type)) .map(async (deploy) => { if (deploy === undefined) { - logger.debug( - "Somehow deploy is undefined here.... That shouldn't be possible? Build deploy length is %s", - deploys.length - ); + getLogger().debug(`Deploy is undefined in deployCLIServices: deploysLength=${deploys.length}`); } const result = await this.db.services.Deploy.deployCLI(deploy).catch((error) => { - logger.error(`[${build.uuid} Build Failure: CLI Failed => ${error}`); + getLogger().error({ error }, 'CLI deploy failed'); return false; }); - if (!result) - logger.info(`[BUILD ${build?.uuid}][${deploy.uuid}][deployCLIServices] CLI deploy unsuccessful`); + if (!result) getLogger().info(`CLI deploy unsuccessful: deployUuid=${deploy.uuid}`); return result; }) ) ); } } catch (error) { - logger.error(`[${build.uuid} Build Failure: CLI Failed => ${error}`); + getLogger().error({ error }, 'CLI build failed'); return false; } } @@ -896,7 +888,7 @@ export default class BuildService extends BaseService { async buildImages(build: Build, githubRepositoryId = null): Promise { const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][buildImages][buidIdError] No build ID found for this build!`); + getLogger().error('No build ID found for buildImages'); } const deploys = await Deploy.query() @@ -919,36 +911,31 @@ export default class BuildService extends BaseService { d.deployable.type === DeployTypes.HELM) ); }); - logger.debug( - `[BUILD ${build.uuid}] Processing ${deploysToBuild.length} deploys for build: ${deploysToBuild + getLogger().debug( + `Processing deploys for build: count=${deploysToBuild.length} deployUuids=${deploysToBuild .map((d) => d.uuid) - .join(', ')}` + .join(',')}` ); const results = await Promise.all( deploysToBuild.map(async (deploy, index) => { if (deploy === undefined) { - logger.debug( - "Somehow deploy deploy is undefined here.... That shouldn't be possible? 
Build deploy length is %s", - build.deploys.length - ); + getLogger().debug(`Deploy is undefined in buildImages: deploysLength=${build.deploys.length}`); } await deploy.$query().patchAndFetch({ deployPipelineId: null, deployOutput: null, }); const result = await this.db.services.Deploy.buildImage(deploy, build.enableFullYaml, index); - logger.debug(`[BUILD ${build.uuid}] Deploy ${deploy.uuid} buildImage completed with result: ${result}`); + getLogger().debug(`buildImage completed: deployUuid=${deploy.uuid} result=${result}`); return result; }) ); const finalResult = _.every(results); - logger.debug( - `[BUILD ${build.uuid}] Build results for each deploy: ${results.join(', ')}, final: ${finalResult}` - ); + getLogger().debug(`Build results: results=${results.join(',')} final=${finalResult}`); return finalResult; } catch (error) { - logger.error(`[${build.uuid}] Uncaught Docker Build Error: ${error}`); + getLogger().error({ error }, 'Uncaught Docker Build Error'); return false; } } else { @@ -956,25 +943,24 @@ export default class BuildService extends BaseService { const results = await Promise.all( deploys .filter((d) => { - logger.debug(`[${d.uuid}] Check for service type for docker builds: %j`, d.service); + getLogger().debug( + `Check service type for docker builds: deployUuid=${d.uuid} serviceType=${d.service?.type}` + ); return d.active && (d.service.type === DeployTypes.DOCKER || d.service.type === DeployTypes.GITHUB); }) .map(async (deploy, index) => { if (deploy === undefined) { - logger.debug( - "Somehow deploy deploy is undefined here.... That shouldn't be possible? Build deploy length is %s", - build.deploys.length - ); + getLogger().debug(`Deploy is undefined in buildImages: deploysLength=${build.deploys.length}`); } const result = await this.db.services.Deploy.buildImage(deploy, build.enableFullYaml, index); - logger.debug(`[BUILD ${build.uuid}] Deploy ${deploy.uuid} buildImage completed with result: ${result}`); - if (!result) logger.info(`[BUILD ${build?.uuid}][${deploy.uuid}][buildImages] build image unsuccessful`); + getLogger().debug(`buildImage completed: deployUuid=${deploy.uuid} result=${result}`); + if (!result) getLogger().info(`Build image unsuccessful: deployUuid=${deploy.uuid}`); return result; }) ); return _.every(results); } catch (error) { - logger.error(`[${build.uuid}] Uncaught Docker Build Error: ${error}`); + getLogger().error({ error }, 'Uncaught Docker Build Error'); return false; } } @@ -1056,6 +1042,7 @@ export default class BuildService extends BaseService { // Queue ingress creation after all deployments await this.db.services.Ingress.ingressManifestQueue.add('manifest', { buildId, + ...extractContextForQueue(), }); // Legacy manifest generation for backwards compatibility @@ -1078,19 +1065,17 @@ export default class BuildService extends BaseService { await build.$query().patch({ manifest: legacyManifest }); } } - await this.updateDeploysImageDetails(build); + await this.updateDeploysImageDetails(build, githubRepositoryId); return true; } catch (e) { - logger.warn(`[BUILD ${build.uuid}] Some problem when deploying services to Kubernetes cluster: ${e}`); + getLogger().warn({ error: e }, 'Problem deploying services to Kubernetes cluster'); throw e; } } else { try { const buildId = build?.id; if (!buildId) { - logger.error( - `[BUILD ${build?.uuid}][generateAndApplyManifests][buidIdError] No build ID found for this build!` - ); + getLogger().error('No build ID found for generateAndApplyManifests'); } const { serviceAccount } = await 
GlobalConfigService.getInstance().getAllConfigs(); @@ -1120,6 +1105,7 @@ export default class BuildService extends BaseService { /* Generate the nginx manifests for this new build */ await this.db.services.Ingress.ingressManifestQueue.add('manifest', { buildId, + ...extractContextForQueue(), }); const isReady = await k8s.waitForPodReady(build); @@ -1138,12 +1124,12 @@ export default class BuildService extends BaseService { ) ) ); - await this.updateDeploysImageDetails(build); + await this.updateDeploysImageDetails(build, githubRepositoryId); } return true; } catch (e) { - logger.warn(`[BUILD ${build.uuid}] Some problem when deploying services to Kubernetes cluster: ${e}`); + getLogger().warn({ error: e }, 'Problem deploying services to Kubernetes cluster'); return false; } } @@ -1167,12 +1153,15 @@ export default class BuildService extends BaseService { return environments; } - private async updateDeploysImageDetails(build: Build) { + private async updateDeploysImageDetails(build: Build, githubRepositoryId?: number) { await build?.$fetchGraph('deploys'); + const deploys = githubRepositoryId + ? build.deploys.filter((d) => d.githubRepositoryId === githubRepositoryId) + : build.deploys; await Promise.all( - build.deploys.map((deploy) => deploy.$query().patch({ isRunningLatest: true, runningImage: deploy?.dockerImage })) + deploys.map((deploy) => deploy.$query().patch({ isRunningLatest: true, runningImage: deploy?.dockerImage })) ); - logger.debug(`[BUILD ${build.uuid}] Updated deploys with running image and latest status`); + getLogger().debug('Updated deploys with running image and latest status'); } /** @@ -1216,15 +1205,28 @@ export default class BuildService extends BaseService { * @param job the BullMQ job with the buildId */ processDeleteQueue = async (job) => { - try { - const buildId = job.data.buildId; - const build = await this.db.models.Build.query().findOne({ - id: buildId, - }); - await this.db.services.BuildService.deleteBuild(build); - } catch (error) { - logger.error(`Error processing delete queue for build ${job.data.buildId}:`, error); - } + const { buildId, buildUuid, sender, correlationId, _ddTraceContext } = job.data; + + return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => { + try { + const build = await this.db.models.Build.query().findOne({ + id: buildId, + }); + + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } + + getLogger({ stage: LogStage.CLEANUP_STARTING }).info('Deleting build'); + await this.db.services.BuildService.deleteBuild(build); + getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info('Build deleted'); + } catch (error) { + getLogger({ stage: LogStage.CLEANUP_FAILED }).error( + { error }, + `Error processing delete queue for build ${buildId}` + ); + } + }); }; /** @@ -1232,36 +1234,43 @@ export default class BuildService extends BaseService { * @param job the BullMQ job with the buildID */ processBuildQueue = async (job) => { - // No retry behavior - catch errors and log them - const buildId = job.data.buildId; - const githubRepositoryId = job?.data?.githubRepositoryId; - let build; - try { - build = await this.db.models.Build.query().findOne({ - id: buildId, - }); + const { buildId, githubRepositoryId, sender, correlationId, _ddTraceContext } = job.data; - await build?.$fetchGraph('[pullRequest, environment]'); - await build.pullRequest.$fetchGraph('[repository]'); + return withLogContext({ correlationId, sender, _ddTraceContext }, async () => { + let build; + try { + build = await 
this.db.models.Build.query().findOne({ + id: buildId, + }); - await this.importYamlConfigFile(build?.environment, build); - const deploys = await this.db.services.Deploy.findOrCreateDeploys(build?.environment, build); + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } - build.$setRelated('deploys', deploys); - await build?.$fetchGraph('deploys.[service, deployable]'); + getLogger({ stage: LogStage.BUILD_STARTING }).info('Build started'); - await this.db.services.BuildService.resolveAndDeployBuild( - build, - build?.pullRequest?.deployOnUpdate, - githubRepositoryId - ); - } catch (error) { - if (error instanceof ParsingError || error instanceof ValidationError) { - this.updateStatusAndComment(build, BuildStatus.CONFIG_ERROR, build?.runUUID, true, true, error); - } else { - logger.fatal(`[BUILD ${build?.uuid}] Uncaught exception: ${error}`); + await build?.$fetchGraph('[pullRequest, environment]'); + await build.pullRequest.$fetchGraph('[repository]'); + + if (!githubRepositoryId) { + await this.importYamlConfigFile(build?.environment, build); + } + + await this.db.services.BuildService.resolveAndDeployBuild( + build, + build?.pullRequest?.deployOnUpdate, + githubRepositoryId + ); + + getLogger({ stage: LogStage.BUILD_COMPLETE }).info('Build completed'); + } catch (error) { + if (error instanceof ParsingError || error instanceof ValidationError) { + this.updateStatusAndComment(build, BuildStatus.CONFIG_ERROR, build?.runUUID, true, true, error); + } else { + getLogger({ stage: LogStage.BUILD_FAILED }).fatal({ error }, `Uncaught exception`); + } } - } + }); }; /** @@ -1271,30 +1280,44 @@ export default class BuildService extends BaseService { * @param done the Bull callback to invoke when we're done */ processResolveAndDeployBuildQueue = async (job) => { - let jobId; - let buildId: number; - try { - jobId = job?.data?.buildId; - const githubRepositoryId = job?.data?.githubRepositoryId; - if (!jobId) throw new Error('jobId is required but undefined'); - const build = await this.db.models.Build.query().findOne({ - id: jobId, - }); + const { sender, correlationId, _ddTraceContext } = job.data; - await build?.$fetchGraph('[pullRequest, environment]'); - await build.pullRequest.$fetchGraph('[repository]'); - buildId = build?.id; - if (!buildId) throw new Error('buildId is required but undefined'); + return withLogContext({ correlationId, sender, _ddTraceContext }, async () => { + let jobId; + let buildId: number; + try { + jobId = job?.data?.buildId; + const githubRepositoryId = job?.data?.githubRepositoryId; + if (!jobId) throw new Error('jobId is required but undefined'); + const build = await this.db.models.Build.query().findOne({ + id: jobId, + }); - if (!build.pullRequest.deployOnUpdate) { - logger.info(`[BUILD ${build.uuid}] Pull request does not have deployOnUpdate enabled. 
Skipping build.`); - return; + await build?.$fetchGraph('[pullRequest, environment]'); + await build.pullRequest.$fetchGraph('[repository]'); + buildId = build?.id; + if (!buildId) throw new Error('buildId is required but undefined'); + + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } + + getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build queued'); + + if (!build.pullRequest.deployOnUpdate) { + getLogger().info('Skipping: deployOnUpdate disabled'); + return; + } + // Enqueue a standard resolve build + await this.db.services.BuildService.buildQueue.add('build', { + buildId, + githubRepositoryId, + ...extractContextForQueue(), + }); + } catch (error) { + const text = `[BUILD ${buildId}][processResolveAndDeployBuildQueue] error processing buildId with the jobId, ${jobId}`; + getLogger().error({ error }, text); } - // Enqueue a standard resolve build - await this.db.services.BuildService.buildQueue.add('build', { buildId, githubRepositoryId }); - } catch (error) { - const text = `[BUILD ${buildId}][processResolveAndDeployBuildQueue] error processing buildId with the jobId, ${jobId}`; - logger.child({ error }).error(text); - } + }); }; } diff --git a/src/server/services/codefresh.ts b/src/server/services/codefresh.ts index 176024e..77d2284 100644 --- a/src/server/services/codefresh.ts +++ b/src/server/services/codefresh.ts @@ -17,15 +17,12 @@ import BaseService from './_service'; import * as YamlService from 'server/models/yaml'; import { triggerPipeline } from 'server/lib/codefresh'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ - filename: 'services/codefresh.ts', -}); +import { getLogger } from 'server/lib/logger/index'; export default class CodefreshService extends BaseService { async triggerYamlConfigWebhookPipeline(webhook: YamlService.Webhook, data: Record): Promise { let buildId: string; + const buildUuid = data?.buildUUID; if ( webhook.state !== undefined && webhook.type !== undefined && @@ -34,11 +31,11 @@ export default class CodefreshService extends BaseService { ) { buildId = await triggerPipeline(webhook.pipelineId, webhook.trigger, data); } else { - logger - .child({ webhook }) - .error( - `[WEBHOOK ${webhook.name ?? ''} ${webhook.pipelineId}/${webhook.trigger}] Invalid webhook configuration.` - ); + getLogger({ buildUuid, webhook }).error( + `Invalid webhook configuration: name=${webhook.name ?? 
''} pipelineId=${webhook.pipelineId} trigger=${ + webhook.trigger + }` + ); } return buildId; } diff --git a/src/server/services/deploy.ts b/src/server/services/deploy.ts index 17ffca5..9b37116 100644 --- a/src/server/services/deploy.ts +++ b/src/server/services/deploy.ts @@ -17,7 +17,7 @@ import BaseService from './_service'; import { Environment, Build, Service, Deploy, Deployable } from 'server/models'; import * as codefresh from 'server/lib/codefresh'; -import rootLogger from 'server/lib/logger'; +import { getLogger, updateLogContext } from 'server/lib/logger/index'; import hash from 'object-hash'; import { DeployStatus, DeployTypes } from 'shared/constants'; import * as cli from 'server/lib/cli'; @@ -38,10 +38,6 @@ import { buildWithNative } from 'server/lib/nativeBuild'; import { constructEcrTag } from 'server/lib/codefresh/utils'; import { ChartType, determineChartType } from 'server/lib/nativeHelm'; -const logger = rootLogger.child({ - filename: 'services/deploy.ts', -}); - export interface DeployOptions { ownerId?: number; repositoryId?: string; @@ -66,8 +62,9 @@ export default class DeployService extends BaseService { * Creates all of the relevant deploys for a build, based on the provided environment, if they do not already exist. * @param environment the environment to use as a the template for these deploys * @param build the build these deploys will be associated with + * @param githubRepositoryId optional filter to only update SHA for deploys from this repo */ - async findOrCreateDeploys(environment: Environment, build: Build): Promise { + async findOrCreateDeploys(environment: Environment, build: Build, githubRepositoryId?: number): Promise { await build?.$fetchGraph('[deployables.[repository]]'); const { deployables } = build; @@ -76,41 +73,48 @@ export default class DeployService extends BaseService { // // With full yaml enable. Creating deploys from deployables instead of services. This will include YAML only config. // + const { kedaScaleToZero: defaultKedaScaleToZero } = await GlobalConfigService.getInstance().getAllConfigs(); + + const buildId = build?.id; + if (!buildId) { + getLogger().error('findOrCreateDeploys: No build ID found for this build'); + return []; + } + + const existingDeploys = await this.db.models.Deploy.query().where({ buildId }).withGraphFetched('deployable'); + const existingDeployMap = new Map(existingDeploys.map((d) => [d.deployableId, d])); + await Promise.all( deployables.map(async (deployable) => { const uuid = `${deployable.name}-${build?.uuid}`; - const buildId = build?.id; - if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`); - return; - } + const patchFields: Objection.PartialModelObject = {}; + const isTargetRepo = !githubRepositoryId || deployable.repositoryId === githubRepositoryId; - let deploy = await this.db.models.Deploy.findOne({ - deployableId: deployable.id, - buildId, - }).catch((error) => { - logger.warn(`[BUILD ${build?.uuid}] [Service ${deployable.id}] ${error}`); - return null; - }); + let deploy = existingDeployMap.get(deployable.id) ?? 
null; + if (!deploy) { + deploy = await this.db.models.Deploy.findOne({ + deployableId: deployable.id, + buildId, + }).catch((error) => { + getLogger().warn({ error, serviceId: deployable.id }, 'Failed to find deploy'); + return null; + }); + if (deploy) { + getLogger().warn(`Deploy not in batch result but found via fallback: deployableId=${deployable.id}`); + } + } if (deploy != null) { - await deploy.$fetchGraph('deployable'); - - // If deploy is already exists (re-deployment) - await deploy.$query().patch({ - deployableId: deployable?.id ?? null, - publicUrl: this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable), - internalHostname: uuid, - uuid, - branchName: deployable.commentBranchName ?? deployable.branchName, - tag: deployable.defaultTag, - }); - } else { - const buildId = build?.id; - if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`); + if (!isTargetRepo) { + return; } - // Create deploy object if this is new deployment + patchFields.deployableId = deployable?.id ?? null; + patchFields.publicUrl = this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable); + patchFields.internalHostname = uuid; + patchFields.uuid = uuid; + patchFields.branchName = deployable.commentBranchName ?? deployable.branchName; + patchFields.tag = deployable.defaultTag; + } else { deploy = await this.db.models.Deploy.create({ buildId, serviceId: deployable.serviceId, @@ -121,46 +125,34 @@ export default class DeployService extends BaseService { active: deployable.active, }); - await deploy.$fetchGraph('deployable'); - - await deploy.$query().patch({ - branchName: deployable.branchName, - tag: deployable.defaultTag, - publicUrl: this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable), - }); + patchFields.branchName = deployable.branchName; + patchFields.tag = deployable.defaultTag; + patchFields.publicUrl = this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable); deploy.$setRelated('deployable', deployable); deploy.$setRelated('build', build); } - // only set sha for deploys where needed - if ([DeployTypes.HELM, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes(deployable.type)) { + if (isTargetRepo && [DeployTypes.HELM, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes(deployable.type)) { try { const sha = await getShaForDeploy(deploy); - await deploy.$query().patch({ - sha, - }); + patchFields.sha = sha; } catch (error) { - logger.debug(`[DEPLOY ${deploy.uuid}] Unable to get SHA, continuing: ${error}`); + getLogger().debug({ error }, 'Unable to get SHA, continuing'); } } - const { kedaScaleToZero: defaultKedaScaleToZero } = await GlobalConfigService.getInstance().getAllConfigs(); - - const kedaScaleToZero = + patchFields.kedaScaleToZero = deployable?.kedaScaleToZero?.type === 'http' && defaultKedaScaleToZero?.enabled - ? { - ...defaultKedaScaleToZero, - ...deployable.kedaScaleToZero, - } + ? 
{ ...defaultKedaScaleToZero, ...deployable.kedaScaleToZero } : null; - await deploy.$query().patch({ kedaScaleToZero }); + await deploy.$query().patch(patchFields); }) ).catch((error) => { - logger.error(`[BUILD ${build?.uuid}] Failed to create deploys from deployables: ${error}`); + getLogger().error({ error }, 'Failed to create deploys from deployables'); }); - logger.info(`[BUILD ${build?.uuid}] Deploys created(or exists already) for deployables with YAML config`); + getLogger().info('Deploys initialized'); } else { const serviceInitFunc = async (service: Service, active: boolean): Promise => { const newDeploys: Deploy[] = []; @@ -188,9 +180,7 @@ export default class DeployService extends BaseService { ); }) ); - logger.info( - `[BUILD ${build?.uuid}] Created ${newDeploys.length} deploys from services table for non-YAML config` - ); + getLogger().info(`Deploys created: count=${newDeploys.length}`); return newDeploys; }; @@ -199,20 +189,21 @@ export default class DeployService extends BaseService { environment.defaultServices.map((service) => serviceInitFunc(service, true)), environment.optionalServices.map((service) => serviceInitFunc(service, false)), ]).catch((error) => { - logger.error(`[BUILD ${build?.uuid}] Something is wrong when trying to create/update deploys: ${error}`); + getLogger().error({ error }, 'Failed to create/update deploys'); }); } const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`); + getLogger().error('findOrCreateDeploy: No build ID found for this build'); } await this.db.models.Deploy.query().where({ buildId }); await build?.$fetchGraph('deploys'); if (build?.deployables?.length !== build?.deploys?.length) { - logger.warn( - `[BUILD ${build?.uuid} (${buildId})] No worry. Nothing critical yet: Deployables count (${build.deployables.length}) mismatch with Deploys count (${build.deploys.length}).` + getLogger().warn( + { buildId, deployablesCount: build.deployables.length, deploysCount: build.deploys.length }, + 'Deployables count mismatch with Deploys count' ); } @@ -231,18 +222,18 @@ export default class DeployService extends BaseService { const uuid = `${service.name}-${build?.uuid}`; const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`); + getLogger().error('findOrCreateDeploy: No build ID found for this build'); } const serviceId = service?.id; if (!serviceId) { - logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][serviceIdError] No service ID found for this service!`); + getLogger().error('findOrCreateDeploy: No service ID found for this service'); } // Deployable should be find at this point; otherwise, something is very wrong. 
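// --- Illustrative sketch (not part of this patch) ---------------------------------
// The getLogger()/withLogContext()/updateLogContext()/extractContextForQueue() calls
// used throughout this diff come from server/lib/logger. Their real implementation is
// not shown in this section; the sketch below only approximates the AsyncLocalStorage
// based pattern the commit describes. The pino setup and the exact LogContext shape
// beyond correlationId/buildUuid are assumptions, not the repo's actual module.
import { AsyncLocalStorage } from 'node:async_hooks';
import pino from 'pino';

type LogContext = Record<string, unknown> & { correlationId?: string; buildUuid?: string };

const contextStorage = new AsyncLocalStorage<LogContext>();
const baseLogger = pino();

// Run fn with ctx bound to the async call tree, so any getLogger() call below sees it.
export function withLogContext<T>(ctx: LogContext, fn: () => Promise<T>): Promise<T> {
  return contextStorage.run({ ...ctx }, fn);
}

// Merge extra fields (e.g. buildUuid once the build row is loaded) into the current context.
export function updateLogContext(fields: LogContext): void {
  const store = contextStorage.getStore();
  if (store) Object.assign(store, fields);
}

// Child logger carrying the ambient context plus any call-site bindings.
export function getLogger(bindings: Record<string, unknown> = {}) {
  return baseLogger.child({ ...(contextStorage.getStore() ?? {}), ...bindings });
}

// Snapshot of the current context, spread into BullMQ job data by queue producers.
export function extractContextForQueue(): LogContext {
  return { ...(contextStorage.getStore() ?? {}) };
}
// -----------------------------------------------------------------------------------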
const deployable: Deployable = await this.db.models.Deployable.query() .findOne({ buildId, serviceId }) .catch((error) => { - logger.error(`[BUILD ${build.uuid}] [Service ${serviceId}] ${error}`); + getLogger().error({ error, serviceId }, 'Failed to find deployable'); return null; }); @@ -250,7 +241,7 @@ export default class DeployService extends BaseService { serviceId, buildId, }).catch((error) => { - logger.warn(`[BUILD ${build?.uuid}] [Service ${serviceId}] ${error}`); + getLogger().warn({ error, serviceId }, 'Failed to find deploy'); return null; }); if (deploy != null) { @@ -265,13 +256,11 @@ export default class DeployService extends BaseService { } else { const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`); + getLogger().error('findOrCreateDeploy: No build ID found for this build'); } const serviceId = service?.id; if (!serviceId) { - logger.error( - `[BUILD ${build?.uuid}][findOrCreateDeploy][serviceIdError] No service ID found for this service!` - ); + getLogger().error('findOrCreateDeploy: No service ID found for this service'); } // Create deploy object if this is new deployment deploy = await this.db.models.Deploy.create({ @@ -286,7 +275,7 @@ export default class DeployService extends BaseService { await build?.$fetchGraph('[buildServiceOverrides]'); const override = build.buildServiceOverrides.find((bso) => bso.serviceId === serviceId); - logger.debug(`[BUILD ${build.uuid}] Override: ${override}`); + getLogger().debug({ override: override ? JSON.stringify(override) : null }, 'Service override found'); /* Default to the service branch name */ let resolvedBranchName = service.branchName; /* If the deploy already has a branch name set, use that */ @@ -375,19 +364,21 @@ export default class DeployService extends BaseService { } return null; } catch (error) { - logger.debug(`Error checking for existing Aurora database: ${error}`); + getLogger().debug({ error }, 'Error checking for existing Aurora database'); return null; } } async deployAurora(deploy: Deploy): Promise { + updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }); try { // For now, we're just going to shell out and run the deploy await deploy.reload(); await deploy.$fetchGraph('[build, deployable]'); + updateLogContext({ serviceName: deploy.deployable?.name }); if (!deploy.deployable) { - logger.error(`[DEPLOY ${deploy?.uuid}] Missing deployable for Aurora restore`); + getLogger().error('Missing deployable for Aurora restore'); return false; } @@ -399,7 +390,7 @@ export default class DeployService extends BaseService { * Both statuses indicate the Aurora database already exists and should not be recreated */ if ((deploy.status === DeployStatus.BUILT || deploy.status === DeployStatus.READY) && deploy.cname) { - logger.info(`[DEPLOY ${deploy?.uuid}] Aurora restore already built (status: ${deploy.status})`); + getLogger().info(`Aurora: already built, skipping`); return true; } @@ -407,9 +398,7 @@ export default class DeployService extends BaseService { // This handles both: status is BUILT/READY but cname missing, OR first-time deploy const existingDbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name); if (existingDbEndpoint) { - logger.info( - `[DEPLOY ${deploy?.uuid}] Aurora database already exists with endpoint ${existingDbEndpoint}, skipping creation` - ); + getLogger().info(`Aurora: exists, skipping`); await 
deploy.$query().patch({ cname: existingDbEndpoint, status: DeployStatus.BUILT, @@ -423,7 +412,7 @@ export default class DeployService extends BaseService { buildLogs: uuid, runUUID: nanoid(), }); - logger.info(`[DEPLOY ${deploy?.uuid}] Restoring Aurora cluster for ${deploy?.uuid}`); + getLogger().info('Aurora: restoring'); await cli.cliDeploy(deploy); // After creation, find the database endpoint @@ -440,10 +429,10 @@ export default class DeployService extends BaseService { status: DeployStatus.BUILT, }); } - logger.info(`[DEPLOY ${deploy?.uuid}] Restored Aurora cluster for ${deploy?.uuid}`); + getLogger().info('Aurora: restored'); return true; } catch (e) { - logger.error(`[DEPLOY ${deploy?.uuid}] Aurora cluster restore for ${deploy?.uuid} failed with error: ${e}`); + getLogger().error({ error: e }, 'Aurora cluster restore failed'); await deploy.$query().patch({ status: DeployStatus.ERROR, }); @@ -452,6 +441,7 @@ export default class DeployService extends BaseService { } async deployCodefresh(deploy: Deploy): Promise { + updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }); let result: boolean = false; // We'll use either a tag specified in the UI when creating a manual build @@ -465,19 +455,16 @@ export default class DeployService extends BaseService { await deploy.reload(); await deploy.$fetchGraph('[service.[repository], deployable.[repository], build]'); const { build, service, deployable } = deploy; + updateLogContext({ serviceName: deployable?.name || service?.name }); const { repository } = build.enableFullYaml ? deployable : service; const repo = repository?.fullName; const [owner, name] = repo?.split('/') || []; const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name).catch((error) => { - logger.warn( - `[BUILD ${build.uuid}] ${owner}/${name}/${deploy.branchName} Something could be wrong when retrieving commit SHA for ${deploy.uuid} from github: ${error}` - ); + getLogger().warn({ error, owner, name, branch: deploy.branchName }, 'Failed to retrieve commit SHA from github'); }); if (!fullSha) { - logger.warn( - `[BUILD ${build.uuid}] ${owner}/${name}/${deploy.branchName} Commit SHA for ${deploy.uuid} cannot be falsy. 
Check the owner, etc.` - ); + getLogger().warn({ owner, name, branch: deploy.branchName }, 'Commit SHA cannot be falsy'); result = false; } else { @@ -490,10 +477,10 @@ export default class DeployService extends BaseService { // Make sure we're in a clean state await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT, sha: buildSha }, runUUID).catch( (error) => { - logger.warn(`[BUILD ${build.uuid}] Failed to update activity feed: ${error}`); + getLogger().warn({ error }, 'Failed to update activity feed'); } ); - logger.info(`[BUILD ${deploy?.uuid}] Marked codefresh deploy ${deploy?.uuid} as built since no changes`); + getLogger().info('Codefresh: no changes, marked built'); result = true; } else { let buildLogs: string; @@ -508,10 +495,10 @@ export default class DeployService extends BaseService { }); codefreshBuildId = await cli.codefreshDeploy(deploy, build, service, deployable).catch((error) => { - logger.error(`[BUILD ${build.uuid}] Failed to receive codefresh build id for ${deploy.uuid}: ${error}`); + getLogger().error({ error }, 'Failed to receive codefresh build id'); return null; }); - logger.info(`[DEPLOY ${deploy?.uuid}] Triggered codefresh build for ${deploy?.uuid}`); + getLogger().info('Codefresh: build triggered'); if (codefreshBuildId != null) { buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`; @@ -525,16 +512,12 @@ export default class DeployService extends BaseService { }, runUUID ).catch((error) => { - logger.warn(`[BUILD ${build.uuid}] Failed to update activity feed: ${error}`); + getLogger().warn({ error }, 'Failed to update activity feed'); }); - logger - .child({ url: buildLogs }) - .info(`[DEPLOY ${deploy?.uuid}] Wait for codefresh build to complete for ${deploy?.uuid}`); + getLogger().info(`Codefresh: waiting for build url=${buildLogs}`); await cli.waitForCodefresh(codefreshBuildId); const buildOutput = await getLogs(codefreshBuildId); - logger - .child({ url: buildLogs }) - .info(`[DEPLOY ${deploy?.uuid}] Codefresh build completed for ${deploy?.uuid}`); + getLogger().info('Codefresh: build completed'); await this.patchAndUpdateActivityFeed( deploy, { @@ -545,16 +528,14 @@ export default class DeployService extends BaseService { }, runUUID ).catch((error) => { - logger.warn(`[BUILD ${build.uuid}] Failed to update activity feed: ${error}`); + getLogger().warn({ error }, 'Failed to update activity feed'); }); result = true; } } catch (error) { // Error'd while waiting for the pipeline to finish. This is usually due to an actual // pipeline failure or a pipeline getting terminated. 
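// --- Illustrative usage sketch (not part of this patch) ----------------------------
// deployAurora/deployCodefresh/buildImage above each call updateLogContext({ deployUuid,
// serviceName }) on entry, so later getLogger() calls in the same async flow are tagged
// automatically. A minimal sketch of that pattern, assuming the helpers sketched earlier;
// DeploySketch is a stand-in type, not the real Objection model.
type DeploySketch = { uuid: string; deployable?: { name?: string }; service?: { name?: string } };

async function deployWithContext(deploy: DeploySketch): Promise<boolean> {
  // Enrich the ambient log context once; downstream getLogger() calls now emit
  // deployUuid/serviceName without threading them through every argument list.
  updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name });
  try {
    getLogger().info('Deploy: starting');
    // ...actual deploy work happens here...
    return true;
  } catch (error) {
    getLogger().error({ error }, 'Deploy: failed');
    return false;
  }
}
// -----------------------------------------------------------------------------------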
- logger - .child({ url: buildLogs }) - .error(`[BUILD ${build?.uuid}] Codefresh build failed for ${deploy?.uuid}: ${error}`); + getLogger().error({ error, url: buildLogs }, 'Codefresh build failed'); await this.patchAndUpdateActivityFeed( deploy, { @@ -593,6 +574,7 @@ export default class DeployService extends BaseService { * @param deploy the deploy to build an image for */ async buildImage(deploy: Deploy, enableFullYaml: boolean, index: number): Promise { + updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }); try { // We'll use either a tag specified in the UI when creating a manual build // or the default tag specified on the service @@ -603,8 +585,8 @@ export default class DeployService extends BaseService { await deploy.$fetchGraph('[service, build.[environment], deployable]'); const { service, build, deployable } = deploy; + updateLogContext({ serviceName: deployable?.name || service?.name }); const uuid = build?.uuid; - const uuidText = uuid ? `[DEPLOY ${uuid}][buildImage]:` : '[DEPLOY][buildImage]:'; if (!enableFullYaml) { await service.$fetchGraph('repository'); @@ -672,7 +654,7 @@ export default class DeployService extends BaseService { const shortSha = fullSha.substring(0, 7); - logger.debug(`${uuidText} Building docker image ${service.name} ${deploy.branchName}`); + getLogger().debug({ serviceName: service.name, branchName: deploy.branchName }, 'Building docker image'); await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILDING, sha: fullSha }, runUUID); /** * @note { svc: index } ensures the hash for each image is unique per service @@ -694,10 +676,10 @@ export default class DeployService extends BaseService { (await codefresh.tagExists({ tag, ecrRepo, uuid })) && (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid }))); - logger.debug(`${uuidText} Tags exist check for ${deploy.uuid}: ${tagsExist}`); + getLogger().debug({ tagsExist }, 'Tags exist check'); const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG'; if (!ecrDomain || !registry) { - logger.child({ lifecycleDefaults }).error(`[BUILD ${deploy.uuid}] Missing ECR config to build image`); + getLogger().error({ lifecycleDefaults }, 'Missing ECR config to build image'); await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); return false; } @@ -748,7 +730,7 @@ export default class DeployService extends BaseService { } } } else { - logger.debug(`${uuidText} Build type not recognized: ${service.type} for deploy.`); + getLogger().debug({ type: service.type }, 'Build type not recognized'); return false; } return true; @@ -765,7 +747,7 @@ export default class DeployService extends BaseService { }, runUUID ); - logger.info(`[${deploy?.uuid}] Marked ${deploy.uuid} as BUILT since its a public docker image`); + getLogger().info('Image: public, marked built'); return true; case DeployTypes.HELM: { try { @@ -782,8 +764,9 @@ export default class DeployService extends BaseService { try { fullSha = await github.getShaForDeploy(deploy); } catch (shaError) { - logger.debug( - `[${deploy?.uuid}] Could not get SHA for PUBLIC helm chart, continuing without it: ${shaError.message}` + getLogger().debug( + { error: shaError }, + 'Could not get SHA for PUBLIC helm chart, continuing without it' ); } } @@ -799,17 +782,17 @@ export default class DeployService extends BaseService { ); return true; } catch (error) { - logger.child({ error }).warn(`[${deploy?.uuid}] Error processing Helm 
deployment: ${error.message}`); + getLogger().warn({ error }, 'Error processing Helm deployment'); return false; } } default: - logger.debug(`[${deploy.uuid}] Build type not recognized: ${deployable.type} for deploy.`); + getLogger().debug({ type: deployable.type }, 'Build type not recognized'); return false; } } } catch (e) { - logger.error(`[${deploy.uuid}] Uncaught error building docker image: ${e}`); + getLogger().error({ error: e }, 'Uncaught error building docker image'); return false; } } @@ -817,42 +800,42 @@ export default class DeployService extends BaseService { public async patchAndUpdateActivityFeed( deploy: Deploy, params: Objection.PartialModelObject, - runUUID: string + runUUID: string, + targetGithubRepositoryId?: number ) { let build: Build; try { const id = deploy?.id; await this.db.models.Deploy.query().where({ id, runUUID }).patch(params); if (deploy.runUUID !== runUUID) { - logger.debug( - `[DEPLOY ${deploy.uuid}] runUUID mismatch: deploy.runUUID=${deploy.runUUID}, provided runUUID=${runUUID}` - ); + getLogger().debug({ deployRunUUID: deploy.runUUID, providedRunUUID: runUUID }, 'runUUID mismatch'); return; } - await deploy.$fetchGraph('build.[deploys.[service, deployable], pullRequest.[repository]]'); + + await deploy.$fetchGraph('build.pullRequest'); build = deploy?.build; const pullRequest = build?.pullRequest; await this.db.services.ActivityStream.updatePullRequestActivityStream( build, - build?.deploys, + [], pullRequest, - pullRequest?.repository, + null, true, true, null, - false + true, + targetGithubRepositoryId ); } catch (error) { - logger.child({ error }).warn(`[BUILD ${build?.uuid}] Failed to update the activity feeds`); + getLogger().warn({ error }, 'Failed to update the activity feeds'); } } private async patchDeployWithTag({ tag, deploy, initTag, ecrDomain }) { await deploy.$fetchGraph('[build, service, deployable]'); const { build, deployable, service } = deploy; - const uuid = build?.uuid; - const uuidText = uuid ? `[DEPLOY ${uuid}][patchDeployWithTag]:` : '[DEPLOY][patchDeployWithTag]:'; + const _uuid = build?.uuid; let ecrRepo = deployable?.ecr as string; const serviceName = build?.enableFullYaml ? deployable?.name : service?.name; @@ -868,7 +851,7 @@ export default class DeployService extends BaseService { initDockerImage, }) .catch((error) => { - logger.warn(`${uuidText} ${error}`); + getLogger().warn({ error }, 'patchDeployWithTag failed'); }); } @@ -906,11 +889,10 @@ export default class DeployService extends BaseService { async buildImageForHelmAndGithub(deploy: Deploy, runUUID: string) { const { build, deployable } = deploy; const uuid = build?.uuid; - const uuidText = `[BUILD ${deploy?.uuid}]:`; if (deploy.branchName === null) { // This means we're using an external host, rather than building from source. 
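// --- Illustrative sketch (not part of this patch) ----------------------------------
// How correlation context survives a BullMQ hop in this patch's pattern (webhookQueue,
// buildQueue, resolveAndDeployBuildQueue, processWebhooks elsewhere in the diff):
// producers spread extractContextForQueue() into job data, workers re-establish it with
// withLogContext() before doing any work. Queue name, Redis connection, and the payload
// shape here are placeholders; the real queues are registered via this.queueManager.
// Assumes the getLogger/withLogContext/extractContextForQueue helpers sketched earlier.
import { Queue, Worker } from 'bullmq';

const connection = { host: 'localhost', port: 6379 }; // placeholder connection

const exampleQueue = new Queue('example', { connection });

// Producer: runs inside withLogContext(), so extractContextForQueue() captures
// correlationId/buildUuid/_ddTraceContext and ships them with the job.
async function enqueueBuild(buildId: number) {
  await exampleQueue.add('build', { buildId, ...extractContextForQueue() });
}

// Worker: re-establishes the same context, so getLogger() inside the handler is
// correlated with the originating request or webhook.
new Worker(
  'example',
  async (job) => {
    const { buildId, correlationId, buildUuid, sender, _ddTraceContext } = job.data;
    return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => {
      getLogger().info(`Processing build: buildId=${buildId}`);
      // ...handler logic...
    });
  },
  { connection }
);
// -----------------------------------------------------------------------------------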
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID); - logger.info(`${uuidText} [${deploy?.uuid}] Deploy is marked ready for external Host`); + getLogger().info('Deploy is marked ready for external Host'); } else { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID); @@ -935,9 +917,7 @@ export default class DeployService extends BaseService { // Verify we actually have a SHA from github before proceeding if (!fullSha) { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); - logger.error( - `${uuidText} Failed to retrieve SHA for ${owner}/${name}/${deploy.branchName} to build ${deploy.uuid}` - ); + getLogger().error({ owner, name, branch: deploy.branchName }, 'Failed to retrieve SHA to build'); return false; } @@ -961,7 +941,7 @@ export default class DeployService extends BaseService { const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG'; if (!ecrDomain || !registry) { - logger.child({ lifecycleDefaults }).error(`[BUILD ${deploy.uuid}] Missing ECR config to build image`); + getLogger().error({ lifecycleDefaults }, 'Missing ECR config to build image'); await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); return false; } @@ -970,11 +950,11 @@ export default class DeployService extends BaseService { (await codefresh.tagExists({ tag, ecrRepo, uuid })) && (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid }))); - logger.debug(`${uuidText} Tags exist check for ${deploy.uuid}: ${tagsExist}`); + getLogger().debug({ tagsExist }, 'Tags exist check'); // Check for and skip duplicates if (!tagsExist) { - logger.info(`${uuidText} Building image`); + getLogger().info('Image: building'); // if this deploy has any env vars that depend on other builds, we need to wait for those builds to finish // and update the env vars in this deploy before we can build the image @@ -1012,7 +992,7 @@ export default class DeployService extends BaseService { }; if (['buildkit', 'kaniko'].includes(deployable.builder?.engine)) { - logger.info(`${uuidText} Building image with native build (${deployable.builder.engine})`); + getLogger().info(`Image: building (${deployable.builder.engine})`); const nativeOptions = { ...buildOptions, @@ -1048,7 +1028,7 @@ export default class DeployService extends BaseService { } } - logger.info(`${uuidText} Building image with Codefresh`); + getLogger().info('Image: building (Codefresh)'); const buildPipelineId = await codefresh.buildImage(buildOptions); const buildLogs = `https://g.codefresh.io/build/${buildPipelineId}`; @@ -1060,15 +1040,15 @@ export default class DeployService extends BaseService { if (buildSuccess) { await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); - logger.child({ url: buildLogs }).info(`${uuidText} Image built successfully`); + getLogger().info('Image: built'); return true; } else { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID); - logger.child({ url: buildLogs }).warn(`${uuidText} Error building image for ${deploy?.uuid}`); + getLogger().warn({ url: buildLogs }, 'Error building image'); return false; } } else { - logger.info(`${uuidText} Image already exist for ${deploy?.uuid}`); + getLogger().info('Image: exists, skipping build'); await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID); return true; @@ -1084,13 
+1064,13 @@ export default class DeployService extends BaseService { const servicesToWaitFor = extractEnvVarsWithBuildDependencies(deploy.deployable.env); for (const [serviceName, patternsInfo] of Object.entries(servicesToWaitFor)) { - const awaitingService = deploy.uuid; + const _awaitingService = deploy.uuid; const waitingForService = `${serviceName}-${build.uuid}`; const dependentDeploy = deploys.find((d) => d.uuid === waitingForService); if (dependentDeploy.uuid === waitingForService) { - logger.info(`[BUILD ${awaitingService}]: ${awaitingService} is waiting for ${waitingForService} to complete`); + getLogger().info({ waitingFor: waitingForService }, 'Waiting for service to complete'); await this.patchAndUpdateActivityFeed( deploy, @@ -1130,9 +1110,7 @@ export default class DeployService extends BaseService { // about the output of that build, we can just pass an empty string as the pattern if (!item.pattern || item.pattern.trim() === '') { extractedValues[item.envKey] = ''; - logger.info( - `[BUILD ${awaitingDeploy?.uuid}]: Empty pattern for key "${item.envKey}". Assuming build dependecy` - ); + getLogger().info({ envKey: item.envKey }, 'Empty pattern, assuming build dependency'); return; } @@ -1141,17 +1119,19 @@ export default class DeployService extends BaseService { if (match && match[0]) { extractedValues[item.envKey] = match[0]; - logger.debug( - `[BUILD ${awaitingDeploy?.uuid}]: Successfully extracted value: "${match[0]}" for key: "${item.envKey}" using pattern "${item.pattern}"` + getLogger().debug( + { value: match[0], envKey: item.envKey, pattern: item.pattern }, + 'Successfully extracted value' ); } else { - logger.info( - `[BUILD ${awaitingDeploy?.uuid}]: No match found for pattern "${item.pattern}" in ${serviceName} build pipeline with id: ${pipelineId}. Value of ${item.envKey} will be empty` + getLogger().info( + { pattern: item.pattern, serviceName, pipelineId, envKey: item.envKey }, + 'No match found for pattern, value will be empty' ); } }); } catch (error) { - logger.error(`Error processing pipeline ${pipelineId} for service ${serviceName}:`, error); + getLogger().error({ error, pipelineId, serviceName }, 'Error processing pipeline'); throw error; } } diff --git a/src/server/services/deployable.ts b/src/server/services/deployable.ts index eb8f746..c51337b 100644 --- a/src/server/services/deployable.ts +++ b/src/server/services/deployable.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import BaseService from './_service'; import { Environment, Repository, Service, PullRequest, Build, Deploy } from 'server/models'; import Deployable from 'server/models/Deployable'; @@ -24,10 +24,6 @@ import { CAPACITY_TYPE, DeployTypes } from 'shared/constants'; import { Builder, Helm, KedaScaleToZero } from 'server/models/yaml'; import GlobalConfigService from './globalConfig'; -const logger = rootLogger.child({ - filename: 'services/deployable.ts', -}); - export interface DeployableAttributes { appShort?: string; ecr?: string; @@ -211,11 +207,11 @@ export default class DeployableService extends BaseService { attributes.serviceDisksYaml = JSON.stringify(yamlServiceDisks); } } catch (error) { - logger - .child({ service, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem generating deployable attributes from the database configuration.` - ); + getLogger({ + buildUUID, + service: service.name, + error, + }).error('Failed to generate deployable attributes from database configuration'); throw error; } @@ -383,11 +379,11 @@ export default class DeployableService extends BaseService { }; } } catch (error) { - logger - .child({ service, deployment }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem generating deployable attributes from the yaml configuration. Error: ${error}` - ); + getLogger({ + buildUUID, + service: service.name, + error, + }).error('Failed to generate deployable attributes from yaml configuration'); throw error; } @@ -419,11 +415,11 @@ export default class DeployableService extends BaseService { mergedAttributes = { ...yamlAttributes }; } } catch (error) { - logger - .child({ dbAttributes, yamlAttributes, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem merging deployable attributes from the database with the yaml configuration. 
${error}` - ); + getLogger({ + buildUUID, + service: service.name, + error, + }).error('Failed to merge deployable attributes from database with yaml configuration'); throw error; } @@ -475,11 +471,11 @@ export default class DeployableService extends BaseService { } } } catch (error) { - logger - .child({ service, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem overwriting the deployable object configuration with the yaml configuration.` - ); + getLogger({ + buildUUID, + service: service.name, + error, + }).error('Failed to overwrite deployable configuration with yaml configuration'); throw error; } } @@ -514,8 +510,8 @@ export default class DeployableService extends BaseService { const dependencies: Service[] = await this.db.models.Service.query().where('dependsOnServiceId', service.id); - logger.debug( - `[BUILD ${buildUUID}] ${service.name} has ${dependencies.length} database dependency(dependsOnServiceId).` + getLogger({ buildUUID, service: service.name }).debug( + `Service has ${dependencies.length} database dependency(dependsOnServiceId)` ); await Promise.all( @@ -539,11 +535,11 @@ export default class DeployableService extends BaseService { }) ); } catch (error) { - logger - .child({ service, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem creating or updating the deployable attributes from the database configuration.` - ); + getLogger({ + buildUUID, + service: service.name, + error, + }).error('Failed to create or update deployable attributes from database configuration'); throw error; } } @@ -570,9 +566,11 @@ export default class DeployableService extends BaseService { const dbService: Service = await Service.query() .findOne({ name: service.name }) .catch((error) => { - logger - .child({ error }) - .debug(`[BUILD ${buildUUID}] Not really an error. Just no db config for this yaml based service`); + getLogger({ + buildUUID, + service: service.name, + error, + }).debug('No database config for this yaml based service'); return null; }); @@ -592,8 +590,8 @@ export default class DeployableService extends BaseService { await build?.pullRequest?.$fetchGraph('[repository]'); repository = build?.pullRequest?.repository; if (!repository) { - logger.error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] Unable to find ${repoName} from Lifecycle database. Please verify the repository name and make sure Lifecycle Github app is installed on repository.` + getLogger({ buildUUID, service: service.name }).error( + `Unable to find ${repoName} from Lifecycle database. 
Verify repository name and ensure Lifecycle Github app is installed` ); } } @@ -641,11 +639,11 @@ export default class DeployableService extends BaseService { ); } } catch (error) { - logger - .child({ deployableServices, service, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem creating or updating the deployable attributes from the yaml configuration when using a services yaml configuration.` - ); + getLogger({ + buildUUID, + service: service.name, + error, + }).error('Failed to create or update deployable attributes from yaml configuration'); throw error; } } @@ -714,11 +712,11 @@ export default class DeployableService extends BaseService { build ); } catch (error) { - logger - .child({ dbEnvService, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${dbEnvService.name}] There was a problem during attribution while using the database configuration.` - ); + getLogger({ + buildUUID, + service: dbEnvService.name, + error, + }).error('Failed during attribution while using database configuration'); throw error; } }) @@ -734,11 +732,11 @@ export default class DeployableService extends BaseService { await attribution(environment.optionalServices, false); } } catch (error) { - logger - .child({ environment, error }) - .error( - `[BUILD ${buildUUID}] [ENVIRONMENT ${environment.name}] There was a problem creating or update the deployable object from the database configuration.` - ); + getLogger({ + buildUUID, + environment: environment.name, + error, + }).error('Failed to create or update deployable from database configuration'); throw error; } } @@ -777,7 +775,7 @@ export default class DeployableService extends BaseService { id: yamlEnvService.serviceId, }) .catch((error) => { - logger.child({ error }).warn(`[BUILD ${buildUUID}] error`); + getLogger({ buildUUID, error }).warn('Query error'); return null; }); @@ -797,18 +795,16 @@ export default class DeployableService extends BaseService { build ); } else { - logger.error(`[BUILD ${buildUUID}] [yamlEnvService ${yamlEnvService}]`); - logger.error(`[BUILD ${buildUUID}] [service ${service}]`); - logger.error( - `[BUILD ${buildUUID}] Service ID (${yamlEnvService.serviceId}) cannot be find in the database configuration.` + getLogger({ buildUUID, serviceId: yamlEnvService.serviceId }).error( + 'Service ID cannot be found in the database configuration' ); } } catch (error) { - logger - .child({ yamlEnvService, error }) - .error( - `[BUILD ${buildUUID}] [SERVICE ${yamlEnvService.name}] There was a problem creating or updating the deployable object from the yaml configuration when using a services ID.` - ); + getLogger({ + buildUUID, + service: yamlEnvService.name, + error, + }).error('Failed to create or update deployable from yaml configuration when using service ID'); throw error; } } else { @@ -885,32 +881,30 @@ export default class DeployableService extends BaseService { build ); } else { - logger.warn( - `[BUILD ${buildUUID}] Service Name (${yamlEnvService.name}) cannot be find in the yaml configuration. Is it referenced via the Lifecycle database?` + getLogger({ buildUUID, service: yamlEnvService.name }).warn( + 'Service cannot be found in yaml configuration. Is it referenced via the Lifecycle database?' ); } } else { - logger - .child({ repository, deploy }) - .warn( - `[BUILD ${buildUUID}][DEPLOY ${deploy?.uuid}] Unable to locate YAML config file from ${repository?.fullName}:${branchName}. 
Is this a database service?` - ); + getLogger({ buildUUID, deployUUID: deploy?.uuid, repository: repository?.fullName }).warn( + `Unable to locate YAML config file from ${repository?.fullName}:${branchName}. Is this a database service?` + ); } } catch (error) { - logger - .child({ error, yamlEnvService }) - .error( - `[BUILD ${buildUUID}] There was a problem creating or updating the deployable object from the yaml configuration when using a services yaml configuration.` - ); + getLogger({ + buildUUID, + service: yamlEnvService.name, + error, + }).error('Failed to create or update deployable from yaml configuration'); throw error; } } } catch (error) { - logger - .child({ error, yamlEnvService }) - .error( - `[BUILD ${buildUUID}] There was a problem creating or updating the deployable object from the yaml configuration.` - ); + getLogger({ + buildUUID, + service: yamlEnvService.name, + error, + }).error('Failed to create or update deployable from yaml configuration'); throw error; } }) @@ -969,14 +963,10 @@ export default class DeployableService extends BaseService { } } } else { - logger.warn(`[BUILD ${buildUUID}] Missing PR branch name.`); + getLogger({ buildUUID }).warn('Missing PR branch name'); } } catch (error) { - logger - .child({ error }) - .error( - `[BUILD ${buildUUID}] There was a problem creating or updating the deployable object from the yaml configuration.` - ); + getLogger({ buildUUID, error }).error('Failed to create or update deployable from yaml configuration'); throw error; } } @@ -1028,15 +1018,17 @@ export default class DeployableService extends BaseService { Array.from(deployableServices.values()) ); } else { - logger.fatal(`[BUILD ${buildUUID}] Pull Request cannot be undefined`); + getLogger({ buildUUID }).fatal('Pull Request cannot be undefined'); } } catch (error) { - logger - .child({ environment, error }) - .error(`[BUILD ${buildUUID}] [ENVIRONMENT ${environment.name}] There was a problem upserting the deployables.`); + getLogger({ + buildUUID, + environment: environment.name, + error, + }).error('Failed to upsert deployables'); throw error; } - logger.info(`[BUILD ${buildUUID}] Created/Updated ${deployables.length} deployables`); + getLogger({ buildUUID }).info(`Created/Updated ${deployables.length} deployables`); return deployables; } @@ -1063,9 +1055,11 @@ export default class DeployableService extends BaseService { .where('buildId', buildId) .first() .catch((error) => { - logger - .child({ error }) - .error(`[BUILD ${buildUUID}] [SERVICE ${deployableAttr.name}] Unable to search deployable`); + getLogger({ + buildUUID, + service: deployableAttr.name, + error, + }).error('Unable to search deployable'); return undefined; }); @@ -1074,15 +1068,19 @@ export default class DeployableService extends BaseService { .$query() .patch(deployableAttr as object) .catch((error) => { - logger - .child({ error }) - .error(`[BUILD ${buildUUID}] [SERVICE ${deployableAttr.name}] Unable to patch deployable`); + getLogger({ + buildUUID, + service: deployableAttr.name, + error, + }).error('Unable to patch deployable'); }); } else { deployable = await this.db.models.Deployable.create(deployableAttr as object).catch((error) => { - logger - .child({ error }) - .error(`[BUILD ${buildUUID}] [SERVICE ${deployableAttr.name}] Unable to create new deployable`); + getLogger({ + buildUUID, + service: deployableAttr.name, + error, + }).error('Unable to create new deployable'); return undefined; }); } diff --git a/src/server/services/environment.ts b/src/server/services/environment.ts index 
7c8f9ab..167d569 100644 --- a/src/server/services/environment.ts +++ b/src/server/services/environment.ts @@ -14,14 +14,10 @@ * limitations under the License. */ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import Environment from 'server/models/Environment'; import Service from './_service'; -const logger = rootLogger.child({ - filename: 'services/repository.ts', -}); - export default class EnvironmentService extends Service { /** * Retrieve a Lifecycle environment. If it doesn't exist, create a new record. @@ -48,9 +44,7 @@ export default class EnvironmentService extends Service { autoDeploy, })); } catch (error) { - logger.fatal( - `[Environment ${envName}] [UUID ${uuid != null ?? '???'}] Unable to find or create environment: ${error}` - ); + getLogger({ environment: envName, uuid, error }).fatal('Unable to find or create environment'); throw error; } diff --git a/src/server/services/github.ts b/src/server/services/github.ts index 1f29d92..56dd5fc 100644 --- a/src/server/services/github.ts +++ b/src/server/services/github.ts @@ -17,7 +17,7 @@ import { parse as fParse } from 'flatted'; import _ from 'lodash'; import Service from './_service'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; import { IssueCommentEvent, PullRequestEvent, PushEvent } from '@octokit/webhooks-types'; import { GithubPullRequestActions, @@ -35,10 +35,6 @@ import { createOrUpdateGithubDeployment, deleteGithubDeploymentAndEnvironment } import { enableKillSwitch, isStaging, hasDeployLabel } from 'server/lib/utils'; import { redisClient } from 'server/lib/dependencies'; -const logger = rootLogger.child({ - filename: 'services/github.ts', -}); - export default class GithubService extends Service { // Handle the pull request webhook mapping the entrance with webhook body async handlePullRequestHook({ @@ -60,7 +56,7 @@ export default class GithubService extends Service { labels, }, }: PullRequestEvent) { - logger.info(`[GITHUB ${fullName}/${branch}] Pull request ${action}`); + getLogger({}).info(`PR: ${action} ${fullName}/${branch}`); const isOpened = [GithubPullRequestActions.OPENED, GithubPullRequestActions.REOPENED].includes( action as GithubPullRequestActions ); @@ -79,16 +75,7 @@ export default class GithubService extends Service { isJSON: true, })) as LifecycleYamlConfigOptions; } catch (error) { - logger - .child({ - action, - status, - branch, - branchSha, - fullName, - error, - }) - .warn(`[GITHUB ${fullName}/${branch}][handlePullRequestHook] Unable to fetch lifecycle config`); + getLogger({}).warn({ error }, `Unable to fetch lifecycle config for ${fullName}/${branch}`); } } repository = await this.db.services.Repository.findRepository(ownerId, repositoryId, installationId); @@ -153,6 +140,7 @@ export default class GithubService extends Service { action: 'enable', waitForComment: true, labels: labels.map((l) => l.name), + ...extractContextForQueue(), }); } } else if (isClosed) { @@ -160,7 +148,7 @@ export default class GithubService extends Service { pullRequestId, }); if (!build) { - logger.warn(`[GITHUB ${fullName}/${branch}] No build found for closed pull request. 
Skipping deletion`); + getLogger({}).warn(`No build found for closed pull request ${fullName}/${branch}, skipping deletion`); return; } await this.db.services.BuildService.deleteBuild(build); @@ -170,20 +158,11 @@ export default class GithubService extends Service { action: 'disable', waitForComment: false, labels: labels.map((l) => l.name), + ...extractContextForQueue(), }); } } catch (error) { - logger - .child({ - action, - status, - pullRequest, - environment, - repository, - error, - build, - }) - .fatal(`[GITHUB ${fullName}/${branch}] Unable to handle Github pull request event: ${error}`); + getLogger().fatal({ error }, `Unable to handle Github pull request event for ${fullName}/${branch}`); } } @@ -202,16 +181,10 @@ export default class GithubService extends Service { if (!pullRequest || isBot) return; await pullRequest.$fetchGraph('[build, repository]'); - logger.info(`[GITHUB ${pullRequest.build?.uuid}] Pull request comment edited by ${commentCreatorUsername}`); + getLogger().info(`PR: comment edited by ${commentCreatorUsername}`); await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body); } catch (error) { - logger - .child({ - error, - pullRequest, - commentCreatorUsername, - }) - .error(`Unable to handle Github Issue Comment event: ${error}`); + getLogger().error({ error }, `Unable to handle Github Issue Comment event`); } }; @@ -220,7 +193,7 @@ export default class GithubService extends Service { action, pull_request: { id: githubPullRequestId, labels, state: status }, } = body; - let pullRequest: PullRequest, build: Build, repository: Repository; + let pullRequest: PullRequest, build: Build, _repository: Repository; try { // this is a hacky way to force deploy by adding a label const labelNames = labels.map(({ name }) => name.toLowerCase()) || []; @@ -238,7 +211,7 @@ export default class GithubService extends Service { await pullRequest.$fetchGraph('[build, repository]'); build = pullRequest?.build; - repository = pullRequest?.repository; + _repository = pullRequest?.repository; await this.patchPullRequest({ pullRequest, labels, @@ -246,11 +219,7 @@ export default class GithubService extends Service { status, autoDeploy: false, }); - logger.info( - `[BUILD ${build?.uuid}] Patched pull request with labels(${action}) ${ - labels.length ? `: ${labels.map(({ name }) => name).join(', ')}` : '' - }` - ); + getLogger().info(`Labels: ${action}${labels.length ? 
` [${labels.map(({ name }) => name).join(', ')}]` : ''}`); if (pullRequest.deployOnUpdate === false) { // when pullRequest.deployOnUpdate is false, it means that there is no `lifecycle-deploy!` label @@ -260,22 +229,14 @@ export default class GithubService extends Service { const buildId = build?.id; if (!buildId) { - logger - .child({ build }) - .error(`[BUILD ${build?.uuid}][handleLabelWebhook][buidIdError] No build ID found for this pull request!`); + getLogger().error(`No build ID found for this pull request in handleLabelWebhook`); } await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId, + ...extractContextForQueue(), }); } catch (error) { - logger - .child({ - build, - pullRequest, - repository, - error, - }) - .error(`[BUILD ${build?.uuid}][handleLabelWebhook] Error processing label webhook`); + getLogger().error({ error }, `Error processing label webhook`); } }; @@ -284,7 +245,7 @@ export default class GithubService extends Service { const branchName = ref.split('refs/heads/')[1]; if (!branchName) return; const hasVoidCommit = [previousCommit, latestCommit].some((commit) => this.isVoidCommit(commit)); - logger.debug(`[GITHUB] Push event repo ${repoName}, branch ${branchName}`); + getLogger({}).debug(`Push event repo=${repoName} branch=${branchName}`); const models = this.db.models; try { @@ -331,7 +292,7 @@ export default class GithubService extends Service { for (const build of buildsToDeploy) { const buildId = build?.id; if (!buildId) { - logger.error(`[BUILD ${build?.uuid}][handlePushWebhook][buidIdError] No build ID found for this build!`); + getLogger().error(`No build ID found for this build in handlePushWebhook`); } // Only check for failed deploys on PR environments, not static environments let hasFailedDeploys = false; @@ -344,23 +305,22 @@ export default class GithubService extends Service { hasFailedDeploys = failedDeploys.length > 0; if (hasFailedDeploys) { - logger.info( - `[BUILD ${build?.uuid}] Detected ${failedDeploys.length} failed deploy(s). Triggering full redeploy for push on repo: ${repoName} branch: ${branchName}` - ); + getLogger().info(`Push: ${failedDeploys.length} failed deploys, full redeploy ${repoName}/${branchName}`); } } if (!hasFailedDeploys) { - logger.info(`[BUILD ${build?.uuid}] Deploying build for push on repo: ${repoName} branch: ${branchName}`); + getLogger().info(`Push: deploying ${repoName}/${branchName}`); } await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId, ...(hasFailedDeploys ? 
{} : { githubRepositoryId }), + ...extractContextForQueue(), }); } } catch (error) { - logger.error(`[GITHUB] Error processing push webhook: ${error}`); + getLogger({}).error({ error }, `Error processing push webhook`); } }; @@ -397,13 +357,15 @@ export default class GithubService extends Service { if (!build) return; - logger.info(`[BUILD ${build?.uuid}] Redeploying static env for push on branch`); + getLogger().info('Push: redeploying static env'); await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId: build?.id, + ...extractContextForQueue(), }); } catch (error) { - logger.error( - `[GITHUB] Error processing push webhook for static env for branch: ${branchName} at repository id: ${githubRepositoryId}.\n Error: ${error}` + getLogger({}).error( + { error }, + `Error processing push webhook for static env for branch=${branchName} repositoryId=${githubRepositoryId}` ); } }; @@ -412,7 +374,7 @@ export default class GithubService extends Service { const { body } = req; const type = req.headers['x-github-event']; - logger.debug(`***** Incoming Github Webhook: ${type} *****`); + getLogger({}).debug(`Incoming Github Webhook type=${type}`); const isVerified = github.verifyWebhookSignature(req); if (!isVerified) { @@ -424,28 +386,28 @@ export default class GithubService extends Service { try { const labelNames = body.pull_request.labels.map(({ name }) => name.toLowerCase()) || []; if (isStaging() && !labelNames.includes(FallbackLabels.DEPLOY_STG)) { - logger.debug(`[GITHUB] STAGING RUN DETECTED - Skipping processing of this event`); + getLogger({}).debug(`Staging run detected, skipping processing of this event`); return; } const hasLabelChange = [GithubWebhookTypes.LABELED, GithubWebhookTypes.UNLABELED].includes(body.action); if (hasLabelChange) return await this.handleLabelWebhook(body); else return await this.handlePullRequestHook(body); } catch (e) { - logger.error(`There is problem when handling PULL_REQUEST event: ${e}`); + getLogger({}).error({ error: e }, `Error handling PULL_REQUEST event`); throw e; } case GithubWebhookTypes.PUSH: try { return await this.handlePushWebhook(body); } catch (e) { - logger.error(`There is problem when handling PUSH event: ${e}`); + getLogger({}).error({ error: e }, `Error handling PUSH event`); throw e; } case GithubWebhookTypes.ISSUE_COMMENT: try { return await this.handleIssueCommentWebhook(body); } catch (e) { - logger.error(`There is problem when handling ISSUE_COMMENT event: ${e}`); + getLogger({}).error({ error: e }, `Error handling ISSUE_COMMENT event`); throw e; } default: @@ -462,11 +424,16 @@ export default class GithubService extends Service { }); processWebhooks = async (job) => { - try { - await this.db.services.GithubService.dispatchWebhook(fParse(job.data.message)); - } catch (error) { - logger.error(`Error processing webhook:`, error); - } + const { correlationId, sender, message, _ddTraceContext } = job.data; + + return withLogContext({ correlationId, sender, _ddTraceContext }, async () => { + try { + getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).debug('Webhook: processing'); + await this.db.services.GithubService.dispatchWebhook(fParse(message)); + } catch (error) { + getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).fatal({ error }, 'Error processing webhook'); + } + }); }; githubDeploymentQueue = this.queueManager.registerQueue(QUEUE_NAMES.GITHUB_DEPLOYMENT, { @@ -478,26 +445,34 @@ export default class GithubService extends Service { }); processGithubDeployment = async (job) => { - // This queue 
has 3 attempts configured, so errors will cause retries - const { deployId, action } = job.data; - const text = `[DEPLOYMENT ${deployId}][processGithubDeployment] ${action}`; - const deploy = await this.db.models.Deploy.query().findById(deployId); - try { - switch (action) { - case 'create': { - await createOrUpdateGithubDeployment(deploy); - break; - } - case 'delete': { - await deleteGithubDeploymentAndEnvironment(deploy); - break; + const { deployId, action, sender, correlationId, _ddTraceContext } = job.data; + + return withLogContext({ correlationId, sender, _ddTraceContext, deployUuid: String(deployId) }, async () => { + const deploy = await this.db.models.Deploy.query().findById(deployId); + try { + getLogger({ stage: LogStage.DEPLOY_STARTING }).debug(`GitHub deployment: ${action}`); + + switch (action) { + case 'create': { + await createOrUpdateGithubDeployment(deploy); + break; + } + case 'delete': { + await deleteGithubDeploymentAndEnvironment(deploy); + break; + } + default: + throw new Error(`Unknown action: ${action}`); } - default: - throw new Error(`Unknown action: ${action}`); + + getLogger({ stage: LogStage.DEPLOY_COMPLETE }).debug(`GitHub deployment: ${action} completed`); + } catch (error) { + getLogger({ stage: LogStage.DEPLOY_FAILED }).warn( + { error }, + `Error processing GitHub deployment job=${job?.id} action=${action}` + ); } - } catch (error) { - logger.child({ error }).warn(`${text} Error processing job ${job?.id} with action ${action}`); - } + }); }; private patchPullRequest = async ({ pullRequest, labels, action, status, autoDeploy = false }) => { @@ -523,15 +498,7 @@ export default class GithubService extends Service { labels: JSON.stringify(labelNames), }); } catch (error) { - logger - .child({ - error, - pullRequest, - labels, - action, - status, - }) - .error(`[BUILD][patchPullRequest] Error patching pull request for ${pullRequest?.fullName}/${branch}`); + getLogger().error({ error }, `Error patching pull request for ${pullRequest?.fullName}/${branch}`); } }; diff --git a/src/server/services/globalConfig.ts b/src/server/services/globalConfig.ts index 5140bd2..87c4c97 100644 --- a/src/server/services/globalConfig.ts +++ b/src/server/services/globalConfig.ts @@ -15,23 +15,23 @@ */ import { createAppAuth } from '@octokit/auth-app'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import BaseService from './_service'; import { GlobalConfig, LabelsConfig } from './types/globalConfig'; import { GITHUB_APP_INSTALLATION_ID, APP_AUTH, APP_ENV, QUEUE_NAMES } from 'shared/config'; import { Metrics } from 'server/lib/metrics'; import { redisClient } from 'server/lib/dependencies'; -const logger = rootLogger.child({ - filename: 'services/globalConfig.ts', -}); - const REDIS_CACHE_KEY = 'global_config'; const GITHUB_CACHED_CLIENT_TOKEN = 'github_cached_client_token'; export default class GlobalConfigService extends BaseService { private static instance: GlobalConfigService; + private memoryCache: GlobalConfig | null = null; + private memoryCacheExpiry: number = 0; + private static MEMORY_CACHE_TTL_MS = 30000; // 30 seconds + static getInstance(): GlobalConfigService { if (!this.instance) { this.instance = new GlobalConfigService(); @@ -39,6 +39,11 @@ export default class GlobalConfigService extends BaseService { return this.instance; } + clearMemoryCache(): void { + this.memoryCache = null; + this.memoryCacheExpiry = 0; + } + protected cacheRefreshQueue = 
this.queueManager.registerQueue(QUEUE_NAMES.GLOBAL_CONFIG_CACHE_REFRESH, { connection: redisClient.getConnection(), }); @@ -60,29 +65,46 @@ } /** - * Get all global configs. First, it will try to retrieve them from the cache. - * If they are not available if cache is empty, it will fetch them from the DB, cache them, and then return them. + * Get all global configs. Uses a three-tier caching strategy: + * 1. In-memory cache (30 second TTL) - fastest, eliminates Redis calls + * 2. Redis cache - shared across pods + * 3. Database - source of truth * @returns A map of all config keys values. **/ async getAllConfigs(refreshCache: boolean = false): Promise<GlobalConfig> { + const now = Date.now(); + + if (!refreshCache && this.memoryCache && now < this.memoryCacheExpiry) { + return this.memoryCache; + } + const cachedConfigs = await this.redis.hgetall(REDIS_CACHE_KEY); if (Object.keys(cachedConfigs).length === 0 || refreshCache) { - logger.debug('Cache miss for all configs, fetching from DB'); + getLogger().debug('Cache miss for all configs, fetching from DB'); const configsFromDb = await this.getAllConfigsFromDb(); - // to delete keys removed from database - // this is not a common scenario that happens with global config table, but just to be safe const keysFromDb = new Set(Object.keys(configsFromDb)); const keysToRemove = Object.keys(cachedConfigs).filter((key) => !keysFromDb.has(key)); if (keysToRemove.length > 0) { await this.redis.hdel(REDIS_CACHE_KEY, ...keysToRemove); - logger.debug(`Deleted stale keys from cache: ${keysToRemove.join(', ')}`); + getLogger().debug(`Deleted stale keys from cache: keys=${keysToRemove.join(', ')}`); } await this.redis.hmset(REDIS_CACHE_KEY, configsFromDb); - return this.deserialize(configsFromDb); + const result = this.deserialize(configsFromDb); + + this.memoryCache = result; + this.memoryCacheExpiry = now + GlobalConfigService.MEMORY_CACHE_TTL_MS; + + return result; } - return this.deserialize(cachedConfigs); + + const result = this.deserialize(cachedConfigs); + + this.memoryCache = result; + this.memoryCacheExpiry = now + GlobalConfigService.MEMORY_CACHE_TTL_MS; + + return result; } /** @@ -120,7 +142,7 @@ if (!labels) throw new Error('Labels configuration not found in global config'); return labels; } catch (error) { - logger.error('Error retrieving labels configuration, using fallback defaults', error); + getLogger().error({ error }, 'Error retrieving labels configuration, using fallback defaults'); // Return fallback defaults on error return { deploy: ['lifecycle-deploy!'], @@ -139,7 +161,7 @@ try { deserializedConfigs[key as keyof GlobalConfig] = JSON.parse(value as string); } catch (e) { - logger.error(`Error deserializing config for key ${key}: ${e.message}`); + getLogger().error({ error: e }, `Error deserializing config: key=${key}`); } } return deserializedConfigs as GlobalConfig; @@ -172,7 +194,7 @@ try { await this.getGithubClientToken(true); } catch (error) { - logger.child({ error }).error(`Error refreshing GlobalConfig cache during boot: ${error}`); + getLogger().error({ error }, 'Error refreshing GlobalConfig cache during boot'); } } @@ -189,14 +211,19 @@ ); } - processCacheRefresh = async () => { - try { - await this.getAllConfigs(true); - 
await this.getGithubClientToken(true); - logger.debug('GlobalConfig and Github cache refreshed successfully.'); - } catch (error) { - logger.child({ error }).error('Error refreshing GlobalConfig cache'); - } + processCacheRefresh = async (job) => { + const { correlationId } = job?.data || {}; + + return withLogContext({ correlationId: correlationId || `cache-refresh-${Date.now()}` }, async () => { + try { + getLogger({ stage: LogStage.CONFIG_REFRESH }).info('Refreshing GlobalConfig and Github cache'); + await this.getAllConfigs(true); + await this.getGithubClientToken(true); + getLogger({ stage: LogStage.CONFIG_REFRESH }).debug('GlobalConfig and Github cache refreshed successfully'); + } catch (error) { + getLogger({ stage: LogStage.CONFIG_FAILED }).error({ error }, 'Error refreshing GlobalConfig cache'); + } + }); }; /** @@ -209,9 +236,9 @@ async setConfig(key: string, value: any): Promise<void> { try { await this.db.knex('global_config').insert({ key, config: value }).onConflict('key').merge(); - logger.info(`Set global config value for key: ${key}`); + getLogger().info(`Set global config value: key=${key}`); } catch (err: any) { - logger.child({ err }).error(`Error setting global config value for key: ${key}`); + getLogger().error({ error: err }, `Error setting global config value: key=${key}`); throw err; } } diff --git a/src/server/services/ingress.ts b/src/server/services/ingress.ts index f6ab993..7a124f7 100644 --- a/src/server/services/ingress.ts +++ b/src/server/services/ingress.ts @@ -15,7 +15,7 @@ */ /* eslint-disable no-unused-vars */ -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import BaseService from './_service'; import fs from 'fs'; import { TMP_PATH, QUEUE_NAMES } from 'shared/config'; @@ -27,10 +27,6 @@ import GlobalConfigService from './globalConfig'; const MANIFEST_PATH = `${TMP_PATH}/ingress`; -const logger = rootLogger.child({ - filename: 'services/ingress.ts', -}); - export default class IngressService extends BaseService { async updateIngressManifest(): Promise<boolean> { return true; } @@ -66,47 +62,57 @@ * @param done the done callback */ ingressCleanupForBuild = async (job) => { - // queue has retry attempts configured, so errors will cause retries - const buildId = job.data.buildId; - // For cleanup purpose, we want to include the ingresses for all the services (active or not) to cleanup just in case. - const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, true); - const namespace = await this.db.services.BuildService.getNamespace({ id: buildId }); - try { - configurations.forEach(async (configuration) => { - await shellPromise(`kubectl delete ingress ingress-${configuration.deployUUID} --namespace ${namespace}`).catch( - (error) => { - logger.warn(`[DEPLOY ${configuration.deployUUID}] ${error}`); + const { buildId, buildUuid, sender, correlationId, _ddTraceContext } = job.data; + + return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => { + getLogger({ stage: LogStage.INGRESS_PROCESSING }).info('Ingress: cleaning up'); + + // For cleanup purpose, we want to include the ingresses for all the services (active or not) to cleanup just in case. 
+ const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, true); + const namespace = await this.db.services.BuildService.getNamespace({ id: buildId }); + try { + configurations.forEach(async (configuration) => { + await shellPromise( + `kubectl delete ingress ingress-${configuration.deployUUID} --namespace ${namespace}` + ).catch((error) => { + getLogger({ stage: LogStage.INGRESS_PROCESSING }).warn(`${error}`); return null; + }); + }); + getLogger({ stage: LogStage.INGRESS_COMPLETE }).info('Ingress: cleaned up'); + } catch (e) { + getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error: e }, 'Error cleaning up ingress'); + } + }); + }; + + createOrUpdateIngressForBuild = async (job) => { + const { buildId, buildUuid, sender, correlationId, _ddTraceContext } = job.data; + + return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => { + getLogger({ stage: LogStage.INGRESS_PROCESSING }).info('Ingress: creating'); + + // We just want to create/update ingress for active services only + const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, false); + const namespace = await this.db.services.BuildService.getNamespace({ id: buildId }); + const { lifecycleDefaults, domainDefaults } = await GlobalConfigService.getInstance().getAllConfigs(); + const manifests = configurations.map((configuration) => { + return yaml.dump( + this.generateNginxManifestForConfiguration({ + configuration, + ingressClassName: lifecycleDefaults?.ingressClassName, + altHosts: domainDefaults?.altHttp || [], + }), + { + skipInvalid: true, } ); }); - } catch (e) { - // It's ok if this fails. - logger.warn(e); - } - }; + manifests.forEach(async (manifest, idx) => { + await this.applyManifests(manifest, `${buildId}-${idx}-nginx`, namespace); + }); - createOrUpdateIngressForBuild = async (job) => { - // queue has retry attempts configured, so errors will cause retries - const buildId = job.data.buildId; - // We just want to create/update ingress for active services only - const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, false); - const namespace = await this.db.services.BuildService.getNamespace({ id: buildId }); - const { lifecycleDefaults, domainDefaults } = await GlobalConfigService.getInstance().getAllConfigs(); - const manifests = configurations.map((configuration) => { - return yaml.dump( - this.generateNginxManifestForConfiguration({ - configuration, - ingressClassName: lifecycleDefaults?.ingressClassName, - altHosts: domainDefaults?.altHttp || [], - }), - { - skipInvalid: true, - } - ); - }); - manifests.forEach(async (manifest, idx) => { - await this.applyManifests(manifest, `${buildId}-${idx}-nginx`, namespace); + getLogger({ stage: LogStage.INGRESS_COMPLETE }).info('Ingress: created'); }); }; @@ -194,7 +200,7 @@ export default class IngressService extends BaseService { await fs.promises.writeFile(localPath, manifest, 'utf8'); await shellPromise(`kubectl apply -f ${localPath} --namespace ${namespace}`); } catch (error) { - logger.warn(error); + getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error }, 'Failed to apply ingress manifest'); } }; } diff --git a/src/server/services/label.ts b/src/server/services/label.ts index 7432d8d..7801650 100644 --- a/src/server/services/label.ts +++ b/src/server/services/label.ts @@ -18,15 +18,11 @@ import Service from './_service'; import { Queue, Job } from 'bullmq'; import { QUEUE_NAMES } from 'shared/config'; import { redisClient 
} from 'server/lib/dependencies'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import { waitForColumnValue } from 'shared/utils'; import { updatePullRequestLabels } from 'server/lib/github'; import { getDeployLabel } from 'server/lib/utils'; -const logger = rootLogger.child({ - filename: 'services/label.ts', -}); - interface LabelJob { pullRequestId: number; action: 'enable' | 'disable'; @@ -55,62 +51,85 @@ export default class LabelService extends Service { * Process label queue jobs */ processLabelQueue = async (job: Job) => { - const { pullRequestId, action, waitForComment, labels: currentLabels } = job.data; + const { + pullRequestId, + action, + waitForComment, + labels: currentLabels, + sender, + correlationId, + _ddTraceContext, + } = job.data as LabelJob & { sender?: string; correlationId?: string; _ddTraceContext?: Record }; - try { - const pullRequest = await this.db.models.PullRequest.query() - .findById(pullRequestId) - .withGraphFetched('[repository, build]'); + return withLogContext({ correlationId, sender, _ddTraceContext }, async () => { + try { + const pullRequest = await this.db.models.PullRequest.query() + .findById(pullRequestId) + .withGraphFetched('[repository, build]'); - if (!pullRequest) { - throw new Error(`[BUILD unknown] Pull request with id ${pullRequestId} not found`); - } + if (!pullRequest) { + throw new Error(`Pull request with id ${pullRequestId} not found`); + } - const { repository, build } = pullRequest; - const buildUuid = build?.uuid || 'unknown'; - if (!repository) { - throw new Error(`[BUILD ${buildUuid}] Repository not found for pull request ${pullRequestId}`); - } + const { repository, build } = pullRequest; + const buildUuid = build?.uuid || 'unknown'; + if (!repository) { + throw new Error(`Repository not found for pull request ${pullRequestId}`); + } - if (waitForComment && !pullRequest.commentId) { - logger.debug(`[BUILD ${buildUuid}] Waiting for comment_id to be set before updating labels`); - // 60 attempts * 5 seconds = 5 minutes - const updatedPullRequest = await waitForColumnValue(pullRequest, 'commentId', 60, 5000); + getLogger({ stage: LogStage.LABEL_PROCESSING, buildUuid }).info( + `Processing label ${action} for PR ${pullRequest.pullRequestNumber}` + ); - if (!updatedPullRequest) { - logger.warn(`[BUILD ${buildUuid}] Timeout waiting for comment_id while updating labels after 5 minutes`); + if (waitForComment && !pullRequest.commentId) { + getLogger({ stage: LogStage.LABEL_PROCESSING, buildUuid }).debug( + 'Waiting for comment_id to be set before updating labels' + ); + // 60 attempts * 5 seconds = 5 minutes + const updatedPullRequest = await waitForColumnValue(pullRequest, 'commentId', 60, 5000); + + if (!updatedPullRequest) { + getLogger({ stage: LogStage.LABEL_PROCESSING, buildUuid }).warn( + 'Timeout waiting for comment_id while updating labels after 5 minutes' + ); + } } - } - let updatedLabels: string[]; + let updatedLabels: string[]; - const deployLabel = await getDeployLabel(); - if (action === 'enable') { - if (!currentLabels.includes(deployLabel)) { - updatedLabels = [...currentLabels, deployLabel]; + const deployLabel = await getDeployLabel(); + if (action === 'enable') { + if (!currentLabels.includes(deployLabel)) { + updatedLabels = [...currentLabels, deployLabel]; + } else { + getLogger({ stage: LogStage.LABEL_COMPLETE, buildUuid }).debug( + `Deploy label "${deployLabel}" already exists on PR, skipping update` + ); + return; + } } else { - 
logger.debug(`[BUILD ${buildUuid}] Deploy label "${deployLabel}" already exists on PR, skipping update`); - return; + const labelsConfig = await this.db.services.GlobalConfig.getLabels(); + const deployLabels = labelsConfig.deploy || []; + updatedLabels = currentLabels.filter((label) => !deployLabels.includes(label)); } - } else { - const labelsConfig = await this.db.services.GlobalConfig.getLabels(); - const deployLabels = labelsConfig.deploy || []; - updatedLabels = currentLabels.filter((label) => !deployLabels.includes(label)); - } - await updatePullRequestLabels({ - installationId: repository.githubInstallationId, - pullRequestNumber: pullRequest.pullRequestNumber, - fullName: pullRequest.fullName, - labels: updatedLabels, - }); + await updatePullRequestLabels({ + installationId: repository.githubInstallationId, + pullRequestNumber: pullRequest.pullRequestNumber, + fullName: pullRequest.fullName, + labels: updatedLabels, + }); - logger.info( - `[BUILD ${buildUuid}] Successfully ${action === 'enable' ? 'added' : 'removed'} ${deployLabel} label` - ); - } catch (error) { - logger.error({ error }, `[PR ${pullRequestId}] Failed to process label job`); - throw error; - } + getLogger({ stage: LogStage.LABEL_COMPLETE, buildUuid }).info( + `Successfully ${action === 'enable' ? 'added' : 'removed'} ${deployLabel} label` + ); + } catch (error) { + getLogger({ stage: LogStage.LABEL_FAILED }).error( + { error }, + `Failed to process label job for PR ${pullRequestId}` + ); + throw error; + } + }); }; } diff --git a/src/server/services/override.ts b/src/server/services/override.ts index e99875e..39bebaf 100644 --- a/src/server/services/override.ts +++ b/src/server/services/override.ts @@ -15,15 +15,11 @@ */ import BaseService from './_service'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Build } from 'server/models'; import * as k8s from 'server/lib/kubernetes'; import DeployService from './deploy'; -const logger = rootLogger.child({ - filename: 'services/override.ts', -}); - interface ValidationResult { valid: boolean; error?: string; @@ -60,7 +56,7 @@ export default class OverrideService extends BaseService { return { valid: false, error: 'UUID is not available' }; } } catch (error) { - logger.error('Error checking UUID uniqueness:', error); + getLogger().error({ error }, 'Error checking UUID uniqueness'); return { valid: false, error: 'Unable to validate UUID' }; } @@ -77,7 +73,7 @@ export default class OverrideService extends BaseService { const oldUuid = build.uuid; const oldNamespace = build.namespace; - logger.info(`[BUILD ${oldUuid}] Updating UUID to '${newUuid}'`); + getLogger({ buildUuid: oldUuid }).info(`Updating UUID to '${newUuid}'`); try { return await this.db.models.Build.transact(async (trx) => { @@ -110,12 +106,11 @@ export default class OverrideService extends BaseService { const updatedBuild = await this.db.models.Build.query(trx).findById(build.id); - // Delete the old namespace for cleanup (non-blocking, outside transaction) k8s.deleteNamespace(oldNamespace).catch((error) => { - logger.warn(`[BUILD ${oldUuid}] Failed to delete old namespace ${oldNamespace}:`, error); + getLogger({ buildUuid: oldUuid }).warn({ error }, `Failed to delete old namespace ${oldNamespace}`); }); - logger.info( - `[BUILD ${newUuid}] Successfully updated UUID from '${oldUuid}' to '${newUuid}', updated ${deploys.length} deploys` + getLogger({ buildUuid: newUuid }).info( + `Successfully updated UUID from '${oldUuid}' to '${newUuid}', updated 
${deploys.length} deploys` ); return { @@ -124,7 +119,7 @@ export default class OverrideService extends BaseService { }; }); } catch (error) { - logger.error(`[BUILD ${oldUuid}] Failed to update UUID to '${newUuid}': ${error}`, error); + getLogger({ buildUuid: oldUuid }).error({ error }, `Failed to update UUID to '${newUuid}'`); throw error; } } diff --git a/src/server/services/pullRequest.ts b/src/server/services/pullRequest.ts index 766b928..4db7f7b 100644 --- a/src/server/services/pullRequest.ts +++ b/src/server/services/pullRequest.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import { PullRequest, Repository } from 'server/models'; import BaseService from './_service'; import { UniqueViolationError } from 'objection'; @@ -34,10 +34,6 @@ export interface PullRequestOptions { branch: string; } -const logger = rootLogger.child({ - filename: 'services/pullRequest.ts', -}); - export default class PullRequestService extends BaseService { /** * Get Pull Request Model. If it doesn't exist in the database, create a new one. @@ -66,22 +62,19 @@ export default class PullRequestService extends BaseService { }); } catch (error) { if (error instanceof UniqueViolationError) { - logger.info( - `[REPO]${fullName} [PR#]${pullRequestNumber} Pull request already exists, fetching existing record` - ); + getLogger({ fullName, pullRequestNumber }).debug('PR: exists, fetching'); pullRequest = await this.db.models.PullRequest.findOne({ repositoryId: repository.id, githubPullRequestId, }); if (!pullRequest) { - // should never happen, but just in case throw new Error( `Failed to find pull request after unique violation for repo ${repository.id}, PR ${githubPullRequestId}` ); } } else { - logger.error(`[REPO]${fullName} [PR#]${pullRequestNumber} Failed to create pull request: ${error}`); + getLogger({ fullName, pullRequestNumber }).error({ error }, 'Failed to create pull request'); throw error; } } @@ -124,7 +117,10 @@ export default class PullRequestService extends BaseService { ); return hasLabel; } catch (e) { - logger.error(`[REPO]${pullRequest.fullName} [PR NUM]${pullRequest.pullRequestNumber}: ${e}`); + getLogger({ fullName: pullRequest.fullName, pullRequestNumber: pullRequest.pullRequestNumber }).error( + { error: e }, + 'Failed to check lifecycle enabled for pull request' + ); return true; } } @@ -146,7 +142,10 @@ export default class PullRequestService extends BaseService { const hasState = response.data.state === state; return hasLabels && hasState; } catch (e) { - logger.error(`[REPO]${name} [PR ID]${githubPullRequestId}: ${e}`); + getLogger({ fullName: name, githubPullRequestId }).error( + { error: e }, + 'Failed to check pull request labels and state' + ); return true; } } @@ -161,12 +160,18 @@ export default class PullRequestService extends BaseService { }); // eslint-disable-next-line no-unused-vars - processCleanupClosedPRs = async (_job) => { - try { - await this.db.services.BuildService.cleanupBuilds(); - } catch (error) { - logger.error(`Error processing cleanup closed PRs:`, error); - } + processCleanupClosedPRs = async (job) => { + const { correlationId } = job.data || {}; + + return withLogContext({ correlationId: correlationId || `cleanup-${Date.now()}` }, async () => { + try { + getLogger({ stage: LogStage.CLEANUP_STARTING }).info('Cleanup: processing closed PRs'); + await this.db.services.BuildService.cleanupBuilds(); + getLogger({ stage: 
LogStage.CLEANUP_COMPLETE }).info('Cleanup: closed PRs completed'); + } catch (error) { + getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Error processing cleanup closed PRs'); + } + }); }; /** @@ -183,7 +188,10 @@ export default class PullRequestService extends BaseService { const response = await github .getPullRequestByRepositoryFullName(pullRequest.repository.fullName, pullRequest.pullRequestNumber) .catch((error) => { - logger.error(`${error}`); + getLogger({ + fullName: pullRequest.repository.fullName, + pullRequestNumber: pullRequest.pullRequestNumber, + }).error({ error }, 'Failed to get pull request by repository full name'); return null; }); diff --git a/src/server/services/repository.ts b/src/server/services/repository.ts index 0699b16..0213b15 100644 --- a/src/server/services/repository.ts +++ b/src/server/services/repository.ts @@ -14,14 +14,10 @@ * limitations under the License. */ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Repository } from 'server/models'; import BaseService from './_service'; -const logger = rootLogger.child({ - filename: 'services/repository.ts', -}); - export default class RepositoryService extends BaseService { /** * Retrieve a Lifecycle Github Repository model. If it doesn't exist, create a new record. @@ -59,7 +55,7 @@ export default class RepositoryService extends BaseService { defaultEnvId, })); } catch (error) { - logger.error(error); + getLogger({ githubRepositoryId, error }).error('Failed to find or create repository'); throw error; } @@ -86,7 +82,7 @@ export default class RepositoryService extends BaseService { ownerId, }); } catch (error) { - logger.error(error); + getLogger({ githubRepositoryId, error }).error('Failed to find repository'); throw error; } diff --git a/src/server/services/service.ts b/src/server/services/service.ts index a8550c6..89ec13b 100644 --- a/src/server/services/service.ts +++ b/src/server/services/service.ts @@ -14,17 +14,13 @@ * limitations under the License. 
*/ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { Environment, Repository } from 'server/models'; import ServiceModel from 'server/models/Service'; import { CAPACITY_TYPE, DeployTypes } from 'shared/constants'; import BaseService from './_service'; import GlobalConfigService from './globalConfig'; -const logger = rootLogger.child({ - filename: 'services/service.ts', -}); - export default class ServiceService extends BaseService { async findOrCreateDefaultService(environment: Environment, repository: Repository): Promise { let services: ServiceModel[] = []; @@ -32,8 +28,8 @@ export default class ServiceService extends BaseService { try { await environment.$fetchGraph('[defaultServices]'); if (environment.defaultServices != null && environment.defaultServices.length > 0) { - logger.debug( - `[${environment.name}] There is/are ${environment.defaultServices.length} default dependency service(s) in the database.` + getLogger({ environment: environment.name }).debug( + `Found ${environment.defaultServices.length} default dependency service(s) in database` ); services = environment.defaultServices; } else { @@ -82,7 +78,7 @@ export default class ServiceService extends BaseService { } } } catch (error) { - logger.error(error); + getLogger({ environment: environment.name, error }).error('Failed to find or create default service'); throw error; } diff --git a/src/server/services/ttlCleanup.ts b/src/server/services/ttlCleanup.ts index 046804c..0e38a46 100644 --- a/src/server/services/ttlCleanup.ts +++ b/src/server/services/ttlCleanup.ts @@ -18,7 +18,7 @@ import Service from './_service'; import { Queue, Job } from 'bullmq'; import { QUEUE_NAMES } from 'shared/config'; import { redisClient } from 'server/lib/dependencies'; -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { updatePullRequestLabels, createOrUpdatePullRequestComment, getPullRequestLabels } from 'server/lib/github'; import { getKeepLabel, getDisabledLabel, getDeployLabel } from 'server/lib/utils'; @@ -27,12 +27,9 @@ import Metrics from 'server/lib/metrics'; import { DEFAULT_TTL_INACTIVITY_DAYS, DEFAULT_TTL_CHECK_INTERVAL_MINUTES } from 'shared/constants'; import GlobalConfigService from './globalConfig'; -const logger = rootLogger.child({ - filename: 'services/ttlCleanup.ts', -}); - interface TTLCleanupJob { dryRun?: boolean; + correlationId?: string; } interface StaleEnvironment { @@ -62,79 +59,63 @@ export default class TTLCleanupService extends Service { * Process TTL cleanup queue jobs */ processTTLCleanupQueue = async (job: Job) => { - try { - // Always read fresh config to handle runtime config changes - const config = await this.getTTLConfig(); - - if (!config.enabled) { - logger.info('[TTL] TTL cleanup is disabled, skipping'); - return; - } - - // Job data takes precedence (for manual API calls), fall back to config for scheduled jobs - const dryRun = job.data.dryRun ?? config.dryRun; - const source = job.data.dryRun !== undefined ? 
'api-override' : 'config'; - - logger.info('[TTL] Starting TTL cleanup job', { - dryRun, - source, - jobDataDryRun: job.data.dryRun, - configDryRun: config.dryRun, - }); - - const staleEnvironments = await this.findStaleEnvironments(config.inactivityDays, config.excludedRepositories); + const { correlationId } = job.data || {}; - logger.info(`[TTL] Found ${staleEnvironments.length} stale environments`, { - inactivityDays: config.inactivityDays, - dryRun, - }); + return withLogContext({ correlationId: correlationId || `ttl-cleanup-${Date.now()}` }, async () => { + try { + // Always read fresh config to handle runtime config changes + const config = await this.getTTLConfig(); - let successCount = 0; - let errorCount = 0; + if (!config.enabled) { + getLogger({ stage: LogStage.CLEANUP_STARTING }).debug('TTL: disabled, skipping'); + return; + } - for (const env of staleEnvironments) { - try { - if (dryRun) { - const dbLabels = this.parseLabels(env.pullRequest.labels); - - logger.info(`[TTL ${env.buildUUID}] [DRY RUN] Would clean up environment (NO ACTION TAKEN)`, { - namespace: env.namespace, - prNumber: env.pullRequest.pullRequestNumber, - fullName: env.pullRequest.fullName, - daysExpired: env.daysExpired, - currentLabelsFromGitHub: env.currentLabels, - labelsInDatabase: dbLabels, - labelDriftDetected: env.hadLabelDrift, - }); - successCount++; - } else { - logger.info(`[TTL ${env.buildUUID}] Cleaning up stale environment`, { - namespace: env.namespace, - prNumber: env.pullRequest.pullRequestNumber, - fullName: env.pullRequest.fullName, - }); - await this.cleanupStaleEnvironment(env, config.inactivityDays, config.commentTemplate, dryRun); - successCount++; + // Job data takes precedence (for manual API calls), fall back to config for scheduled jobs + const dryRun = job.data.dryRun ?? 
config.dryRun; + + getLogger({ stage: LogStage.CLEANUP_STARTING }).info(`TTL: starting cleanup dryRun=${dryRun}`); + + const staleEnvironments = await this.findStaleEnvironments(config.inactivityDays, config.excludedRepositories); + + getLogger({ stage: LogStage.CLEANUP_STARTING }).info( + `TTL: found ${staleEnvironments.length} stale envs (${config.inactivityDays}d inactive)` + ); + + let successCount = 0; + let errorCount = 0; + + for (const env of staleEnvironments) { + try { + if (dryRun) { + getLogger({ buildUuid: env.buildUUID }).info( + `TTL: [DRY RUN] would cleanup ${env.namespace} PR#${env.pullRequest.pullRequestNumber}` + ); + successCount++; + } else { + getLogger({ buildUuid: env.buildUUID }).info( + `TTL: cleaning ${env.namespace} PR#${env.pullRequest.pullRequestNumber}` + ); + await this.cleanupStaleEnvironment(env, config.inactivityDays, config.commentTemplate, dryRun); + successCount++; + } + } catch (error) { + errorCount++; + getLogger({ buildUuid: env.buildUUID }).error( + { error }, + `Failed to cleanup environment: namespace=${env.namespace}` + ); } - } catch (error) { - errorCount++; - logger.error(`[TTL ${env.buildUUID}] Failed to cleanup environment`, { - namespace: env.namespace, - error, - }); } - } - logger.info('[TTL] TTL cleanup job completed', { - totalFound: staleEnvironments.length, - successCount, - errorCount, - dryRun, - }); - } catch (error) { - logger.error('[TTL] Error in TTL cleanup job', { error }); - throw error; - } + getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info( + `TTL: completed found=${staleEnvironments.length} success=${successCount} errors=${errorCount}` + ); + } catch (error) { + getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Error in TTL cleanup job'); + throw error; + } + }); }; private parseLabels(labels: string | string[] | null): string[] { @@ -180,7 +161,7 @@ export default class TTLCleanupService extends Service { const namespaces = namespacesResponse.body.items; - logger.info(`[TTL] Scanning ${namespaces.length} namespaces with TTL enabled`); + getLogger({ stage: LogStage.CLEANUP_STARTING }).info(`TTL: scanning ${namespaces.length} namespaces`); // Fetch dynamic labels once at the start const keepLabel = await getKeepLabel(); @@ -197,7 +178,7 @@ export default class TTLCleanupService extends Service { const expireAtUnix = labels['lfc/ttl-expireAtUnix']; if (!expireAtUnix) { - logger.debug(`[TTL] Namespace ${nsName} has no TTL expiration label, skipping`); + getLogger({}).debug(`Namespace ${nsName} has no TTL expiration label, skipping`); continue; } @@ -211,45 +192,47 @@ export default class TTLCleanupService extends Service { const buildUUID = labels['lfc/uuid']; // Use lfc/uuid (intentional difference) if (!buildUUID) { - logger.warn(`[TTL] Namespace ${nsName} has no lfc/uuid label, skipping`); + getLogger({}).warn(`Namespace ${nsName} has no lfc/uuid label, skipping`); continue; } - logger.debug(`[TTL ${buildUUID}] Namespace ${nsName} expired ${daysExpired} days ago`); + getLogger({ buildUuid: buildUUID }).debug(`Namespace ${nsName} expired ${daysExpired} days ago`); const build = await this.db.models.Build.query() .findOne({ uuid: buildUUID }) .withGraphFetched('[pullRequest.repository]'); if (!build) { - logger.warn(`[TTL ${buildUUID}] No build found for namespace ${nsName}, skipping`); + getLogger({ buildUuid: buildUUID }).warn(`No build found for namespace ${nsName}, skipping`); continue; } if (build.status === 'torn_down' || build.status === 'pending') { - logger.debug(`[TTL ${buildUUID}] Build is 
already ${build.status}, skipping`); + getLogger({ buildUuid: buildUUID }).debug(`Build is already ${build.status}, skipping`); continue; } if (build.isStatic) { - logger.debug(`[TTL ${buildUUID}] Build is static environment, skipping`); + getLogger({ buildUuid: buildUUID }).debug(`Build is static environment, skipping`); continue; } const pullRequest = build.pullRequest; if (!pullRequest) { - logger.warn(`[TTL ${buildUUID}] No pull request found, skipping`); + getLogger({ buildUuid: buildUUID }).warn(`No pull request found, skipping`); continue; } if (pullRequest.status !== 'open') { - logger.debug(`[TTL ${buildUUID}] PR is ${pullRequest.status}, skipping`); + getLogger({ buildUuid: buildUUID }).debug(`PR is ${pullRequest.status}, skipping`); continue; } if (excludedRepositories.length > 0 && excludedRepositories.includes(pullRequest.fullName)) { - logger.debug(`[TTL ${buildUUID}] Repository ${pullRequest.fullName} is excluded from TTL cleanup, skipping`); + getLogger({ buildUuid: buildUUID }).debug( + `Repository ${pullRequest.fullName} is excluded from TTL cleanup, skipping` + ); continue; } @@ -262,34 +245,33 @@ export default class TTLCleanupService extends Service { fullName: pullRequest.fullName, }); - logger.debug( - `[TTL ${buildUUID}] Fetched ${currentLabels.length} labels from GitHub: ${currentLabels.join(', ')}` + getLogger({ buildUuid: buildUUID }).debug( + `Fetched ${currentLabels.length} labels from GitHub: ${currentLabels.join(', ')}` ); // Sync labels back to DB if they differ (self-healing) const dbLabels = this.parseLabels(pullRequest.labels); if (JSON.stringify(currentLabels.sort()) !== JSON.stringify(dbLabels.sort())) { - logger.info(`[TTL ${buildUUID}] Label drift detected, syncing to database`, { - dbLabels, - currentLabels, - }); + getLogger({ buildUuid: buildUUID }).debug('TTL: label drift detected, syncing to DB'); await pullRequest.$query().patch({ labels: JSON.stringify(currentLabels) as any, }); } } catch (error) { - logger.warn(`[TTL ${buildUUID}] Failed to fetch labels from GitHub, falling back to DB: ${error}`); + getLogger({ buildUuid: buildUUID }).warn({ error }, `Failed to fetch labels from GitHub, falling back to DB`); // Fallback to DB labels if GitHub API fails currentLabels = this.parseLabels(pullRequest.labels); } if (currentLabels.includes(keepLabel)) { - logger.debug(`[TTL ${buildUUID}] Has ${keepLabel} label (verified from GitHub), skipping`); + getLogger({ buildUuid: buildUUID }).debug(`Has ${keepLabel} label (verified from GitHub), skipping`); continue; } if (currentLabels.includes(disabledLabel)) { - logger.debug(`[TTL ${buildUUID}] Already has ${disabledLabel} label (verified from GitHub), skipping`); + getLogger({ buildUuid: buildUUID }).debug( + `Already has ${disabledLabel} label (verified from GitHub), skipping` + ); continue; } @@ -308,7 +290,10 @@ export default class TTLCleanupService extends Service { }); } } catch (error) { - logger.error('[TTL] Error scanning K8s namespaces for stale environments', { error }); + getLogger({ stage: LogStage.CLEANUP_FAILED }).error( + { error }, + 'Error scanning K8s namespaces for stale environments' + ); throw error; } @@ -328,12 +313,7 @@ export default class TTLCleanupService extends Service { const buildUuid = build.uuid; const repository = pullRequest.repository; - logger.info(`[TTL ${buildUuid}] Cleaning up stale environment`, { - namespace, - prNumber: pullRequest.pullRequestNumber, - fullName: pullRequest.fullName, - daysExpired: env.daysExpired, - }); + getLogger({ buildUuid }).info(`TTL: 
cleaning ${namespace} PR#${pullRequest.pullRequestNumber}`); // Fetch dynamic labels at runtime const deployLabel = await getDeployLabel(); @@ -351,9 +331,7 @@ export default class TTLCleanupService extends Service { labels: updatedLabels, }); - logger.info(`[TTL ${buildUuid}] Updated labels: removed ${deployLabel}, added ${disabledLabel}`, { - prNumber: pullRequest.pullRequestNumber, - }); + getLogger({ buildUuid }).debug(`TTL: labels updated PR#${pullRequest.pullRequestNumber}`); const commentMessage = await this.generateCleanupComment(inactivityDays, commentTemplate); @@ -366,9 +344,7 @@ export default class TTLCleanupService extends Service { etag: null, }); - logger.info(`[TTL ${buildUuid}] Posted cleanup comment to PR`, { - prNumber: pullRequest.pullRequestNumber, - }); + getLogger({ buildUuid }).debug(`TTL: cleanup comment posted PR#${pullRequest.pullRequestNumber}`); await pullRequest.$query().patch({ labels: JSON.stringify(updatedLabels) as any, @@ -378,11 +354,10 @@ export default class TTLCleanupService extends Service { const metrics = new Metrics('ttl.cleanup', { repositoryName: pullRequest.fullName }); metrics.increment('total', { dry_run: dryRun.toString() }); } catch (error) { - logger.error(`[TTL ${buildUuid}] Failed to cleanup stale environment`, { - namespace, - prNumber: pullRequest.pullRequestNumber, - error, - }); + getLogger({ buildUuid }).error( + { error }, + `Failed to cleanup stale environment: namespace=${namespace} prNumber=${pullRequest.pullRequestNumber}` + ); throw error; } } @@ -420,7 +395,7 @@ export default class TTLCleanupService extends Service { const config = await this.getTTLConfig(); if (!config.enabled) { - logger.info('[TTL] TTL cleanup is disabled in global config'); + getLogger({}).debug('TTL: disabled in config'); return; } @@ -436,8 +411,6 @@ export default class TTLCleanupService extends Service { } ); - logger.info( - `[TTL] TTL cleanup job scheduled every ${config.checkIntervalMinutes} minutes (${config.inactivityDays} day TTL, dryRun: ${config.dryRun})` - ); + getLogger({}).info(`TTL: scheduled every ${config.checkIntervalMinutes}min`); } } diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts index caf55e3..53994bb 100644 --- a/src/server/services/webhook.ts +++ b/src/server/services/webhook.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import rootLogger from 'server/lib/logger'; +import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger/index'; import BaseService from './_service'; import { Build, PullRequest } from 'server/models'; import * as YamlService from 'server/models/yaml'; @@ -28,10 +28,6 @@ import { redisClient } from 'server/lib/dependencies'; import { validateWebhook } from 'server/lib/webhook/webhookValidator'; import { executeDockerWebhook, executeCommandWebhook } from 'server/lib/webhook'; -const logger = rootLogger.child({ - filename: 'services/webhook.ts', -}); - export class WebhookError extends LifecycleError { constructor(msg: string, uuid: string = null, service: string = null) { super(uuid, service, msg); @@ -66,10 +62,12 @@ export default class WebhookService extends BaseService { if (yamlConfig?.environment?.webhooks != null) { webhooks = yamlConfig.environment.webhooks; await build.$query().patch({ webhooksYaml: JSON.stringify(webhooks) }); - logger.child({ webhooks }).info(`[BUILD ${build.uuid}] Updated build with webhooks from config`); + getLogger({ buildUuid: build.uuid }).info( + `Updated build with webhooks from config: webhooks=${JSON.stringify(webhooks)}` + ); } else { await build.$query().patch({ webhooksYaml: null }); - logger.info(`[BUILD ${build.uuid}] No webhooks found in config`); + getLogger({ buildUuid: build.uuid }).info('No webhooks found in config'); } } return webhooks; @@ -84,7 +82,7 @@ export default class WebhookService extends BaseService { // Only skips if explicitly set to false. If undefined/missing, webhooks execute (default behavior) const { features } = await this.db.services.GlobalConfig.getAllConfigs(); if (features?.webhooks === false) { - logger.debug(`[BUILD ${build.uuid}] Webhooks feature flag is disabled. 
Skipping webhook execution.`); + getLogger({ buildUuid: build.uuid }).debug('Webhooks feature flag is disabled, skipping webhook execution'); return; } @@ -94,14 +92,14 @@ export default class WebhookService extends BaseService { case BuildStatus.TORN_DOWN: break; default: - logger.debug(`[BUILD ${build.uuid}] Skipping Lifecycle Webhooks execution for status: ${build.status}`); + getLogger({ buildUuid: build.uuid }).debug(`Skipping Lifecycle Webhooks execution for status: ${build.status}`); return; } // if build is not full yaml and no webhooks defined in YAML config, we should not run webhooks (no more db webhook support) if (!build.enableFullYaml && build.webhooksYaml == null) { - logger.debug( - `[BUILD ${build.uuid}] Skipping Lifecycle Webhooks(non yaml config build) execution for status: ${build.status}` + getLogger({ buildUuid: build.uuid }).debug( + `Skipping Lifecycle Webhooks (non yaml config build) execution for status: ${build.status}` ); return; } @@ -114,12 +112,12 @@ export default class WebhookService extends BaseService { const configFileWebhooks: YamlService.Webhook[] = webhooks.filter((webhook) => webhook.state === build.status); // if no webhooks defined in YAML config, we should not run webhooks if (configFileWebhooks != null && configFileWebhooks.length < 1) { - logger.info(`[BUILD ${build.uuid}] No webhooks found to be triggered for build status: ${build.status}`); + getLogger({ buildUuid: build.uuid }).info(`No webhooks found to be triggered for build status: ${build.status}`); return; } - logger.info(`[BUILD ${build.uuid}] Triggering for build status: ${build.status}`); + getLogger({ buildUuid: build.uuid }).info(`Triggering webhooks for build status: ${build.status}`); for (const webhook of configFileWebhooks) { - logger.info(`[BUILD ${build.uuid}] Running webhook: ${webhook.name}`); + getLogger({ buildUuid: build.uuid }).info(`Running webhook: ${webhook.name}`); await this.runYamlConfigFileWebhookForBuild(webhook, build); } } @@ -146,9 +144,9 @@ export default class WebhookService extends BaseService { switch (webhook.type) { case 'codefresh': { const buildId: string = await this.db.services.Codefresh.triggerYamlConfigWebhookPipeline(webhook, data); - logger - .child({ url: `https://g.codefresh.io/build/${buildId}` }) - .info(`[BUILD ${build.uuid}] Webhook (${webhook.name}) triggered: ${buildId}`); + getLogger({ buildUuid: build.uuid }).info( + `Webhook (${webhook.name}) triggered: buildId=${buildId} url=https://g.codefresh.io/build/${buildId}` + ); metadata = { link: `https://g.codefresh.io/build/${buildId}`, }; @@ -176,11 +174,13 @@ export default class WebhookService extends BaseService { metadata: { status: 'starting' }, status: 'executing', }); - logger.info(`[BUILD ${build.uuid}] Docker webhook (${webhook.name}) invoked`); + getLogger({ buildUuid: build.uuid }).info(`Docker webhook (${webhook.name}) invoked`); // Execute webhook (this waits for completion) const result = await executeDockerWebhook(webhook, build, data); - logger.info(`[BUILD ${build.uuid}] Docker webhook (${webhook.name}) executed: ${result.jobName}`); + getLogger({ buildUuid: build.uuid }).info( + `Docker webhook (${webhook.name}) executed: jobName=${result.jobName}` + ); // Update the invocation record with final status await invocation.$query().patch({ @@ -206,11 +206,13 @@ export default class WebhookService extends BaseService { metadata: { status: 'starting' }, status: 'executing', }); - logger.info(`[BUILD ${build.uuid}] Command webhook (${webhook.name}) invoked`); + getLogger({ 
buildUuid: build.uuid }).info(`Command webhook (${webhook.name}) invoked`); // Execute webhook (this waits for completion) const result = await executeCommandWebhook(webhook, build, data); - logger.info(`[BUILD ${build.uuid}] Command webhook (${webhook.name}) executed: ${result.jobName}`); + getLogger({ buildUuid: build.uuid }).info( + `Command webhook (${webhook.name}) executed: jobName=${result.jobName}` + ); // Update the invocation record with final status await invocation.$query().patch({ @@ -228,9 +230,9 @@ export default class WebhookService extends BaseService { throw new Error(`Unsupported webhook type: ${webhook.type}`); } - logger.debug(`[BUILD ${build.uuid}] Webhook history added for runUUID: ${build.runUUID}`); + getLogger({ buildUuid: build.uuid }).debug(`Webhook history added for runUUID: ${build.runUUID}`); } catch (error) { - logger.error(`[BUILD ${build.uuid}] Error invoking webhook: ${error}`); + getLogger({ buildUuid: build.uuid }).error({ error }, 'Error invoking webhook'); // Still create a failed invocation record await this.db.models.WebhookInvocations.create({ @@ -259,14 +261,23 @@ export default class WebhookService extends BaseService { }); processWebhookQueue = async (job) => { - const buildId = job.data.buildId; - const build = await this.db.models.Build.query().findOne({ - id: buildId, + const { buildId, sender, correlationId, _ddTraceContext } = job.data; + + return withLogContext({ correlationId, sender, _ddTraceContext }, async () => { + const build = await this.db.models.Build.query().findOne({ + id: buildId, + }); + + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } + + try { + await this.db.services.Webhook.runWebhooksForBuild(build); + getLogger({ stage: LogStage.WEBHOOK_COMPLETE }).info('Webhooks invoked'); + } catch (e) { + getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error({ error: e }, 'Failed to invoke the webhook'); + } }); - try { - await this.db.services.Webhook.runWebhooksForBuild(build); - } catch (e) { - logger.error(`[BUILD ${build.uuid}] Failed to invoke the webhook: ${e}`); - } }; } diff --git a/sysops/tilt/ngrok-keycloak.yaml b/sysops/tilt/ngrok-keycloak.yaml index 11d5dff..aced647 100644 --- a/sysops/tilt/ngrok-keycloak.yaml +++ b/sysops/tilt/ngrok-keycloak.yaml @@ -29,15 +29,14 @@ spec: spec: containers: - name: ngrok - image: ngrok/ngrok:latest + image: wernight/ngrok command: ['ngrok'] args: - 'http' + - '--authtoken=$(NGROK_AUTHTOKEN)' - '--hostname=$(NGROK_KEYCLOAK_DOMAIN)' - '--log=stdout' - - '--log-level=debug' - 'lifecycle-keycloak:8080' # point at the Keycloak Service's name & port - envFrom: - secretRef: name: ngrok-secret diff --git a/sysops/tilt/ngrok.yaml b/sysops/tilt/ngrok.yaml index b8fc02c..40c46c6 100644 --- a/sysops/tilt/ngrok.yaml +++ b/sysops/tilt/ngrok.yaml @@ -29,15 +29,14 @@ spec: spec: containers: - name: ngrok - image: ngrok/ngrok:latest + image: wernight/ngrok command: ['ngrok'] args: - 'http' + - '--authtoken=$(NGROK_AUTHTOKEN)' - '--hostname=$(NGROK_LIFECYCLE_DOMAIN)' - '--log=stdout' - - '--log-level=debug' - 'lifecycle-web:80' # point at the K8s Service's name & port - envFrom: - secretRef: name: ngrok-secret diff --git a/tsconfig.json b/tsconfig.json index f3ead8c..9ee33a3 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -28,9 +28,15 @@ "jsx": "preserve", "incremental": true, "experimentalDecorators": true, - "emitDecoratorMetadata": true + "emitDecoratorMetadata": true, + "plugins": [ + { + "name": "next" + } + ], + "strictNullChecks": true }, - "include": ["src/**/*", 
"scripts/**/*", "package.json"], + "include": ["src/**/*", "scripts/**/*", "package.json", ".next/types/**/*.ts"], "exclude": ["node_modules", "**/node_modules/*"], "ts-node": { "compilerOptions": { From 3384f12d1461c0ad780beda6f0f2830b531a7489 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 10:23:49 -0800 Subject: [PATCH 02/23] use logger serializer for errors --- src/pages/api/v1/admin/ttl/cleanup.ts | 15 +++----------- src/pages/api/v1/ai/chat.ts | 20 ++++--------------- src/pages/api/v1/ai/models.ts | 5 +---- src/pages/api/v1/builds/[uuid]/deploy.ts | 5 +---- src/pages/api/v1/builds/[uuid]/graph.ts | 5 +---- src/pages/api/v1/builds/[uuid]/index.ts | 10 ++-------- .../v1/builds/[uuid]/jobs/[jobName]/events.ts | 10 ++-------- .../v1/builds/[uuid]/services/[name]/build.ts | 2 +- .../[uuid]/services/[name]/deployment.ts | 5 +---- src/pages/api/v1/builds/[uuid]/torndown.ts | 5 +---- src/pages/api/v1/builds/[uuid]/webhooks.ts | 12 +++-------- src/pages/api/v1/builds/index.ts | 2 +- src/pages/api/v1/config/cache.ts | 10 ++-------- src/pages/api/v1/deploy-summary.ts | 4 +--- src/pages/api/v1/deployables.ts | 4 +--- src/pages/api/v1/deploys.ts | 4 +--- src/pages/api/v1/pull-requests/[id]/builds.ts | 5 +---- src/pages/api/v1/pull-requests/[id]/index.ts | 5 +---- src/pages/api/v1/pull-requests/index.ts | 5 +---- src/pages/api/v1/repos/index.ts | 2 +- src/pages/api/v1/schema/validate.ts | 5 +---- src/pages/api/v1/users/index.ts | 2 +- src/pages/api/webhooks/github.ts | 5 +---- 23 files changed, 33 insertions(+), 114 deletions(-) diff --git a/src/pages/api/v1/admin/ttl/cleanup.ts b/src/pages/api/v1/admin/ttl/cleanup.ts index 163b500..55d025d 100644 --- a/src/pages/api/v1/admin/ttl/cleanup.ts +++ b/src/pages/api/v1/admin/ttl/cleanup.ts @@ -157,10 +157,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error occurred on TTL cleanup operation' - ); + getLogger().error({ error }, 'Error occurred on TTL cleanup operation'); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; @@ -178,10 +175,7 @@ async function getTTLConfig(res: NextApiResponse) { return res.status(200).json({ config: ttlConfig }); } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error occurred retrieving TTL cleanup config' - ); + getLogger().error({ error }, 'Error occurred retrieving TTL cleanup config'); return res.status(500).json({ error: 'Unable to retrieve TTL cleanup configuration' }); } } @@ -212,10 +206,7 @@ async function triggerTTLCleanup(req: NextApiRequest, res: NextApiResponse) { dryRun, }); } catch (error) { - getLogger({ stage: LogStage.CLEANUP_FAILED }).error( - { error: error instanceof Error ? 
error.message : String(error) }, - 'Error occurred triggering TTL cleanup' - ); + getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Error occurred triggering TTL cleanup'); return res.status(500).json({ error: 'Unable to trigger TTL cleanup job' }); } }); diff --git a/src/pages/api/v1/ai/chat.ts b/src/pages/api/v1/ai/chat.ts index 9a30387..39b9800 100644 --- a/src/pages/api/v1/ai/chat.ts +++ b/src/pages/api/v1/ai/chat.ts @@ -65,10 +65,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) await llmService.initialize(); } } catch (error) { - getLogger({ buildUuid }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'Failed to initialize LLM service' - ); + getLogger({ buildUuid }).error({ error }, 'Failed to initialize LLM service'); res.write( `data: ${JSON.stringify({ error: error.message, @@ -89,10 +86,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) try { context = await aiAgentContextService.gatherFullContext(buildUuid); } catch (error) { - getLogger({ buildUuid }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'Failed to gather context' - ); + getLogger({ buildUuid }).error({ error }, 'Failed to gather context'); res.write( `data: ${JSON.stringify({ error: `Build not found: ${error.message}`, @@ -187,10 +181,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } } } catch (error: any) { - getLogger({ buildUuid }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'LLM query failed' - ); + getLogger({ buildUuid }).error({ error }, 'LLM query failed'); // Check if it's a rate limit error if ( @@ -235,10 +226,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) res.write(`data: ${JSON.stringify({ type: 'complete', totalInvestigationTimeMs })}\n\n`); res.end(); } catch (error: any) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Unexpected error in AI agent chat' - ); + getLogger().error({ error }, 'Unexpected error in AI agent chat'); res.write(`data: ${JSON.stringify({ error: error?.message || 'Internal error' })}\n\n`); res.end(); } diff --git a/src/pages/api/v1/ai/models.ts b/src/pages/api/v1/ai/models.ts index c87c49a..66505f7 100644 --- a/src/pages/api/v1/ai/models.ts +++ b/src/pages/api/v1/ai/models.ts @@ -56,10 +56,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) return res.status(200).json({ models }); } catch (error: any) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Failed to fetch available models' - ); + getLogger().error({ error }, 'Failed to fetch available models'); return res.status(500).json({ error: 'Failed to fetch available models' }); } } diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts index 2c3f8df..e8c20d7 100644 --- a/src/pages/api/v1/builds/[uuid]/deploy.ts +++ b/src/pages/api/v1/builds/[uuid]/deploy.ts @@ -121,10 +121,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { message: `Redeploy for build ${uuid} has been queued`, }); } catch (error) { - getLogger({ stage: LogStage.BUILD_FAILED }).error( - { error: error instanceof Error ? 
error.message : String(error) }, - `Unable to proceed with redeploy for build ${uuid}` - ); + getLogger({ stage: LogStage.BUILD_FAILED }).error({ error }, `Unable to proceed with redeploy for build ${uuid}`); return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` }); } }); diff --git a/src/pages/api/v1/builds/[uuid]/graph.ts b/src/pages/api/v1/builds/[uuid]/graph.ts index 73916d1..da5d776 100644 --- a/src/pages/api/v1/builds/[uuid]/graph.ts +++ b/src/pages/api/v1/builds/[uuid]/graph.ts @@ -107,10 +107,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { dependencyGraph: build.dependencyGraph, }); } catch (error) { - getLogger({ buildUuid: uuid as string }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error fetching dependency graph' - ); + getLogger({ buildUuid: uuid as string }).error({ error }, 'Error fetching dependency graph'); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; diff --git a/src/pages/api/v1/builds/[uuid]/index.ts b/src/pages/api/v1/builds/[uuid]/index.ts index 34c1591..c7071f9 100644 --- a/src/pages/api/v1/builds/[uuid]/index.ts +++ b/src/pages/api/v1/builds/[uuid]/index.ts @@ -54,10 +54,7 @@ async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) { return res.status(200).json(build); } catch (error) { - getLogger({ buildUuid: uuid as string }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error fetching build' - ); + getLogger({ buildUuid: uuid as string }).error({ error }, 'Error fetching build'); return res.status(500).json({ error: 'An unexpected error occurred' }); } } @@ -109,10 +106,7 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlatio }, }); } catch (error) { - getLogger({ buildUuid: uuid as string }).error( - { error: error instanceof Error ? error.message : String(error) }, - `Error updating UUID to newUuid=${newUuid}` - ); + getLogger({ buildUuid: uuid as string }).error({ error }, `Error updating UUID to newUuid=${newUuid}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } } diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts index 4a2c275..afc059f 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts @@ -216,10 +216,7 @@ async function getJobEvents(jobName: string, namespace: string, buildUuid: strin return events; } catch (error) { - getLogger({ buildUuid }).error( - { error: error instanceof Error ? error.message : String(error) }, - `jobName=${jobName} Error fetching events` - ); + getLogger({ buildUuid }).error({ error }, `jobName=${jobName} Error fetching events`); throw error; } } @@ -250,10 +247,7 @@ const eventsHandler = async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error( - { error: error instanceof Error ? 
error.message : String(error) }, - `jobName=${jobName} Error getting events` - ); + logger.error({ error }, `jobName=${jobName} Error getting events`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts index e9371bc..daf6b04 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts @@ -159,7 +159,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { }); } catch (error) { getLogger({ stage: LogStage.BUILD_FAILED }).error( - { error: error instanceof Error ? error.message : String(error) }, + { error }, `Unable to proceed with redeploy for services ${name} in build ${uuid}` ); return res.status(500).json({ error: `Unable to proceed with redeploy for services ${name} in build ${uuid}.` }); diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts index 193793b..86011c4 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts @@ -280,10 +280,7 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => { getLogger({ buildUuid: uuid }).warn(`No deployment details found: deployUuid=${deployUuid}`); return res.status(404).json({ error: 'Deployment not found' }); } catch (error) { - getLogger({ buildUuid: uuid }).error( - { error: error instanceof Error ? error.message : String(error) }, - `Error getting deployment details: deployUuid=${deployUuid}` - ); + getLogger({ buildUuid: uuid }).error({ error }, `Error getting deployment details: deployUuid=${deployUuid}`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/torndown.ts b/src/pages/api/v1/builds/[uuid]/torndown.ts index 657dc6c..7236cf5 100644 --- a/src/pages/api/v1/builds/[uuid]/torndown.ts +++ b/src/pages/api/v1/builds/[uuid]/torndown.ts @@ -141,10 +141,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { namespacesUpdated: updatedDeploys, }); } catch (error) { - getLogger({ buildUuid: uuid as string }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error in cleanup API' - ); + getLogger({ buildUuid: uuid as string }).error({ error }, 'Error in cleanup API'); return res.status(500).json({ error: 'An unexpected error occurred.' }); } }; diff --git a/src/pages/api/v1/builds/[uuid]/webhooks.ts b/src/pages/api/v1/builds/[uuid]/webhooks.ts index 9753f5d..a32fbf9 100644 --- a/src/pages/api/v1/builds/[uuid]/webhooks.ts +++ b/src/pages/api/v1/builds/[uuid]/webhooks.ts @@ -206,10 +206,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - getLogger({ buildUuid: uuid as string }).error( - { error: error instanceof Error ? error.message : String(error) }, - `Error handling ${req.method} request` - ); + getLogger({ buildUuid: uuid as string }).error({ error }, `Error handling ${req.method} request`); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; @@ -256,7 +253,7 @@ async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) { }); } catch (error) { getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error( - { error: error instanceof Error ? 
error.message : String(error) }, + { error }, `Unable to proceed with webhook for build ${uuid}` ); return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` }); @@ -297,10 +294,7 @@ async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) { }, }); } catch (error) { - getLogger({ buildUuid: uuid as string }).error( - { error: error instanceof Error ? error.message : String(error) }, - 'Failed to retrieve webhooks' - ); + getLogger({ buildUuid: uuid as string }).error({ error }, 'Failed to retrieve webhooks'); return res.status(500).json({ error: `Unable to retrieve webhooks for build ${uuid}.` }); } } diff --git a/src/pages/api/v1/builds/index.ts b/src/pages/api/v1/builds/index.ts index aa206a7..2e442ad 100644 --- a/src/pages/api/v1/builds/index.ts +++ b/src/pages/api/v1/builds/index.ts @@ -204,7 +204,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - getLogger({ error: error instanceof Error ? error.message : String(error) }).error('Failed to fetch builds'); + getLogger({ error }).error('Failed to fetch builds'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/config/cache.ts b/src/pages/api/v1/config/cache.ts index 3660bcc..7c00b88 100644 --- a/src/pages/api/v1/config/cache.ts +++ b/src/pages/api/v1/config/cache.ts @@ -107,10 +107,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error occurred on config cache operation' - ); + getLogger().error({ error }, 'Error occurred on config cache operation'); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; @@ -121,10 +118,7 @@ async function getCachedConfig(res: NextApiResponse, refresh: boolean = false) { const configs = await configService.getAllConfigs(refresh); return res.status(200).json({ configs }); } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Error occurred retrieving cache config' - ); + getLogger().error({ error }, 'Error occurred retrieving cache config'); return res.status(500).json({ error: `Unable to retrieve global config values` }); } } diff --git a/src/pages/api/v1/deploy-summary.ts b/src/pages/api/v1/deploy-summary.ts index de6750d..dfe467f 100644 --- a/src/pages/api/v1/deploy-summary.ts +++ b/src/pages/api/v1/deploy-summary.ts @@ -158,9 +158,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(result.rows); } catch (error) { - getLogger({ error: error instanceof Error ? error.message : String(error) }).error( - `Failed to fetch deploy summary: buildId=${parsedBuildId}` - ); + getLogger({ error }).error(`Failed to fetch deploy summary: buildId=${parsedBuildId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/deployables.ts b/src/pages/api/v1/deployables.ts index f056fa5..34e96b0 100644 --- a/src/pages/api/v1/deployables.ts +++ b/src/pages/api/v1/deployables.ts @@ -159,9 +159,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(deployables); } catch (error) { - getLogger({ error: error instanceof Error ? 
error.message : String(error) }).error( - `Failed to fetch deployables: buildId=${parsedBuildId}` - ); + getLogger({ error }).error(`Failed to fetch deployables: buildId=${parsedBuildId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/deploys.ts b/src/pages/api/v1/deploys.ts index e487541..bfb0b11 100644 --- a/src/pages/api/v1/deploys.ts +++ b/src/pages/api/v1/deploys.ts @@ -222,9 +222,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(deploys); } catch (error) { - getLogger({ error: error instanceof Error ? error.message : String(error) }).error( - `Failed to fetch deploys: buildId=${parsedBuildId}` - ); + getLogger({ error }).error(`Failed to fetch deploys: buildId=${parsedBuildId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/[id]/builds.ts b/src/pages/api/v1/pull-requests/[id]/builds.ts index c3ad473..9ab2e43 100644 --- a/src/pages/api/v1/pull-requests/[id]/builds.ts +++ b/src/pages/api/v1/pull-requests/[id]/builds.ts @@ -155,10 +155,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(builds); } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - `Failed to fetch builds for pull request: id=${parsedId}` - ); + getLogger().error({ error }, `Failed to fetch builds for pull request: id=${parsedId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/[id]/index.ts b/src/pages/api/v1/pull-requests/[id]/index.ts index fe52c29..7f1bfea 100644 --- a/src/pages/api/v1/pull-requests/[id]/index.ts +++ b/src/pages/api/v1/pull-requests/[id]/index.ts @@ -144,10 +144,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(pullRequest); } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - `Failed to fetch pull request: id=${parsedId}` - ); + getLogger().error({ error }, `Failed to fetch pull request: id=${parsedId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/index.ts b/src/pages/api/v1/pull-requests/index.ts index 57db542..486dabc 100644 --- a/src/pages/api/v1/pull-requests/index.ts +++ b/src/pages/api/v1/pull-requests/index.ts @@ -271,10 +271,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - getLogger().error( - { error: error instanceof Error ? error.message : String(error) }, - 'Failed to fetch pull requests' - ); + getLogger().error({ error }, 'Failed to fetch pull requests'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/repos/index.ts b/src/pages/api/v1/repos/index.ts index 0f9b8d6..b849602 100644 --- a/src/pages/api/v1/repos/index.ts +++ b/src/pages/api/v1/repos/index.ts @@ -180,7 +180,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - getLogger().error({ error: error instanceof Error ? 
error.message : String(error) }, 'Error fetching repos');
+ getLogger().error({ error }, 'Error fetching repos');
 return res.status(500).json({ error: 'An unexpected error occurred' });
 }
 };
diff --git a/src/pages/api/v1/schema/validate.ts b/src/pages/api/v1/schema/validate.ts
index b9edebc..af30f62 100644
--- a/src/pages/api/v1/schema/validate.ts
+++ b/src/pages/api/v1/schema/validate.ts
@@ -130,10 +127,7 @@ const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse {
 return res.status(200).json(response);
 } catch (error) {
- getLogger().error({ error: error instanceof Error ? error.message : String(error) }, 'Error fetching users');
+ getLogger().error({ error }, 'Error fetching users');
 return res.status(500).json({ error: 'An unexpected error occurred' });
 };
diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts
index 68face7..b5719aa 100644
--- a/src/pages/api/webhooks/github.ts
+++ b/src/pages/api/webhooks/github.ts
@@ -53,10 +53,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
 getLogger({ stage: LogStage.WEBHOOK_QUEUED }).info('Webhook queued for processing');
 res.status(200).end();
 } catch (error) {
- getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).error(
- { error: error instanceof Error ? error.message : String(error) },
- 'Webhook failure'
- );
+ getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).error({ error }, 'Webhook failure');
 res.status(500).end();
 }
 });

From d3d4f34a3f6c573d5b11d251cb96529777497015 Mon Sep 17 00:00:00 2001
From: vmelikyan
Date: Sun, 11 Jan 2026 11:20:47 -0800
Subject: [PATCH 03/23] fix webhook invocation log to only log when webhook is actually invoked

---
 src/server/services/webhook.ts | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts
index 53994bb..7e1d4f9 100644
--- a/src/server/services/webhook.ts
+++ b/src/server/services/webhook.ts
@@ -120,6 +120,9 @@ export default class WebhookService extends BaseService {
 getLogger({ buildUuid: build.uuid }).info(`Running webhook: ${webhook.name}`);
 await this.runYamlConfigFileWebhookForBuild(webhook, build);
 }
+ getLogger({ stage: LogStage.WEBHOOK_COMPLETE, buildUuid: build.uuid }).info(
+ `Webhooks completed: count=${configFileWebhooks.length} status=${build.status}`
+ );
 }
 /**
@@ -274,7 +277,6 @@ export default class WebhookService extends BaseService {
 try {
 await this.db.services.Webhook.runWebhooksForBuild(build);
- getLogger({ stage: LogStage.WEBHOOK_COMPLETE }).info('Webhooks invoked');
 } catch (e) {
 getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error({ error: e }, 'Failed to invoke the webhook');
 }

From 7e6e7704d170f2fe5d7c55f50996446d2585f4e6 Mon Sep 17 00:00:00 2001
From: vmelikyan
Date: Sun, 11 Jan 2026 12:08:25 -0800
Subject: [PATCH 04/23] use withLogContext to avoid context overwriting

---
 src/server/lib/cli.ts | 18 +-
 .../deploymentManager/deploymentManager.ts | 117 +--
 src/server/lib/nativeBuild/index.ts | 78 +-
 src/server/lib/nativeHelm/helm.ts | 113 +--
 src/server/services/activityStream.ts | 49 +-
 src/server/services/deploy.ts | 727 +++++++++---------
 6 files changed, 556 insertions(+), 546 deletions(-)

diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts
index 931017d..dcd9e9b 100644
--- a/src/server/lib/cli.ts
+++ b/src/server/lib/cli.ts
@@ -18,7 +18,7 @@ import { merge } from 'lodash';
 import { Build, Deploy, Service, Deployable } from 'server/models';
 import { CLIDeployTypes, DeployTypes } from 'shared/constants';
 import { 
shellPromise } from './shell'; -import { getLogger } from './logger/index'; +import { getLogger, withLogContext } from './logger/index'; import GlobalConfigService from 'server/services/globalConfig'; import { DatabaseSettings } from 'server/services/types/globalConfig'; @@ -36,7 +36,10 @@ export async function deployBuild(build: Build) { return CLIDeployTypes.has(serviceType); }) .map(async (deploy) => { - return await cliDeploy(deploy); + return withLogContext( + { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }, + async () => cliDeploy(deploy) + ); }) ); } @@ -184,9 +187,14 @@ export async function deleteBuild(build: Build) { return CLIDeployTypes.has(serviceType) && d.active; }) .map(async (deploy) => { - const serviceType: DeployTypes = build.enableFullYaml ? deploy.deployable.type : deploy.service.type; - getLogger({ buildUuid }).info('Deleting CLI deploy'); - return serviceType === DeployTypes.CODEFRESH ? codefreshDestroy(deploy) : deleteDeploy(deploy); + return withLogContext( + { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }, + async () => { + const serviceType: DeployTypes = build.enableFullYaml ? deploy.deployable.type : deploy.service.type; + getLogger().info('Deleting CLI deploy'); + return serviceType === DeployTypes.CODEFRESH ? codefreshDestroy(deploy) : deleteDeploy(deploy); + } + ); }) ); getLogger({ buildUuid }).info('Deleted CLI resources'); diff --git a/src/server/lib/deploymentManager/deploymentManager.ts b/src/server/lib/deploymentManager/deploymentManager.ts index 00be405..ee3b88b 100644 --- a/src/server/lib/deploymentManager/deploymentManager.ts +++ b/src/server/lib/deploymentManager/deploymentManager.ts @@ -20,7 +20,7 @@ import { DeployStatus, DeployTypes, CLIDeployTypes } from 'shared/constants'; import { createKubernetesApplyJob, monitorKubernetesJob } from '../kubernetesApply/applyManifest'; import { nanoid, customAlphabet } from 'nanoid'; import DeployService from 'server/services/deploy'; -import { getLogger, updateLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount'; import { waitForDeployPodReady } from '../kubernetes'; @@ -134,77 +134,78 @@ export class DeploymentManager { } private async deployManifests(deploy: Deploy): Promise { - updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }); - const jobId = generateJobId(); - const deployService = new DeployService(); - const runUUID = deploy.runUUID || nanoid(); - - try { - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOYING, - statusMessage: 'Creating Kubernetes apply job', - }, - runUUID - ); + return withLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }, async () => { + const jobId = generateJobId(); + const deployService = new DeployService(); + const runUUID = deploy.runUUID || nanoid(); - await deploy.$fetchGraph('[build, deployable, service]'); + try { + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.DEPLOYING, + statusMessage: 'Creating Kubernetes apply job', + }, + runUUID + ); - if (!deploy.manifest) { - throw new Error(`Deploy ${deploy.uuid} has no manifest. 
Ensure manifests are generated before deployment.`); - } + await deploy.$fetchGraph('[build, deployable, service]'); - await ensureServiceAccountForJob(deploy.build.namespace, 'deploy'); + if (!deploy.manifest) { + throw new Error(`Deploy ${deploy.uuid} has no manifest. Ensure manifests are generated before deployment.`); + } - await createKubernetesApplyJob({ - deploy, - namespace: deploy.build.namespace, - jobId, - }); + await ensureServiceAccountForJob(deploy.build.namespace, 'deploy'); - const shortSha = deploy.sha?.substring(0, 7) || 'unknown'; - const jobName = `${deploy.uuid}-deploy-${jobId}-${shortSha}`; - const result = await monitorKubernetesJob(jobName, deploy.build.namespace); + await createKubernetesApplyJob({ + deploy, + namespace: deploy.build.namespace, + jobId, + }); - if (!result.success) { - throw new Error(result.message); - } - // Wait for the actual application pods to be ready - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOYING, - statusMessage: 'Waiting for pods to be ready', - }, - runUUID - ); + const shortSha = deploy.sha?.substring(0, 7) || 'unknown'; + const jobName = `${deploy.uuid}-deploy-${jobId}-${shortSha}`; + const result = await monitorKubernetesJob(jobName, deploy.build.namespace); - const cliDeploy = CLIDeployTypes.has(deploy.deployable.type); - const isReady = cliDeploy ? true : await waitForDeployPodReady(deploy); + if (!result.success) { + throw new Error(result.message); + } - if (isReady) { await deployService.patchAndUpdateActivityFeed( deploy, { - status: DeployStatus.READY, - statusMessage: cliDeploy ? 'CLI Deploy completed' : 'Kubernetes pods are ready', + status: DeployStatus.DEPLOYING, + statusMessage: 'Waiting for pods to be ready', }, runUUID ); - } else { - throw new Error('Pods failed to become ready within timeout'); + + const cliDeploy = CLIDeployTypes.has(deploy.deployable.type); + const isReady = cliDeploy ? true : await waitForDeployPodReady(deploy); + + if (isReady) { + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.READY, + statusMessage: cliDeploy ? 
'CLI Deploy completed' : 'Kubernetes pods are ready', + }, + runUUID + ); + } else { + throw new Error('Pods failed to become ready within timeout'); + } + } catch (error) { + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.DEPLOY_FAILED, + statusMessage: `Kubernetes apply failed: ${error.message}`, + }, + runUUID + ); + throw error; } - } catch (error) { - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOY_FAILED, - statusMessage: `Kubernetes apply failed: ${error.message}`, - }, - runUUID - ); - throw error; - } + }); } } diff --git a/src/server/lib/nativeBuild/index.ts b/src/server/lib/nativeBuild/index.ts index 27e542e..0581861 100644 --- a/src/server/lib/nativeBuild/index.ts +++ b/src/server/lib/nativeBuild/index.ts @@ -15,7 +15,7 @@ */ import { Deploy } from '../../models'; -import { getLogger, withSpan, updateLogContext } from '../logger/index'; +import { getLogger, withSpan, withLogContext } from '../logger/index'; import { ensureNamespaceExists } from './utils'; import { buildWithEngine, NativeBuildOptions } from './engines'; import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount'; @@ -29,51 +29,51 @@ export interface NativeBuildResult { } export async function buildWithNative(deploy: Deploy, options: NativeBuildOptions): Promise { - return withSpan( - 'lifecycle.build.image', - async () => { - updateLogContext({ deployUuid: options.deployUuid, serviceName: deploy.deployable?.name }); - const startTime = Date.now(); - getLogger().info('Build: starting (native)'); + return withLogContext({ deployUuid: options.deployUuid, serviceName: deploy.deployable?.name }, async () => { + return withSpan( + 'lifecycle.build.image', + async () => { + const startTime = Date.now(); + getLogger().info('Build: starting (native)'); - try { - await ensureNamespaceExists(options.namespace); + try { + await ensureNamespaceExists(options.namespace); - const serviceAccountName = await ensureServiceAccountForJob(options.namespace, 'build'); + const serviceAccountName = await ensureServiceAccountForJob(options.namespace, 'build'); - const buildOptions = { - ...options, - serviceAccount: serviceAccountName, - }; + const buildOptions = { + ...options, + serviceAccount: serviceAccountName, + }; - await deploy.$fetchGraph('[deployable]'); - updateLogContext({ serviceName: deploy.deployable?.name }); - const builderEngine = deploy.deployable?.builder?.engine; + await deploy.$fetchGraph('[deployable]'); + const builderEngine = deploy.deployable?.builder?.engine; - let result: NativeBuildResult; + let result: NativeBuildResult; - if (builderEngine === 'buildkit' || builderEngine === 'kaniko') { - getLogger().debug(`Build: using ${builderEngine} engine`); - result = await buildWithEngine(deploy, buildOptions, builderEngine); - } else { - throw new Error(`Unsupported builder engine: ${builderEngine}`); - } + if (builderEngine === 'buildkit' || builderEngine === 'kaniko') { + getLogger().debug(`Build: using ${builderEngine} engine`); + result = await buildWithEngine(deploy, buildOptions, builderEngine); + } else { + throw new Error(`Unsupported builder engine: ${builderEngine}`); + } - const duration = Date.now() - startTime; - getLogger().info(`Build: completed success=${result.success} duration=${duration}ms`); + const duration = Date.now() - startTime; + getLogger().info(`Build: completed success=${result.success} duration=${duration}ms`); - return result; - } catch (error) { - const duration = Date.now() - 
startTime; - getLogger().error(`Build: failed error=${error.message} duration=${duration}ms`); + return result; + } catch (error) { + const duration = Date.now() - startTime; + getLogger().error(`Build: failed error=${error.message} duration=${duration}ms`); - return { - success: false, - logs: `Build error: ${error.message}`, - jobName: '', - }; - } - }, - { resource: options.deployUuid } - ); + return { + success: false, + logs: `Build error: ${error.message}`, + jobName: '', + }; + } + }, + { resource: options.deployUuid } + ); + }); } diff --git a/src/server/lib/nativeHelm/helm.ts b/src/server/lib/nativeHelm/helm.ts index c7181c2..a466f21 100644 --- a/src/server/lib/nativeHelm/helm.ts +++ b/src/server/lib/nativeHelm/helm.ts @@ -18,7 +18,7 @@ import yaml from 'js-yaml'; import fs from 'fs'; import Deploy from 'server/models/Deploy'; import GlobalConfigService from 'server/services/globalConfig'; -import { getLogger, withSpan, updateLogContext } from 'server/lib/logger/index'; +import { getLogger, withSpan, withLogContext } from 'server/lib/logger/index'; import { shellPromise } from 'server/lib/shell'; import { randomAlphanumeric } from 'server/lib/random'; import { nanoid } from 'nanoid'; @@ -357,62 +357,63 @@ export async function deployHelm(deploys: Deploy[]): Promise { await Promise.all( deploys.map(async (deploy) => { - return withSpan( - 'lifecycle.helm.deploy', - async () => { - updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }); - const startTime = Date.now(); - const runUUID = deploy.runUUID ?? nanoid(); - const deployService = new DeployService(); - - try { - const useNative = await shouldUseNativeHelm(deploy); - const method = useNative ? 'Native Helm' : 'Codefresh Helm'; - - getLogger().debug(`Using ${method}`); - - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOYING, - statusMessage: `Deploying via ${method}`, - }, - runUUID - ); - - if (useNative) { - await deployNativeHelm(deploy); - } else { - await deployCodefreshHelm(deploy, deployService, runUUID); + return withLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }, async () => { + return withSpan( + 'lifecycle.helm.deploy', + async () => { + const startTime = Date.now(); + const runUUID = deploy.runUUID ?? nanoid(); + const deployService = new DeployService(); + + try { + const useNative = await shouldUseNativeHelm(deploy); + const method = useNative ? 
'Native Helm' : 'Codefresh Helm'; + + getLogger().debug(`Using ${method}`); + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.DEPLOYING, + statusMessage: `Deploying via ${method}`, + }, + runUUID + ); + + if (useNative) { + await deployNativeHelm(deploy); + } else { + await deployCodefreshHelm(deploy, deployService, runUUID); + } + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.READY, + statusMessage: `Successfully deployed via ${method}`, + }, + runUUID + ); + + await trackHelmDeploymentMetrics(deploy, 'success', Date.now() - startTime); + } catch (error) { + await trackHelmDeploymentMetrics(deploy, 'failure', Date.now() - startTime, error.message); + + await deployService.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.DEPLOY_FAILED, + statusMessage: `Helm deployment failed: ${error.message}`, + }, + runUUID + ); + + throw error; } - - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.READY, - statusMessage: `Successfully deployed via ${method}`, - }, - runUUID - ); - - await trackHelmDeploymentMetrics(deploy, 'success', Date.now() - startTime); - } catch (error) { - await trackHelmDeploymentMetrics(deploy, 'failure', Date.now() - startTime, error.message); - - await deployService.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.DEPLOY_FAILED, - statusMessage: `Helm deployment failed: ${error.message}`, - }, - runUUID - ); - - throw error; - } - }, - { resource: deploy.uuid, tags: { 'deploy.uuid': deploy.uuid } } - ); + }, + { resource: deploy.uuid, tags: { 'deploy.uuid': deploy.uuid } } + ); + }); }) ); } diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts index 421d83e..d5ab400 100644 --- a/src/server/services/activityStream.ts +++ b/src/server/services/activityStream.ts @@ -1145,29 +1145,34 @@ export default class ActivityStream extends BaseService { try { await Promise.all( deploys.map(async (deploy) => { - const deployId = deploy?.id; - const service = deploy?.service; - const deployable = deploy?.deployable; - const isActive = deploy?.active; - const isOrgHelmChart = orgChartName === deployable?.helm?.chart?.name; - const isPublic = isFullYaml ? deployable.public || isOrgHelmChart : service.public; - const serviceType = isFullYaml ? deployable?.type : service?.type; - const isActiveAndPublic = isActive && isPublic; - const isDeploymentType = [DeployTypes.DOCKER, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes( - serviceType + return withLogContext( + { deployUuid: deploy?.uuid, serviceName: deploy?.deployable?.name || deploy?.service?.name }, + async () => { + const deployId = deploy?.id; + const service = deploy?.service; + const deployable = deploy?.deployable; + const isActive = deploy?.active; + const isOrgHelmChart = orgChartName === deployable?.helm?.chart?.name; + const isPublic = isFullYaml ? deployable.public || isOrgHelmChart : service.public; + const serviceType = isFullYaml ? 
deployable?.type : service?.type; + const isActiveAndPublic = isActive && isPublic; + const isDeploymentType = [DeployTypes.DOCKER, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes( + serviceType + ); + const isDeployment = isActiveAndPublic && isDeploymentType; + if (!isDeployment) { + getLogger().debug(`Skipping deployment ${deploy?.name}`); + return; + } + await this.db.services.GithubService.githubDeploymentQueue + .add( + 'deployment', + { deployId, action: 'create', ...extractContextForQueue() }, + { delay: 10000, jobId: `deploy-${deployId}` } + ) + .catch((error) => getLogger().warn({ error }, `manageDeployments error with deployId=${deployId}`)); + } ); - const isDeployment = isActiveAndPublic && isDeploymentType; - if (!isDeployment) { - getLogger().debug(`Skipping deployment ${deploy?.name}`); - return; - } - await this.db.services.GithubService.githubDeploymentQueue - .add( - 'deployment', - { deployId, action: 'create', ...extractContextForQueue() }, - { delay: 10000, jobId: `deploy-${deployId}` } - ) - .catch((error) => getLogger().warn({ error }, `manageDeployments error with deployId=${deployId}`)); }) ); } catch (error) { diff --git a/src/server/services/deploy.ts b/src/server/services/deploy.ts index 9b37116..b69ed17 100644 --- a/src/server/services/deploy.ts +++ b/src/server/services/deploy.ts @@ -17,7 +17,7 @@ import BaseService from './_service'; import { Environment, Build, Service, Deploy, Deployable } from 'server/models'; import * as codefresh from 'server/lib/codefresh'; -import { getLogger, updateLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import hash from 'object-hash'; import { DeployStatus, DeployTypes } from 'shared/constants'; import * as cli from 'server/lib/cli'; @@ -370,187 +370,178 @@ export default class DeployService extends BaseService { } async deployAurora(deploy: Deploy): Promise { - updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }); - try { - // For now, we're just going to shell out and run the deploy - await deploy.reload(); - await deploy.$fetchGraph('[build, deployable]'); - updateLogContext({ serviceName: deploy.deployable?.name }); + return withLogContext( + { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }, + async () => { + try { + await deploy.reload(); + await deploy.$fetchGraph('[build, deployable]'); - if (!deploy.deployable) { - getLogger().error('Missing deployable for Aurora restore'); - return false; - } + if (!deploy.deployable) { + getLogger().error('Missing deployable for Aurora restore'); + return false; + } - /** - * For now, only run the CLI deploy step one time. 
- * Check for both BUILT and READY status because: - * - deployAurora sets status to BUILT after successful creation - * - DeploymentManager.deployManifests then changes it to READY after Kubernetes manifest deployment - * Both statuses indicate the Aurora database already exists and should not be recreated - */ - if ((deploy.status === DeployStatus.BUILT || deploy.status === DeployStatus.READY) && deploy.cname) { - getLogger().info(`Aurora: already built, skipping`); - return true; - } + if ((deploy.status === DeployStatus.BUILT || deploy.status === DeployStatus.READY) && deploy.cname) { + getLogger().info(`Aurora: already built, skipping`); + return true; + } - // Check if database already exists in AWS before attempting to create - // This handles both: status is BUILT/READY but cname missing, OR first-time deploy - const existingDbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name); - if (existingDbEndpoint) { - getLogger().info(`Aurora: exists, skipping`); - await deploy.$query().patch({ - cname: existingDbEndpoint, - status: DeployStatus.BUILT, - }); - return true; - } + const existingDbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name); + if (existingDbEndpoint) { + getLogger().info(`Aurora: exists, skipping`); + await deploy.$query().patch({ + cname: existingDbEndpoint, + status: DeployStatus.BUILT, + }); + return true; + } - const uuid = nanoid(); - await deploy.$query().patch({ - status: DeployStatus.BUILDING, - buildLogs: uuid, - runUUID: nanoid(), - }); - getLogger().info('Aurora: restoring'); - await cli.cliDeploy(deploy); + const uuid = nanoid(); + await deploy.$query().patch({ + status: DeployStatus.BUILDING, + buildLogs: uuid, + runUUID: nanoid(), + }); + getLogger().info('Aurora: restoring'); + await cli.cliDeploy(deploy); - // After creation, find the database endpoint - const dbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name); - if (dbEndpoint) { - await deploy.$query().patch({ - cname: dbEndpoint, - }); - } + const dbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name); + if (dbEndpoint) { + await deploy.$query().patch({ + cname: dbEndpoint, + }); + } - await deploy.reload(); - if (deploy.buildLogs === uuid) { - await deploy.$query().patch({ - status: DeployStatus.BUILT, - }); + await deploy.reload(); + if (deploy.buildLogs === uuid) { + await deploy.$query().patch({ + status: DeployStatus.BUILT, + }); + } + getLogger().info('Aurora: restored'); + return true; + } catch (e) { + getLogger().error({ error: e }, 'Aurora cluster restore failed'); + await deploy.$query().patch({ + status: DeployStatus.ERROR, + }); + return false; + } } - getLogger().info('Aurora: restored'); - return true; - } catch (e) { - getLogger().error({ error: e }, 'Aurora cluster restore failed'); - await deploy.$query().patch({ - status: DeployStatus.ERROR, - }); - return false; - } + ); } async deployCodefresh(deploy: Deploy): Promise { - updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }); - let result: boolean = false; + return withLogContext( + { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }, + async () => { + let result: boolean = false; - // We'll use either a tag specified in the UI when creating a manual build - // or the default tag specified on the service - const runUUID = nanoid(); - await deploy.$query().patch({ - runUUID, - }); + 
const runUUID = nanoid(); + await deploy.$query().patch({ + runUUID, + }); - // For now, we're just going to shell out and run the deploy - await deploy.reload(); - await deploy.$fetchGraph('[service.[repository], deployable.[repository], build]'); - const { build, service, deployable } = deploy; - updateLogContext({ serviceName: deployable?.name || service?.name }); - const { repository } = build.enableFullYaml ? deployable : service; - const repo = repository?.fullName; - const [owner, name] = repo?.split('/') || []; - const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name).catch((error) => { - getLogger().warn({ error, owner, name, branch: deploy.branchName }, 'Failed to retrieve commit SHA from github'); - }); + await deploy.reload(); + await deploy.$fetchGraph('[service.[repository], deployable.[repository], build]'); + const { build, service, deployable } = deploy; + const { repository } = build.enableFullYaml ? deployable : service; + const repo = repository?.fullName; + const [owner, name] = repo?.split('/') || []; + const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name).catch((error) => { + getLogger().warn( + { error, owner, name, branch: deploy.branchName }, + 'Failed to retrieve commit SHA from github' + ); + }); - if (!fullSha) { - getLogger().warn({ owner, name, branch: deploy.branchName }, 'Commit SHA cannot be falsy'); + if (!fullSha) { + getLogger().warn({ owner, name, branch: deploy.branchName }, 'Commit SHA cannot be falsy'); - result = false; - } else { - const shortSha = fullSha.substring(0, 7); - const envSha = hash(merge(deploy.env || {}, build.commentRuntimeEnv)); - const buildSha = `${shortSha}-${envSha}`; - - // If the SHA's are the same, nothing need to do and considered as done. 
- if (deploy?.sha === buildSha) { - // Make sure we're in a clean state - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT, sha: buildSha }, runUUID).catch( - (error) => { - getLogger().warn({ error }, 'Failed to update activity feed'); - } - ); - getLogger().info('Codefresh: no changes, marked built'); - result = true; - } else { - let buildLogs: string; - let codefreshBuildId: string; - try { - await deploy.$query().patch({ - buildLogs: null, - buildPipelineId: null, - buildOutput: null, - deployPipelineId: null, - deployOutput: null, - }); - - codefreshBuildId = await cli.codefreshDeploy(deploy, build, service, deployable).catch((error) => { - getLogger().error({ error }, 'Failed to receive codefresh build id'); - return null; - }); - getLogger().info('Codefresh: build triggered'); - if (codefreshBuildId != null) { - buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`; - - await this.patchAndUpdateActivityFeed( - deploy, - { - buildLogs, - status: DeployStatus.BUILDING, - buildPipelineId: codefreshBuildId, - statusMessage: 'CI build triggered...', - }, - runUUID - ).catch((error) => { - getLogger().warn({ error }, 'Failed to update activity feed'); - }); - getLogger().info(`Codefresh: waiting for build url=${buildLogs}`); - await cli.waitForCodefresh(codefreshBuildId); - const buildOutput = await getLogs(codefreshBuildId); - getLogger().info('Codefresh: build completed'); - await this.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.BUILT, - sha: buildSha, - buildOutput, - statusMessage: 'CI build completed', - }, - runUUID - ).catch((error) => { - getLogger().warn({ error }, 'Failed to update activity feed'); - }); + result = false; + } else { + const shortSha = fullSha.substring(0, 7); + const envSha = hash(merge(deploy.env || {}, build.commentRuntimeEnv)); + const buildSha = `${shortSha}-${envSha}`; + + if (deploy?.sha === buildSha) { + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT, sha: buildSha }, runUUID).catch( + (error) => { + getLogger().warn({ error }, 'Failed to update activity feed'); + } + ); + getLogger().info('Codefresh: no changes, marked built'); result = true; + } else { + let buildLogs: string; + let codefreshBuildId: string; + try { + await deploy.$query().patch({ + buildLogs: null, + buildPipelineId: null, + buildOutput: null, + deployPipelineId: null, + deployOutput: null, + }); + + codefreshBuildId = await cli.codefreshDeploy(deploy, build, service, deployable).catch((error) => { + getLogger().error({ error }, 'Failed to receive codefresh build id'); + return null; + }); + getLogger().info('Codefresh: build triggered'); + if (codefreshBuildId != null) { + buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`; + + await this.patchAndUpdateActivityFeed( + deploy, + { + buildLogs, + status: DeployStatus.BUILDING, + buildPipelineId: codefreshBuildId, + statusMessage: 'CI build triggered...', + }, + runUUID + ).catch((error) => { + getLogger().warn({ error }, 'Failed to update activity feed'); + }); + getLogger().info(`Codefresh: waiting for build url=${buildLogs}`); + await cli.waitForCodefresh(codefreshBuildId); + const buildOutput = await getLogs(codefreshBuildId); + getLogger().info('Codefresh: build completed'); + await this.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.BUILT, + sha: buildSha, + buildOutput, + statusMessage: 'CI build completed', + }, + runUUID + ).catch((error) => { + getLogger().warn({ error }, 'Failed to update activity feed'); + }); + 
result = true; + } + } catch (error) { + getLogger().error({ error, url: buildLogs }, 'Codefresh build failed'); + await this.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.ERROR, + sha: buildSha, + statusMessage: 'CI build failed', + }, + runUUID + ); + result = false; + } } - } catch (error) { - // Error'd while waiting for the pipeline to finish. This is usually due to an actual - // pipeline failure or a pipeline getting terminated. - getLogger().error({ error, url: buildLogs }, 'Codefresh build failed'); - await this.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.ERROR, - sha: buildSha, - statusMessage: 'CI build failed', - }, - runUUID - ); - result = false; } - } - } - return result; + return result; + } + ); } async deployCLI(deploy: Deploy): Promise { @@ -574,227 +565,231 @@ export default class DeployService extends BaseService { * @param deploy the deploy to build an image for */ async buildImage(deploy: Deploy, enableFullYaml: boolean, index: number): Promise { - updateLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }); - try { - // We'll use either a tag specified in the UI when creating a manual build - // or the default tag specified on the service - const runUUID = deploy.runUUID ?? nanoid(); - await deploy.$query().patch({ - runUUID, - }); - - await deploy.$fetchGraph('[service, build.[environment], deployable]'); - const { service, build, deployable } = deploy; - updateLogContext({ serviceName: deployable?.name || service?.name }); - const uuid = build?.uuid; - - if (!enableFullYaml) { - await service.$fetchGraph('repository'); - let config: YamlService.LifecycleConfig; - const isClassicModeOnly = build?.environment?.classicModeOnly ?? false; - if (!isClassicModeOnly) { - config = await YamlService.fetchLifecycleConfigByRepository(service.repository, deploy.branchName); - } - - // Docker types are already built - next - if (service.type === DeployTypes.DOCKER) { - await this.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.BUILT, - dockerImage: `${service.dockerImage}:${deploy.tag}`, - }, - runUUID - ); - return true; - } else if (service.type === DeployTypes.GITHUB) { - if (deploy.branchName === null) { - // This means we're using an external host, rather than building from source. - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID); - } else { - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID); - - await build?.$fetchGraph('pullRequest.[repository]'); - const pullRequest = build?.pullRequest; - const author = pullRequest?.githubLogin; - const enabledFeatures = build?.enabledFeatures || []; - const repository = service?.repository; - const repo = repository?.fullName; - const [owner, name] = repo?.split('/') || []; - const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name); - - let repositoryName: string = service.repository.fullName; - let branchName: string = deploy.branchName; - let dockerfilePath: string = service.dockerfilePath || './Dockerfile'; - let initDockerfilePath: string = service.initDockerfilePath; - - let githubService: YamlService.GithubService; - // TODO This should be updated! 
- if (config != null && config.version === '0.0.3-alpha-1') { - const yamlService: YamlService.Service = YamlService.getDeployingServicesByName(config, service.name); - if (yamlService != null) { - githubService = yamlService as YamlService.GithubService; - - repositoryName = githubService.github.repository; - branchName = githubService.github.branchName; - dockerfilePath = githubService.github.docker.app.dockerfilePath; - - if (githubService.github.docker.init != null) { - initDockerfilePath = githubService.github.docker.init.dockerfilePath; - } - } - } + return withLogContext( + { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }, + async () => { + try { + const runUUID = deploy.runUUID ?? nanoid(); + await deploy.$query().patch({ + runUUID, + }); - // Verify we actually have a SHA from github before proceeding - if (!fullSha) { - // We were unable to retrieve this branch/repo combo - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); - return false; - } + await deploy.$fetchGraph('[service, build.[environment], deployable]'); + const { service, build, deployable } = deploy; + const uuid = build?.uuid; - const shortSha = fullSha.substring(0, 7); - - getLogger().debug({ serviceName: service.name, branchName: deploy.branchName }, 'Building docker image'); - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILDING, sha: fullSha }, runUUID); - /** - * @note { svc: index } ensures the hash for each image is unique per service - */ - const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv, { svc: index }); - const envVarsHash = hash(envVariables); - const buildPipelineName = deployable?.dockerBuildPipelineName; - const tag = generateDeployTag({ sha: shortSha, envVarsHash }); - const initTag = generateDeployTag({ prefix: 'lfc-init', sha: shortSha, envVarsHash }); - let ecrRepo = deployable?.ecr; - - const { lifecycleDefaults, app_setup } = await GlobalConfigService.getInstance().getAllConfigs(); - const { ecrDomain, ecrRegistry: registry } = lifecycleDefaults; - - const serviceName = deploy.build?.enableFullYaml ? deployable?.name : deploy.service?.name; - ecrRepo = constructEcrRepoPath(deployable?.ecr, serviceName, ecrDomain); - - const tagsExist = - (await codefresh.tagExists({ tag, ecrRepo, uuid })) && - (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid }))); - - getLogger().debug({ tagsExist }, 'Tags exist check'); - const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG'; - if (!ecrDomain || !registry) { - getLogger().error({ lifecycleDefaults }, 'Missing ECR config to build image'); - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); - return false; + if (!enableFullYaml) { + await service.$fetchGraph('repository'); + let config: YamlService.LifecycleConfig; + const isClassicModeOnly = build?.environment?.classicModeOnly ?? 
false; + if (!isClassicModeOnly) { + config = await YamlService.fetchLifecycleConfigByRepository(service.repository, deploy.branchName); } - if (!tagsExist) { - await deploy.$query().patchAndFetch({ - buildOutput: null, - buildLogs: null, - buildPipelineId: null, - }); - const codefreshBuildId = await codefresh.buildImage({ - ecrRepo, - envVars: envVariables, - dockerfilePath, - gitOrg, - tag, - revision: fullSha, - repo: repositoryName, - branch: branchName, - initDockerfilePath, - cacheFrom: deploy.dockerImage, - afterBuildPipelineId: service.afterBuildPipelineId, - detatchAfterBuildPipeline: service.detatchAfterBuildPipeline, - runtimeName: service.runtimeName, - buildPipelineName, + // Docker types are already built - next + if (service.type === DeployTypes.DOCKER) { + await this.patchAndUpdateActivityFeed( deploy, - uuid, - initTag, - author, - enabledFeatures, - ecrDomain, - deployCluster: lifecycleDefaults.deployCluster, - }); - const buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`; - await this.patchAndUpdateActivityFeed(deploy, { buildLogs }, runUUID); - const buildSuccess = await codefresh.waitForImage(codefreshBuildId); - if (buildSuccess) { - await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); - return true; + { + status: DeployStatus.BUILT, + dockerImage: `${service.dockerImage}:${deploy.tag}`, + }, + runUUID + ); + return true; + } else if (service.type === DeployTypes.GITHUB) { + if (deploy.branchName === null) { + // This means we're using an external host, rather than building from source. + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID); } else { - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID); - return false; + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID); + + await build?.$fetchGraph('pullRequest.[repository]'); + const pullRequest = build?.pullRequest; + const author = pullRequest?.githubLogin; + const enabledFeatures = build?.enabledFeatures || []; + const repository = service?.repository; + const repo = repository?.fullName; + const [owner, name] = repo?.split('/') || []; + const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name); + + let repositoryName: string = service.repository.fullName; + let branchName: string = deploy.branchName; + let dockerfilePath: string = service.dockerfilePath || './Dockerfile'; + let initDockerfilePath: string = service.initDockerfilePath; + + let githubService: YamlService.GithubService; + // TODO This should be updated! 
+ if (config != null && config.version === '0.0.3-alpha-1') { + const yamlService: YamlService.Service = YamlService.getDeployingServicesByName(config, service.name); + if (yamlService != null) { + githubService = yamlService as YamlService.GithubService; + + repositoryName = githubService.github.repository; + branchName = githubService.github.branchName; + dockerfilePath = githubService.github.docker.app.dockerfilePath; + + if (githubService.github.docker.init != null) { + initDockerfilePath = githubService.github.docker.init.dockerfilePath; + } + } + } + + // Verify we actually have a SHA from github before proceeding + if (!fullSha) { + // We were unable to retrieve this branch/repo combo + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); + return false; + } + + const shortSha = fullSha.substring(0, 7); + + getLogger().debug( + { serviceName: service.name, branchName: deploy.branchName }, + 'Building docker image' + ); + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILDING, sha: fullSha }, runUUID); + /** + * @note { svc: index } ensures the hash for each image is unique per service + */ + const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv, { svc: index }); + const envVarsHash = hash(envVariables); + const buildPipelineName = deployable?.dockerBuildPipelineName; + const tag = generateDeployTag({ sha: shortSha, envVarsHash }); + const initTag = generateDeployTag({ prefix: 'lfc-init', sha: shortSha, envVarsHash }); + let ecrRepo = deployable?.ecr; + + const { lifecycleDefaults, app_setup } = await GlobalConfigService.getInstance().getAllConfigs(); + const { ecrDomain, ecrRegistry: registry } = lifecycleDefaults; + + const serviceName = deploy.build?.enableFullYaml ? 
deployable?.name : deploy.service?.name; + ecrRepo = constructEcrRepoPath(deployable?.ecr, serviceName, ecrDomain); + + const tagsExist = + (await codefresh.tagExists({ tag, ecrRepo, uuid })) && + (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid }))); + + getLogger().debug({ tagsExist }, 'Tags exist check'); + const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG'; + if (!ecrDomain || !registry) { + getLogger().error({ lifecycleDefaults }, 'Missing ECR config to build image'); + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); + return false; + } + if (!tagsExist) { + await deploy.$query().patchAndFetch({ + buildOutput: null, + buildLogs: null, + buildPipelineId: null, + }); + + const codefreshBuildId = await codefresh.buildImage({ + ecrRepo, + envVars: envVariables, + dockerfilePath, + gitOrg, + tag, + revision: fullSha, + repo: repositoryName, + branch: branchName, + initDockerfilePath, + cacheFrom: deploy.dockerImage, + afterBuildPipelineId: service.afterBuildPipelineId, + detatchAfterBuildPipeline: service.detatchAfterBuildPipeline, + runtimeName: service.runtimeName, + buildPipelineName, + deploy, + uuid, + initTag, + author, + enabledFeatures, + ecrDomain, + deployCluster: lifecycleDefaults.deployCluster, + }); + const buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`; + await this.patchAndUpdateActivityFeed(deploy, { buildLogs }, runUUID); + const buildSuccess = await codefresh.waitForImage(codefreshBuildId); + if (buildSuccess) { + await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); + return true; + } else { + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID); + return false; + } + } else { + await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); + await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID); + return true; + } } } else { - await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); - await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID); - return true; + getLogger().debug({ type: service.type }, 'Build type not recognized'); + return false; } - } - } else { - getLogger().debug({ type: service.type }, 'Build type not recognized'); - return false; - } - return true; - } else { - switch (deployable.type) { - case DeployTypes.GITHUB: - return this.buildImageForHelmAndGithub(deploy, runUUID); - case DeployTypes.DOCKER: - await this.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.BUILT, - dockerImage: `${deployable.dockerImage}:${deploy.tag}`, - }, - runUUID - ); - getLogger().info('Image: public, marked built'); return true; - case DeployTypes.HELM: { - try { - const chartType = await determineChartType(deploy); - - if (chartType !== ChartType.PUBLIC) { + } else { + switch (deployable.type) { + case DeployTypes.GITHUB: return this.buildImageForHelmAndGithub(deploy, runUUID); - } - - let fullSha = null; - - await deploy.$fetchGraph('deployable.repository'); - if (deploy.deployable?.repository) { + case DeployTypes.DOCKER: + await this.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.BUILT, + dockerImage: `${deployable.dockerImage}:${deploy.tag}`, + }, + runUUID + ); + getLogger().info('Image: public, marked built'); + return true; + case DeployTypes.HELM: { try { - fullSha = await github.getShaForDeploy(deploy); - } catch (shaError) { - getLogger().debug( - { error: shaError }, - 'Could not get 
SHA for PUBLIC helm chart, continuing without it' + const chartType = await determineChartType(deploy); + + if (chartType !== ChartType.PUBLIC) { + return this.buildImageForHelmAndGithub(deploy, runUUID); + } + + let fullSha = null; + + await deploy.$fetchGraph('deployable.repository'); + if (deploy.deployable?.repository) { + try { + fullSha = await github.getShaForDeploy(deploy); + } catch (shaError) { + getLogger().debug( + { error: shaError }, + 'Could not get SHA for PUBLIC helm chart, continuing without it' + ); + } + } + + await this.patchAndUpdateActivityFeed( + deploy, + { + status: DeployStatus.BUILT, + statusMessage: 'Helm chart does not need to be built', + ...(fullSha && { sha: fullSha }), + }, + runUUID ); + return true; + } catch (error) { + getLogger().warn({ error }, 'Error processing Helm deployment'); + return false; } } - - await this.patchAndUpdateActivityFeed( - deploy, - { - status: DeployStatus.BUILT, - statusMessage: 'Helm chart does not need to be built', - ...(fullSha && { sha: fullSha }), - }, - runUUID - ); - return true; - } catch (error) { - getLogger().warn({ error }, 'Error processing Helm deployment'); - return false; + default: + getLogger().debug({ type: deployable.type }, 'Build type not recognized'); + return false; } } - default: - getLogger().debug({ type: deployable.type }, 'Build type not recognized'); - return false; + } catch (e) { + getLogger().error({ error: e }, 'Uncaught error building docker image'); + return false; } } - } catch (e) { - getLogger().error({ error: e }, 'Uncaught error building docker image'); - return false; - } + ); } public async patchAndUpdateActivityFeed( From c5e41f6031dd32bbd145f325148171375011a241 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 17:53:18 -0800 Subject: [PATCH 05/23] drop [bot] made issue_comment webhooks --- src/pages/api/webhooks/github.ts | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts index b5719aa..16af4f2 100644 --- a/src/pages/api/webhooks/github.ts +++ b/src/pages/api/webhooks/github.ts @@ -34,7 +34,15 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const isVerified = github.verifyWebhookSignature(req); if (!isVerified) throw new Error('Webhook not verified'); - getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook received: event=${req.headers['x-github-event']}`); + const event = req.headers['x-github-event'] as string; + getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook received: event=${event}`); + + const isBot = sender?.includes('[bot]') === true; + if (event === 'issue_comment' && isBot) { + getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).debug('Skipped: bot-triggered issue_comment'); + res.status(200).end(); + return; + } if (!['web', 'all'].includes(LIFECYCLE_MODE)) { getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).info('Skipped: wrong LIFECYCLE_MODE'); From 36c1326fe25537d694d30871eb27fda7e2ab9d6f Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 17:58:41 -0800 Subject: [PATCH 06/23] add dd-trace service names for redis and pg --- dd-trace.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dd-trace.js b/dd-trace.js index 9575e4f..7e8d65c 100644 --- a/dd-trace.js +++ b/dd-trace.js @@ -35,3 +35,11 @@ tracer.use('next', { tracer.use('net', false); tracer.use('dns', false); + +tracer.use('ioredis', { + service: 'lifecycle-redis', +}); + +tracer.use('pg', { + service: 'lifecycle-postgres', +}); From 
514f713fcb1d2ee78e666a034203da4c51706b86 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 18:04:54 -0800 Subject: [PATCH 07/23] drop dd trace for bot issue_comment activity --- src/pages/api/webhooks/github.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts index 16af4f2..df9581e 100644 --- a/src/pages/api/webhooks/github.ts +++ b/src/pages/api/webhooks/github.ts @@ -15,6 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; +import tracer from 'dd-trace'; import * as github from 'server/lib/github'; import { LIFECYCLE_MODE } from 'shared/index'; import { stringify } from 'flatted'; @@ -39,6 +40,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const isBot = sender?.includes('[bot]') === true; if (event === 'issue_comment' && isBot) { + tracer.scope().active()?.setTag('manual.drop', true); getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).debug('Skipped: bot-triggered issue_comment'); res.status(200).end(); return; From 5301455d4b577607be80b2819a95bdaa29e00c8c Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 18:23:21 -0800 Subject: [PATCH 08/23] standardize webhook and helm log messages --- src/server/lib/codefresh/utils/index.ts | 2 ++ src/server/lib/nativeHelm/helm.ts | 4 ++-- src/server/services/webhook.ts | 26 +++++++++++-------------- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/src/server/lib/codefresh/utils/index.ts b/src/server/lib/codefresh/utils/index.ts index d00e2ec..71aa572 100644 --- a/src/server/lib/codefresh/utils/index.ts +++ b/src/server/lib/codefresh/utils/index.ts @@ -17,6 +17,7 @@ import { generateYaml } from 'server/lib/codefresh/utils/generateYaml'; import { generateCodefreshCmd } from 'server/lib/codefresh/utils/generateCodefreshCmd'; import { CF, CF_CHECKOUT_STEP, CF_BUILD_STEP, CF_AFTER_BUILD_STEP } from 'server/lib/codefresh/constants'; +import { updateLogContext } from 'server/lib/logger/index'; export const constructBuildArgs = (envVars = {}) => { const envVarsItems = Object.keys(envVars); @@ -88,6 +89,7 @@ export const getCodefreshPipelineIdFromOutput = (output: string) => { for (const line of lines) { const trimmedLine = line.trim(); if (regex.test(trimmedLine)) { + updateLogContext({ pipelineId: trimmedLine }); return trimmedLine; } } diff --git a/src/server/lib/nativeHelm/helm.ts b/src/server/lib/nativeHelm/helm.ts index a466f21..1785e06 100644 --- a/src/server/lib/nativeHelm/helm.ts +++ b/src/server/lib/nativeHelm/helm.ts @@ -248,7 +248,7 @@ export async function shouldUseNativeHelm(deploy: Deploy): Promise { export async function deployNativeHelm(deploy: Deploy): Promise { const { deployable, build } = deploy; - getLogger().info('Helm: deploying (native)'); + getLogger().info('Helm: deploying method=native'); if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) { await applyHttpScaleObjectManifestYaml(deploy, build.namespace); @@ -311,7 +311,7 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService, const deployPipelineId = getCodefreshPipelineIdFromOutput(output); const statusMessage = 'Starting deployment via Helm'; - getLogger().info(`Helm: deploying (Codefresh) pipelineId=${deployPipelineId}`); + getLogger().info(`Helm: deploying method=codefresh`); await deployService.patchAndUpdateActivityFeed( deploy, diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts index 7e1d4f9..0ceb0a9 100644 --- a/src/server/services/webhook.ts +++ 
b/src/server/services/webhook.ts @@ -117,8 +117,10 @@ export default class WebhookService extends BaseService { } getLogger({ buildUuid: build.uuid }).info(`Triggering webhooks for build status: ${build.status}`); for (const webhook of configFileWebhooks) { - getLogger({ buildUuid: build.uuid }).info(`Running webhook: ${webhook.name}`); - await this.runYamlConfigFileWebhookForBuild(webhook, build); + await withLogContext({ webhookName: webhook.name, webhookType: webhook.type }, async () => { + getLogger().info(`Webhook: running name=${webhook.name}`); + await this.runYamlConfigFileWebhookForBuild(webhook, build); + }); } getLogger({ stage: LogStage.WEBHOOK_COMPLETE, buildUuid: build.uuid }).info( `Webhooks completed: count=${configFileWebhooks.length} status=${build.status}` @@ -147,9 +149,7 @@ export default class WebhookService extends BaseService { switch (webhook.type) { case 'codefresh': { const buildId: string = await this.db.services.Codefresh.triggerYamlConfigWebhookPipeline(webhook, data); - getLogger({ buildUuid: build.uuid }).info( - `Webhook (${webhook.name}) triggered: buildId=${buildId} url=https://g.codefresh.io/build/${buildId}` - ); + getLogger().info(`Webhook: triggered buildId=${buildId} url=https://g.codefresh.io/build/${buildId}`); metadata = { link: `https://g.codefresh.io/build/${buildId}`, }; @@ -177,13 +177,11 @@ export default class WebhookService extends BaseService { metadata: { status: 'starting' }, status: 'executing', }); - getLogger({ buildUuid: build.uuid }).info(`Docker webhook (${webhook.name}) invoked`); + getLogger().info(`Webhook: invoking`); // Execute webhook (this waits for completion) const result = await executeDockerWebhook(webhook, build, data); - getLogger({ buildUuid: build.uuid }).info( - `Docker webhook (${webhook.name}) executed: jobName=${result.jobName}` - ); + getLogger().info(`Webhook: executed jobName=${result.jobName}`); // Update the invocation record with final status await invocation.$query().patch({ @@ -209,13 +207,11 @@ export default class WebhookService extends BaseService { metadata: { status: 'starting' }, status: 'executing', }); - getLogger({ buildUuid: build.uuid }).info(`Command webhook (${webhook.name}) invoked`); + getLogger().info(`Webhook: invoking`); // Execute webhook (this waits for completion) const result = await executeCommandWebhook(webhook, build, data); - getLogger({ buildUuid: build.uuid }).info( - `Command webhook (${webhook.name}) executed: jobName=${result.jobName}` - ); + getLogger().info(`Webhook: executed jobName=${result.jobName}`); // Update the invocation record with final status await invocation.$query().patch({ @@ -233,9 +229,9 @@ export default class WebhookService extends BaseService { throw new Error(`Unsupported webhook type: ${webhook.type}`); } - getLogger({ buildUuid: build.uuid }).debug(`Webhook history added for runUUID: ${build.runUUID}`); + getLogger().debug(`Webhook: history added runUUID=${build.runUUID}`); } catch (error) { - getLogger({ buildUuid: build.uuid }).error({ error }, 'Error invoking webhook'); + getLogger({ error }).error('Webhook: invocation failed'); // Still create a failed invocation record await this.db.models.WebhookInvocations.create({ From 41d13160db3f660860624802a72d7ee577284e16 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 18:27:33 -0800 Subject: [PATCH 09/23] standardize more log messages --- src/server/lib/cli.ts | 4 +-- src/server/lib/codefresh/index.ts | 2 +- src/server/lib/kubernetes.ts | 36 +++++++++++++-------------- 
src/server/lib/queueManager.ts | 2 +- src/server/lib/redisClient.ts | 2 +- src/server/lib/webhook/index.ts | 4 +-- src/server/services/activityStream.ts | 10 +++----- src/server/services/build.ts | 24 +++++++++--------- src/server/services/deploy.ts | 33 ++++++++++++------------ src/server/services/github.ts | 14 ++++++----- src/server/services/globalConfig.ts | 4 +-- 11 files changed, 67 insertions(+), 68 deletions(-) diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts index dcd9e9b..929c3a4 100644 --- a/src/server/lib/cli.ts +++ b/src/server/lib/cli.ts @@ -191,13 +191,13 @@ export async function deleteBuild(build: Build) { { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name }, async () => { const serviceType: DeployTypes = build.enableFullYaml ? deploy.deployable.type : deploy.service.type; - getLogger().info('Deleting CLI deploy'); + getLogger().info('CLI: deleting'); return serviceType === DeployTypes.CODEFRESH ? codefreshDestroy(deploy) : deleteDeploy(deploy); } ); }) ); - getLogger({ buildUuid }).info('Deleted CLI resources'); + getLogger({ buildUuid }).info('CLI: deleted'); } catch (e) { getLogger({ buildUuid, error: e }).error('Error deleting CLI resources'); } diff --git a/src/server/lib/codefresh/index.ts b/src/server/lib/codefresh/index.ts index 4407437..9954d0f 100644 --- a/src/server/lib/codefresh/index.ts +++ b/src/server/lib/codefresh/index.ts @@ -30,7 +30,7 @@ export const tagExists = async ({ tag, ecrRepo = 'lifecycle-deployments', uuid = try { const command = `aws ecr describe-images --repository-name=${repoName} --image-ids=imageTag=${tag} --no-paginate --no-cli-auto-prompt --registry-id ${registryId}`; await shellPromise(command); - getLogger().info(`ECR: tag=${tag} exists in ${repoName}`); + getLogger().info(`ECR: exists tag=${tag} repo=${repoName}`); return true; } catch (error) { getLogger().debug(`ECR: tag=${tag} not found in ${repoName}`); diff --git a/src/server/lib/kubernetes.ts b/src/server/lib/kubernetes.ts index 527062e..77d49d3 100644 --- a/src/server/lib/kubernetes.ts +++ b/src/server/lib/kubernetes.ts @@ -199,7 +199,7 @@ export async function createOrUpdateNamespace({ buildUUID, }); - getLogger({ namespace: name }).info(`Creating/updating namespace ${logMessage}`); + getLogger({ namespace: name }).info(`Deploy: creating namespace ${logMessage}`); const namespace = { apiVersion: 'v1', @@ -221,10 +221,10 @@ export async function createOrUpdateNamespace({ await client.patchNamespace(name, patch, undefined, undefined, undefined, undefined, undefined, { headers: { 'Content-Type': 'application/json-patch+json' }, }); - getLogger({ namespace: name }).info(`Updated namespace ${patchMessage}`); + getLogger({ namespace: name }).info(`Deploy: updated namespace ${patchMessage}`); return; } else { - getLogger({ namespace: name }).info('Namespace: static, skipping update'); + getLogger({ namespace: name }).info('Deploy: skipped namespace update reason=static'); return; } } @@ -338,11 +338,11 @@ export async function createOrUpdateServiceAccount({ namespace, role }: { namesp */ export async function applyManifests(build: Build): Promise { if (!build.manifest || build.manifest.trim().length === 0) { - getLogger().info('Deploying via DeploymentManager'); + getLogger().info('Deploy: starting method=deploymentManager'); return []; } - getLogger().info('Deploying via legacy manifest'); + getLogger().info('Deploy: starting method=legacyManifest'); const kc = new k8s.KubeConfig(); kc.loadFromDefault(); @@ -509,13 +509,13 @@ export async 
function waitForPodReady(build: Build) { const logCtx = { namespace, repo: fullName, branch: branchName, sha }; let retries = 0; - getLogger(logCtx).info('Pods: waiting for creation'); + getLogger(logCtx).info('Deploy: waiting for pods state=creation'); // eslint-disable-next-line no-constant-condition while (true) { const pods = await getPods({ uuid, namespace }); if (pods.length > 0) { - getLogger(logCtx).info('Pods: created'); + getLogger(logCtx).info('Deploy: pods created'); break; } else if (retries < 60) { retries += 1; @@ -528,7 +528,7 @@ export async function waitForPodReady(build: Build) { retries = 0; - getLogger(logCtx).info('Pods: waiting for ready state'); + getLogger(logCtx).info('Deploy: waiting for pods state=ready'); // eslint-disable-next-line no-constant-condition while (true) { let isReady = false; @@ -549,7 +549,7 @@ export async function waitForPodReady(build: Build) { } if (isReady) { - getLogger(logCtx).info('Pods: ready'); + getLogger(logCtx).info('Deploy: pods ready'); return true; } if (retries < 180) { @@ -572,7 +572,7 @@ export async function deleteBuild(build: Build) { await shellPromise( `kubectl delete all,pvc,mapping,Httpscaledobjects -l lc_uuid=${build.uuid} --namespace ${build.namespace}` ); - getLogger({ namespace: build.namespace }).info('Kubernetes: resources deleted'); + getLogger({ namespace: build.namespace }).info('Deploy: resources deleted'); } catch (e) { getLogger({ namespace: build.namespace, @@ -590,10 +590,10 @@ export async function deleteNamespace(name: string) { try { await shellPromise(`kubectl delete ns ${name} --grace-period 120`); - getLogger({ namespace: name }).info('Namespace: deleted'); + getLogger({ namespace: name }).info('Deploy: namespace deleted'); } catch (e) { if (e.includes('Error from server (NotFound): namespaces')) { - getLogger({ namespace: name }).info('Namespace: not found, skipping'); + getLogger({ namespace: name }).info('Deploy: namespace skipped reason=notFound'); } else { getLogger({ namespace: name, error: e }).error('Error deleting namespace'); } @@ -649,7 +649,7 @@ export function generateManifest({ const manifest = `${disks}---\n${builds}---\n${nodePorts}---\n${grpcMappings}---\n${loadBalancers}---\n${externalNameServices}`; const isDev = APP_ENV?.includes('dev') ?? 
false; if (!isDev) { - getLogger({ manifest }).info('Generated kubernetes manifest'); + getLogger({ manifest }).info('Manifest: generated'); } return manifest; } @@ -1191,7 +1191,7 @@ export function generateDeployManifests( 'tags.datadoghq.com/version': buildUUID, }; - if (build.isStatic) getLogger().info('Building static environment'); + if (build.isStatic) getLogger().info('Build: static environment=true'); const yamlManifest = yaml.dump( { @@ -1639,7 +1639,7 @@ export async function patchIngress(ingressName: string, bannerSnippet: any, name `kubectl patch ingress ${ingressName} --namespace ${namespace} --type merge --patch-file ${localPath}` ); - getLogger({ ingressName, namespace }).info('Successfully patched ingress'); + getLogger({ ingressName, namespace }).info('Deploy: ingress patched'); } catch (error) { getLogger({ ingressName, namespace, error }).warn('Unable to patch ingress, banner might not work'); throw error; @@ -1725,7 +1725,7 @@ export function generateDeployManifest({ }, }); } else { - getLogger().info('No manifest generated for deploy'); + getLogger().info('Manifest: skipped reason=empty'); return ''; } } @@ -2160,7 +2160,7 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { const logCtx = { deployUuid: uuid, service: deployableName, namespace }; let retries = 0; - getLogger(logCtx).info('Waiting for pods'); + getLogger(logCtx).info('Deploy: waiting for pods'); while (retries < 60) { const k8sApi = getK8sApi(); @@ -2215,7 +2215,7 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { }); if (allReady) { - getLogger({ ...logCtx, podCount: pods.length }).info('Pods ready'); + getLogger({ ...logCtx, podCount: pods.length }).info('Deploy: pods ready'); return true; } diff --git a/src/server/lib/queueManager.ts b/src/server/lib/queueManager.ts index 62e6c98..b4f8faf 100644 --- a/src/server/lib/queueManager.ts +++ b/src/server/lib/queueManager.ts @@ -122,6 +122,6 @@ export default class QueueManager { } } } - getLogger().info('All queues closed successfully'); + getLogger().info('Queue: closed'); } } diff --git a/src/server/lib/redisClient.ts b/src/server/lib/redisClient.ts index a0e71ff..727c9a8 100644 --- a/src/server/lib/redisClient.ts +++ b/src/server/lib/redisClient.ts @@ -93,7 +93,7 @@ export class RedisClient { public async close(): Promise { try { await Promise.all([this.redis.quit(), this.subscriber.quit(), this.bullConn.quit()]); - getLogger().info('All Redis connections closed successfully'); + getLogger().info('Redis: closed'); } catch (error) { getLogger().warn({ error }, 'Error closing Redis connections, forcing disconnect'); this.redis.disconnect(); diff --git a/src/server/lib/webhook/index.ts b/src/server/lib/webhook/index.ts index 7ce4a62..c0e5483 100644 --- a/src/server/lib/webhook/index.ts +++ b/src/server/lib/webhook/index.ts @@ -99,7 +99,7 @@ export async function executeCommandWebhook( async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Promise { const executionId = nanoid(); getLogger().info( - `Starting ${jobConfig.webhookType} webhook: webhookName=${jobConfig.webhookName} executionId=${executionId}` + `Webhook: starting type=${jobConfig.webhookType} name=${jobConfig.webhookName} executionId=${executionId}` ); try { @@ -115,7 +115,7 @@ async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Pro const jobResult = await waitForJobAndGetLogs(job.metadata.name, jobConfig.namespace, `[WEBHOOK ${build.uuid}]`); getLogger().info( - `Webhook execution completed: 
webhookName=${jobConfig.webhookName} success=${jobResult.success} status=${jobResult.status}` + `Webhook: completed name=${jobConfig.webhookName} success=${jobResult.success} status=${jobResult.status}` ); return { diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts index d5ab400..14afc8e 100644 --- a/src/server/services/activityStream.ts +++ b/src/server/services/activityStream.ts @@ -140,7 +140,7 @@ export default class ActivityStream extends BaseService { try { if (isRedeployRequested) { - getLogger().info('Redeploy triggered from comment edit'); + getLogger().info('Deploy: redeploy reason=commentEdit'); await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId, runUUID: runUuid, @@ -420,7 +420,7 @@ export default class ActivityStream extends BaseService { if (targetGithubRepositoryId) { getLogger().info( - `Repo-filtered GitHub deployments: processing ${deploysForGithubDeployment.length}/${deploys.length} deploys` + `Deploy: filtered deployCount=${deploysForGithubDeployment.length} totalCount=${deploys.length} targetRepoId=${targetGithubRepositoryId}` ); } @@ -662,9 +662,7 @@ export default class ActivityStream extends BaseService { const isDeployedWithActiveErrors = isDeployed && hasErroringActiveDeploys; if (isDeployedWithActiveErrors) { const deployStatuses = deploys.map(({ branchName, uuid, status }) => ({ branchName, uuid, status })); - getLogger().info( - `Deployed build has erroring deploys: ${JSON.stringify(deployStatuses)} buildStatus=${buildStatus}` - ); + getLogger().info(`Build: deployedWithErrors status=${buildStatus} deploys=${JSON.stringify(deployStatuses)}`); metrics .increment('deployWithErrors') .event('Deploy Finished with Erroring Deploys', `${eventDetails.description} with erroring deploys`); @@ -1199,7 +1197,7 @@ export default class ActivityStream extends BaseService { if (fastlyServiceId) { await this.fastly.purgeAllServiceCache(fastlyServiceId, uuid, 'fastly'); } - getLogger().info(`Fastly purgeFastlyServiceCache success fastlyServiceId=${fastlyServiceId}`); + getLogger().info(`Fastly: purged serviceId=${fastlyServiceId}`); } catch (error) { getLogger().error({ error }, 'Fastly purgeFastlyServiceCache error'); } diff --git a/src/server/services/build.ts b/src/server/services/build.ts index f62b3f1..db16bac 100644 --- a/src/server/services/build.ts +++ b/src/server/services/build.ts @@ -83,7 +83,7 @@ export default class BuildService extends BaseService { if (!buildId) { getLogger().error('No build ID found for cleanup'); } - getLogger().info('Queuing build for deletion'); + getLogger().info('Build: queuing action=delete'); await this.db.services.BuildService.deleteQueue.add('delete', { buildId, ...extractContextForQueue() }); } } @@ -619,7 +619,7 @@ export default class BuildService extends BaseService { githubDeployments, namespace: `env-${uuid}`, })); - getLogger().info(`Created build for pull request: branch=${options.repositoryBranchName}`); + getLogger().info(`Build: created branch=${options.repositoryBranchName}`); return build; } @@ -716,7 +716,7 @@ export default class BuildService extends BaseService { buildId: build.id, ...extractContextForQueue(), }); - getLogger().info('Deleted build'); + getLogger().info('Build: deleted'); await this.updateStatusAndComment(build, BuildStatus.TORN_DOWN, build.runUUID, true, true).catch((error) => { getLogger().warn({ error }, `Failed to update status to ${BuildStatus.TORN_DOWN}`); }); @@ -812,7 +812,7 @@ export default class BuildService 
extends BaseService { await deploy.$query().patch({ status: DeployStatus.BUILT }); } const configUUIDs = configDeploys.map((deploy) => deploy?.uuid).join(','); - getLogger().info(`Config deploys marked built: ${configUUIDs}`); + getLogger().info(`Build: config deploys marked built uuids=${configUUIDs}`); } catch (error) { getLogger().error({ error }, 'Failed to update configuration type deploy as built'); } @@ -868,7 +868,7 @@ export default class BuildService extends BaseService { return false; }); - if (!result) getLogger().info(`CLI deploy unsuccessful: deployUuid=${deploy.uuid}`); + if (!result) getLogger().info(`CLI: deploy failed deployUuid=${deploy.uuid}`); return result; }) ) @@ -954,7 +954,7 @@ export default class BuildService extends BaseService { } const result = await this.db.services.Deploy.buildImage(deploy, build.enableFullYaml, index); getLogger().debug(`buildImage completed: deployUuid=${deploy.uuid} result=${result}`); - if (!result) getLogger().info(`Build image unsuccessful: deployUuid=${deploy.uuid}`); + if (!result) getLogger().info(`Build: image failed deployUuid=${deploy.uuid}`); return result; }) ); @@ -1217,9 +1217,9 @@ export default class BuildService extends BaseService { updateLogContext({ buildUuid: build.uuid }); } - getLogger({ stage: LogStage.CLEANUP_STARTING }).info('Deleting build'); + getLogger({ stage: LogStage.CLEANUP_STARTING }).info('Build: deleting'); await this.db.services.BuildService.deleteBuild(build); - getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info('Build deleted'); + getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info('Build: deleted'); } catch (error) { getLogger({ stage: LogStage.CLEANUP_FAILED }).error( { error }, @@ -1247,7 +1247,7 @@ export default class BuildService extends BaseService { updateLogContext({ buildUuid: build.uuid }); } - getLogger({ stage: LogStage.BUILD_STARTING }).info('Build started'); + getLogger({ stage: LogStage.BUILD_STARTING }).info('Build: started'); await build?.$fetchGraph('[pullRequest, environment]'); await build.pullRequest.$fetchGraph('[repository]'); @@ -1262,7 +1262,7 @@ export default class BuildService extends BaseService { githubRepositoryId ); - getLogger({ stage: LogStage.BUILD_COMPLETE }).info('Build completed'); + getLogger({ stage: LogStage.BUILD_COMPLETE }).info('Build: completed'); } catch (error) { if (error instanceof ParsingError || error instanceof ValidationError) { this.updateStatusAndComment(build, BuildStatus.CONFIG_ERROR, build?.runUUID, true, true, error); @@ -1302,10 +1302,10 @@ export default class BuildService extends BaseService { updateLogContext({ buildUuid: build.uuid }); } - getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build queued'); + getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: queued'); if (!build.pullRequest.deployOnUpdate) { - getLogger().info('Skipping: deployOnUpdate disabled'); + getLogger().info('Deploy: skipping reason=deployOnUpdateDisabled'); return; } // Enqueue a standard resolve build diff --git a/src/server/services/deploy.ts b/src/server/services/deploy.ts index b69ed17..e51daee 100644 --- a/src/server/services/deploy.ts +++ b/src/server/services/deploy.ts @@ -152,7 +152,7 @@ export default class DeployService extends BaseService { ).catch((error) => { getLogger().error({ error }, 'Failed to create deploys from deployables'); }); - getLogger().info('Deploys initialized'); + getLogger().info('Deploy: initialized'); } else { const serviceInitFunc = async (service: Service, active: boolean): Promise => { const newDeploys: Deploy[] = []; 
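// A minimal sketch (not a hunk of this patch) of the context-aware logging pattern the surrounding
// changes converge on. It assumes the getLogger/withLogContext/updateLogContext helpers exported
// from 'server/lib/logger/index'; their shapes here are inferred from how these patches use them,
// not from the module itself, and the field values are hypothetical.
import { getLogger, withLogContext, updateLogContext } from 'server/lib/logger/index';

async function deployWithContext(deploy: { uuid: string; serviceName: string }): Promise<void> {
  // Fields passed to withLogContext ride along on every log line emitted inside the callback,
  // so individual call sites no longer repeat buildUuid/deployUuid by hand.
  await withLogContext({ deployUuid: deploy.uuid, serviceName: deploy.serviceName }, async () => {
    // Messages follow the single-line "Component: action key=value" shape used for log aggregation.
    getLogger().info('Deploy: initialized');

    // Identifiers discovered mid-flow can be appended to the active context for later lines.
    updateLogContext({ pipelineId: 'hypothetical-pipeline-id' });

    // Per-call fields are still merged in through the getLogger(context) argument.
    getLogger({ namespace: `env-${deploy.uuid}` }).info('Deploy: pods ready');
  });
}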
@@ -180,7 +180,7 @@ export default class DeployService extends BaseService { ); }) ); - getLogger().info(`Deploys created: count=${newDeploys.length}`); + getLogger().info(`Deploy: created count=${newDeploys.length}`); return newDeploys; }; @@ -383,13 +383,13 @@ export default class DeployService extends BaseService { } if ((deploy.status === DeployStatus.BUILT || deploy.status === DeployStatus.READY) && deploy.cname) { - getLogger().info(`Aurora: already built, skipping`); + getLogger().info('Aurora: skipped reason=alreadyBuilt'); return true; } const existingDbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name); if (existingDbEndpoint) { - getLogger().info(`Aurora: exists, skipping`); + getLogger().info('Aurora: skipped reason=exists'); await deploy.$query().patch({ cname: existingDbEndpoint, status: DeployStatus.BUILT, @@ -471,7 +471,7 @@ export default class DeployService extends BaseService { getLogger().warn({ error }, 'Failed to update activity feed'); } ); - getLogger().info('Codefresh: no changes, marked built'); + getLogger().info('Codefresh: skipped reason=noChanges status=built'); result = true; } else { let buildLogs: string; @@ -489,7 +489,7 @@ export default class DeployService extends BaseService { getLogger().error({ error }, 'Failed to receive codefresh build id'); return null; }); - getLogger().info('Codefresh: build triggered'); + getLogger().info('Codefresh: triggered'); if (codefreshBuildId != null) { buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`; @@ -505,10 +505,10 @@ export default class DeployService extends BaseService { ).catch((error) => { getLogger().warn({ error }, 'Failed to update activity feed'); }); - getLogger().info(`Codefresh: waiting for build url=${buildLogs}`); + getLogger().info(`Codefresh: waiting url=${buildLogs}`); await cli.waitForCodefresh(codefreshBuildId); const buildOutput = await getLogs(codefreshBuildId); - getLogger().info('Codefresh: build completed'); + getLogger().info('Codefresh: completed'); await this.patchAndUpdateActivityFeed( deploy, { @@ -740,7 +740,7 @@ export default class DeployService extends BaseService { }, runUUID ); - getLogger().info('Image: public, marked built'); + getLogger().info('Image: skipped reason=public status=built'); return true; case DeployTypes.HELM: { try { @@ -887,7 +887,7 @@ export default class DeployService extends BaseService { if (deploy.branchName === null) { // This means we're using an external host, rather than building from source. 
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID); - getLogger().info('Deploy is marked ready for external Host'); + getLogger().info('Deploy: ready reason=externalHost'); } else { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID); @@ -987,7 +987,7 @@ export default class DeployService extends BaseService { }; if (['buildkit', 'kaniko'].includes(deployable.builder?.engine)) { - getLogger().info(`Image: building (${deployable.builder.engine})`); + getLogger().info(`Image: building engine=${deployable.builder.engine}`); const nativeOptions = { ...buildOptions, @@ -1023,7 +1023,7 @@ export default class DeployService extends BaseService { } } - getLogger().info('Image: building (Codefresh)'); + getLogger().info('Image: building engine=codefresh'); const buildPipelineId = await codefresh.buildImage(buildOptions); const buildLogs = `https://g.codefresh.io/build/${buildPipelineId}`; @@ -1043,7 +1043,7 @@ export default class DeployService extends BaseService { return false; } } else { - getLogger().info('Image: exists, skipping build'); + getLogger().info('Image: skipped reason=exists'); await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain }); await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID); return true; @@ -1065,7 +1065,7 @@ export default class DeployService extends BaseService { const dependentDeploy = deploys.find((d) => d.uuid === waitingForService); if (dependentDeploy.uuid === waitingForService) { - getLogger().info({ waitingFor: waitingForService }, 'Waiting for service to complete'); + getLogger().info(`Build: waiting service=${waitingForService}`); await this.patchAndUpdateActivityFeed( deploy, @@ -1105,7 +1105,7 @@ export default class DeployService extends BaseService { // about the output of that build, we can just pass an empty string as the pattern if (!item.pattern || item.pattern.trim() === '') { extractedValues[item.envKey] = ''; - getLogger().info({ envKey: item.envKey }, 'Empty pattern, assuming build dependency'); + getLogger().info(`Build: dependency envKey=${item.envKey} pattern=empty`); return; } @@ -1120,8 +1120,7 @@ export default class DeployService extends BaseService { ); } else { getLogger().info( - { pattern: item.pattern, serviceName, pipelineId, envKey: item.envKey }, - 'No match found for pattern, value will be empty' + `Build: noMatch pattern=${item.pattern} service=${serviceName} pipelineId=${pipelineId} envKey=${item.envKey}` ); } }); diff --git a/src/server/services/github.ts b/src/server/services/github.ts index 56dd5fc..6bf8572 100644 --- a/src/server/services/github.ts +++ b/src/server/services/github.ts @@ -56,7 +56,7 @@ export default class GithubService extends Service { labels, }, }: PullRequestEvent) { - getLogger({}).info(`PR: ${action} ${fullName}/${branch}`); + getLogger({}).info(`PR: ${action} repo=${fullName} branch=${branch}`); const isOpened = [GithubPullRequestActions.OPENED, GithubPullRequestActions.REOPENED].includes( action as GithubPullRequestActions ); @@ -181,7 +181,7 @@ export default class GithubService extends Service { if (!pullRequest || isBot) return; await pullRequest.$fetchGraph('[build, repository]'); - getLogger().info(`PR: comment edited by ${commentCreatorUsername}`); + getLogger().info(`PR: edited by=${commentCreatorUsername}`); await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body); } catch (error) { getLogger().error({ error }, `Unable to handle Github Issue 
Comment event`); @@ -219,7 +219,7 @@ export default class GithubService extends Service { status, autoDeploy: false, }); - getLogger().info(`Labels: ${action}${labels.length ? ` [${labels.map(({ name }) => name).join(', ')}]` : ''}`); + getLogger().info(`Label: ${action} labels=[${labels.map(({ name }) => name).join(',')}]`); if (pullRequest.deployOnUpdate === false) { // when pullRequest.deployOnUpdate is false, it means that there is no `lifecycle-deploy!` label @@ -305,12 +305,14 @@ export default class GithubService extends Service { hasFailedDeploys = failedDeploys.length > 0; if (hasFailedDeploys) { - getLogger().info(`Push: ${failedDeploys.length} failed deploys, full redeploy ${repoName}/${branchName}`); + getLogger().info( + `Push: redeploying reason=failedDeploys count=${failedDeploys.length} repo=${repoName} branch=${branchName}` + ); } } if (!hasFailedDeploys) { - getLogger().info(`Push: deploying ${repoName}/${branchName}`); + getLogger().info(`Push: deploying repo=${repoName} branch=${branchName}`); } await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { @@ -357,7 +359,7 @@ export default class GithubService extends Service { if (!build) return; - getLogger().info('Push: redeploying static env'); + getLogger().info(`Push: redeploying reason=staticEnv`); await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId: build?.id, ...extractContextForQueue(), diff --git a/src/server/services/globalConfig.ts b/src/server/services/globalConfig.ts index 87c4c97..78c71a2 100644 --- a/src/server/services/globalConfig.ts +++ b/src/server/services/globalConfig.ts @@ -216,7 +216,7 @@ export default class GlobalConfigService extends BaseService { return withLogContext({ correlationId: correlationId || `cache-refresh-${Date.now()}` }, async () => { try { - getLogger({ stage: LogStage.CONFIG_REFRESH }).info('Refreshing GlobalConfig and Github cache'); + getLogger({ stage: LogStage.CONFIG_REFRESH }).info('Config: refreshing type=global_config,github_token'); await this.getAllConfigs(true); await this.getGithubClientToken(true); getLogger({ stage: LogStage.CONFIG_REFRESH }).debug('GlobalConfig and Github cache refreshed successfully'); @@ -236,7 +236,7 @@ export default class GlobalConfigService extends BaseService { async setConfig(key: string, value: any): Promise { try { await this.db.knex('global_config').insert({ key, config: value }).onConflict('key').merge(); - getLogger().info(`Set global config value: key=${key}`); + getLogger().info(`Config: set key=${key}`); } catch (err: any) { getLogger().error({ error: err }, `Error setting global config value: key=${key}`); throw err; From 6b35d489d19f231a618eb3ed355ba8858e71ad84 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 18:45:51 -0800 Subject: [PATCH 10/23] centralize buildUuid in log context --- src/pages/api/v1/admin/ttl/cleanup.ts | 2 +- src/pages/api/v1/ai/chat.ts | 296 +++++++++--------- src/pages/api/v1/builds/[uuid]/deploy.ts | 13 +- src/pages/api/v1/builds/[uuid]/graph.ts | 47 +-- src/pages/api/v1/builds/[uuid]/index.ts | 46 +-- .../v1/builds/[uuid]/jobs/[jobName]/events.ts | 61 ++-- .../v1/builds/[uuid]/jobs/[jobName]/logs.ts | 15 +- .../v1/builds/[uuid]/services/[name]/build.ts | 10 +- .../[uuid]/services/[name]/buildLogs.ts | 55 ++-- .../services/[name]/buildLogs/[jobName].ts | 13 +- .../services/[name]/deployLogs/[jobName].ts | 15 +- .../[uuid]/services/[name]/deployment.ts | 74 ++--- .../[uuid]/services/[name]/logs/[jobName].ts | 71 ++--- 
src/pages/api/v1/builds/[uuid]/torndown.ts | 75 ++--- src/pages/api/v1/builds/[uuid]/webhooks.ts | 41 ++- src/server/lib/cli.ts | 21 +- src/server/services/codefresh.ts | 5 +- src/server/services/deployable.ts | 2 +- src/server/services/label.ts | 17 +- src/server/services/override.ts | 13 +- src/server/services/ttlCleanup.ts | 97 +++--- src/server/services/webhook.ts | 28 +- 22 files changed, 509 insertions(+), 508 deletions(-) diff --git a/src/pages/api/v1/admin/ttl/cleanup.ts b/src/pages/api/v1/admin/ttl/cleanup.ts index 55d025d..50449cc 100644 --- a/src/pages/api/v1/admin/ttl/cleanup.ts +++ b/src/pages/api/v1/admin/ttl/cleanup.ts @@ -197,7 +197,7 @@ async function triggerTTLCleanup(req: NextApiRequest, res: NextApiResponse) { const job = await ttlCleanupService.ttlCleanupQueue.add('manual-ttl-cleanup', { dryRun, correlationId }); getLogger({ stage: LogStage.CLEANUP_STARTING }).info( - `TTL cleanup job triggered manually: jobId=${job.id} dryRun=${dryRun}` + `TTL: cleanup job triggered manually jobId=${job.id} dryRun=${dryRun}` ); return res.status(200).json({ diff --git a/src/pages/api/v1/ai/chat.ts b/src/pages/api/v1/ai/chat.ts index 39b9800..b1cfdac 100644 --- a/src/pages/api/v1/ai/chat.ts +++ b/src/pages/api/v1/ai/chat.ts @@ -20,7 +20,7 @@ import AIAgentContextService from 'server/services/ai/context/gatherer'; import AIAgentConversationService from 'server/services/ai/conversation/storage'; import AIAgentService from 'server/services/aiAgent'; import GlobalConfigService from 'server/services/globalConfig'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; export default async function handler(req: NextApiRequest, res: NextApiResponse) { if (req.method !== 'POST') { @@ -54,177 +54,171 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) }); } - const aiAgentContextService = new AIAgentContextService(defaultDb, defaultRedis); - const conversationService = new AIAgentConversationService(defaultDb, defaultRedis); - const llmService = new AIAgentService(defaultDb, defaultRedis); + return withLogContext({ buildUuid }, async () => { + const aiAgentContextService = new AIAgentContextService(defaultDb, defaultRedis); + const conversationService = new AIAgentConversationService(defaultDb, defaultRedis); + const llmService = new AIAgentService(defaultDb, defaultRedis); - try { - if (provider && modelId) { - await llmService.initializeWithMode('investigate', provider, modelId); - } else { - await llmService.initialize(); + try { + if (provider && modelId) { + await llmService.initializeWithMode('investigate', provider, modelId); + } else { + await llmService.initialize(); + } + } catch (error) { + getLogger().error({ error }, 'Failed to initialize LLM service'); + res.write( + `data: ${JSON.stringify({ + error: error.message, + code: 'LLM_INIT_ERROR', + })}\n\n` + ); + return res.end(); } - } catch (error) { - getLogger({ buildUuid }).error({ error }, 'Failed to initialize LLM service'); - res.write( - `data: ${JSON.stringify({ - error: error.message, - code: 'LLM_INIT_ERROR', - })}\n\n` - ); - return res.end(); - } - if (clearHistory) { - await conversationService.clearConversation(buildUuid); - } + if (clearHistory) { + await conversationService.clearConversation(buildUuid); + } - const conversation = await conversationService.getConversation(buildUuid); - const conversationHistory = conversation?.messages || []; + const conversation = await conversationService.getConversation(buildUuid); 
+ const conversationHistory = conversation?.messages || []; - let context; - try { - context = await aiAgentContextService.gatherFullContext(buildUuid); - } catch (error) { - getLogger({ buildUuid }).error({ error }, 'Failed to gather context'); - res.write( - `data: ${JSON.stringify({ - error: `Build not found: ${error.message}`, - code: 'CONTEXT_ERROR', - })}\n\n` - ); - return res.end(); - } + let context; + try { + context = await aiAgentContextService.gatherFullContext(buildUuid); + } catch (error) { + getLogger().error({ error }, 'Failed to gather context'); + res.write( + `data: ${JSON.stringify({ + error: `Build not found: ${error.message}`, + code: 'CONTEXT_ERROR', + })}\n\n` + ); + return res.end(); + } - let aiResponse = ''; - let isJsonResponse = false; - let totalInvestigationTimeMs = 0; - try { - const mode = await llmService.classifyUserIntent(message, conversationHistory); - getLogger({ buildUuid }).info(`Classified user intent as: ${mode}`); - - const result = await llmService.processQueryStream( - message, - context, - conversationHistory, - (chunk) => { - res.write(`data: ${JSON.stringify({ type: 'chunk', content: chunk })}\n\n`); - }, - (activity) => { - res.write(`data: ${JSON.stringify(activity)}\n\n`); - if (typeof (res as any).flush === 'function') { - (res as any).flush(); - } - }, - undefined, - mode - ); + let aiResponse = ''; + let isJsonResponse = false; + let totalInvestigationTimeMs = 0; + try { + const mode = await llmService.classifyUserIntent(message, conversationHistory); + getLogger().info(`AI: classified user intent mode=${mode}`); + + const result = await llmService.processQueryStream( + message, + context, + conversationHistory, + (chunk) => { + res.write(`data: ${JSON.stringify({ type: 'chunk', content: chunk })}\n\n`); + }, + (activity) => { + res.write(`data: ${JSON.stringify(activity)}\n\n`); + if (typeof (res as any).flush === 'function') { + (res as any).flush(); + } + }, + undefined, + mode + ); - aiResponse = result.response; - isJsonResponse = result.isJson; - totalInvestigationTimeMs = result.totalInvestigationTimeMs; - - // If this is a JSON investigation response, send it as a special complete_json event - // This ensures frontend receives the cleaned JSON instead of trying to parse accumulated chunks - if (isJsonResponse) { - // Inject repository info into the JSON response for GitHub links - try { - const parsed = JSON.parse(aiResponse); - if (parsed.type === 'investigation_complete' && context.lifecycleContext?.pullRequest) { - const fullName = context.lifecycleContext.pullRequest.fullName; - const branch = context.lifecycleContext.pullRequest.branch; - if (fullName && branch) { - const [owner, name] = fullName.split('/'); - parsed.repository = { owner, name, branch }; - - // Ensure all string fields are properly escaped for JSON serialization - const sanitizeForJson = (obj: any): any => { - if (typeof obj === 'string') { - // Already properly escaped by backend - no need to re-escape - return obj; - } else if (Array.isArray(obj)) { - return obj.map((item) => sanitizeForJson(item)); - } else if (obj && typeof obj === 'object') { - const sanitized: any = {}; - for (const key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - sanitized[key] = sanitizeForJson(obj[key]); + aiResponse = result.response; + isJsonResponse = result.isJson; + totalInvestigationTimeMs = result.totalInvestigationTimeMs; + + if (isJsonResponse) { + try { + const parsed = JSON.parse(aiResponse); + if (parsed.type === 'investigation_complete' && 
context.lifecycleContext?.pullRequest) { + const fullName = context.lifecycleContext.pullRequest.fullName; + const branch = context.lifecycleContext.pullRequest.branch; + if (fullName && branch) { + const [owner, name] = fullName.split('/'); + parsed.repository = { owner, name, branch }; + + const sanitizeForJson = (obj: any): any => { + if (typeof obj === 'string') { + return obj; + } else if (Array.isArray(obj)) { + return obj.map((item) => sanitizeForJson(item)); + } else if (obj && typeof obj === 'object') { + const sanitized: any = {}; + for (const key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + sanitized[key] = sanitizeForJson(obj[key]); + } } + return sanitized; } - return sanitized; - } - return obj; - }; + return obj; + }; - const sanitized = sanitizeForJson(parsed); - aiResponse = JSON.stringify(sanitized, null, 2); + const sanitized = sanitizeForJson(parsed); + aiResponse = JSON.stringify(sanitized, null, 2); - // Validate the final JSON before sending - JSON.parse(aiResponse); // This will throw if invalid + JSON.parse(aiResponse); + } } + } catch (e) { + getLogger().error( + { error: e instanceof Error ? e.message : String(e), responseLength: aiResponse.length }, + 'JSON validation failed for investigation response' + ); + aiResponse = + '⚠️ Investigation completed but response formatting failed. Please try asking a more specific question.'; + isJsonResponse = false; } - } catch (e) { - getLogger({ buildUuid }).error( - { error: e instanceof Error ? e.message : String(e), responseLength: aiResponse.length }, - 'JSON validation failed for investigation response' - ); - // Convert to plain text message - aiResponse = - '⚠️ Investigation completed but response formatting failed. Please try asking a more specific question.'; - isJsonResponse = false; // Treat as plain text - } - if (isJsonResponse) { + if (isJsonResponse) { + res.write( + `data: ${JSON.stringify({ type: 'complete_json', content: aiResponse, totalInvestigationTimeMs })}\n\n` + ); + } + } + } catch (error: any) { + getLogger().error({ error }, 'LLM query failed'); + + if ( + error?.status === 429 || + error?.error?.error?.type === 'rate_limit_error' || + error?.message?.includes('RATE_LIMIT_EXCEEDED') || + error?.message?.includes('quota exceeded') + ) { + res.write( + `data: ${JSON.stringify({ + error: + 'Rate limit exceeded. Please wait a moment and try again. The AI service is currently handling many requests.', + code: 'RATE_LIMIT_EXCEEDED', + retryAfter: 60, + })}\n\n` + ); + } else { res.write( - `data: ${JSON.stringify({ type: 'complete_json', content: aiResponse, totalInvestigationTimeMs })}\n\n` + `data: ${JSON.stringify({ + error: error?.message || error?.toString() || 'AI service error', + code: 'LLM_API_ERROR', + })}\n\n` ); } + res.end(); + return; } - } catch (error: any) { - getLogger({ buildUuid }).error({ error }, 'LLM query failed'); - - // Check if it's a rate limit error - if ( - error?.status === 429 || - error?.error?.error?.type === 'rate_limit_error' || - error?.message?.includes('RATE_LIMIT_EXCEEDED') || - error?.message?.includes('quota exceeded') - ) { - res.write( - `data: ${JSON.stringify({ - error: - 'Rate limit exceeded. Please wait a moment and try again. 
The AI service is currently handling many requests.', - code: 'RATE_LIMIT_EXCEEDED', - retryAfter: 60, - })}\n\n` - ); - } else { - res.write( - `data: ${JSON.stringify({ - error: error?.message || error?.toString() || 'AI service error', - code: 'LLM_API_ERROR', - })}\n\n` - ); - } - res.end(); - return; - } - await conversationService.addMessage(buildUuid, { - role: 'user', - content: message, - timestamp: Date.now(), - isSystemAction, - }); + await conversationService.addMessage(buildUuid, { + role: 'user', + content: message, + timestamp: Date.now(), + isSystemAction, + }); - await conversationService.addMessage(buildUuid, { - role: 'assistant', - content: aiResponse, - timestamp: Date.now(), - }); + await conversationService.addMessage(buildUuid, { + role: 'assistant', + content: aiResponse, + timestamp: Date.now(), + }); - res.write(`data: ${JSON.stringify({ type: 'complete', totalInvestigationTimeMs })}\n\n`); - res.end(); + res.write(`data: ${JSON.stringify({ type: 'complete', totalInvestigationTimeMs })}\n\n`); + res.end(); + }); } catch (error: any) { getLogger().error({ error }, 'Unexpected error in AI agent chat'); res.write(`data: ${JSON.stringify({ error: error?.message || 'Internal error' })}\n\n`); diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts index e8c20d7..52b8adc 100644 --- a/src/pages/api/v1/builds/[uuid]/deploy.ts +++ b/src/pages/api/v1/builds/[uuid]/deploy.ts @@ -85,16 +85,15 @@ import BuildService from 'server/services/build'; // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { const correlationId = `api-redeploy-${Date.now()}-${nanoid(8)}`; + const { uuid } = req.query; - return withLogContext({ correlationId }, async () => { + return withLogContext({ correlationId, buildUuid: uuid as string }, async () => { if (req.method !== 'POST') { return res.status(405).json({ error: `${req.method} is not allowed` }); } - const { uuid } = req.query; - try { - getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Redeploy requested for build ${uuid}`); + getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Build: redeploy requested uuid=${uuid}`); const buildService = new BuildService(); const build: Build = await buildService.db.models.Build.query() @@ -102,7 +101,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { .withGraphFetched('deploys.deployable'); if (!build) { - getLogger({ buildUuid: uuid as string }).debug('Build not found'); + getLogger().debug('Build not found'); return res.status(404).json({ error: `Build not found for ${uuid}` }); } @@ -114,14 +113,14 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { correlationId, }); - getLogger({ stage: LogStage.BUILD_QUEUED, buildUuid: build.uuid }).info(`Redeploy queued for build ${uuid}`); + getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Build: redeploy queued uuid=${uuid}`); return res.status(200).json({ status: 'success', message: `Redeploy for build ${uuid} has been queued`, }); } catch (error) { - getLogger({ stage: LogStage.BUILD_FAILED }).error({ error }, `Unable to proceed with redeploy for build ${uuid}`); + getLogger({ stage: LogStage.BUILD_FAILED, error }).error(`Unable to proceed with redeploy for build ${uuid}`); return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` }); } }); diff --git a/src/pages/api/v1/builds/[uuid]/graph.ts b/src/pages/api/v1/builds/[uuid]/graph.ts index da5d776..cfff0a8 100644 --- 
a/src/pages/api/v1/builds/[uuid]/graph.ts +++ b/src/pages/api/v1/builds/[uuid]/graph.ts @@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next/types'; import { generateGraph } from 'server/lib/dependencyGraph'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import { Build } from 'server/models'; import BuildService from 'server/services/build'; @@ -84,30 +84,31 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const { uuid } = req.query; - try { - const buildService = new BuildService(); + return withLogContext({ buildUuid: uuid as string }, async () => { + try { + const buildService = new BuildService(); - const build: Build = await buildService.db.models.Build.query() - .findOne({ - uuid, - }) - .withGraphFetched('[deploys.deployable, deployables]'); + const build: Build = await buildService.db.models.Build.query() + .findOne({ + uuid, + }) + .withGraphFetched('[deploys.deployable, deployables]'); - if (Object.keys(build.dependencyGraph).length === 0) { - // generate the graph if it does not exist - const dependencyGraph = await generateGraph(build, 'TB'); - await build.$query().patchAndFetch({ - dependencyGraph, + if (Object.keys(build.dependencyGraph).length === 0) { + const dependencyGraph = await generateGraph(build, 'TB'); + await build.$query().patchAndFetch({ + dependencyGraph, + }); + } + + return res.status(200).json({ + status: 'success', + message: `Dependency graph for ${uuid} returned.`, + dependencyGraph: build.dependencyGraph, }); + } catch (error) { + getLogger().error({ error }, 'Error fetching dependency graph'); + res.status(500).json({ error: 'An unexpected error occurred.' }); } - - return res.status(200).json({ - status: 'success', - message: `Dependency graph for ${uuid} returned.`, - dependencyGraph: build.dependencyGraph, - }); - } catch (error) { - getLogger({ buildUuid: uuid as string }).error({ error }, 'Error fetching dependency graph'); - res.status(500).json({ error: 'An unexpected error occurred.' 
}); - } + }); }; diff --git a/src/pages/api/v1/builds/[uuid]/index.ts b/src/pages/api/v1/builds/[uuid]/index.ts index c7071f9..e2a6c39 100644 --- a/src/pages/api/v1/builds/[uuid]/index.ts +++ b/src/pages/api/v1/builds/[uuid]/index.ts @@ -22,9 +22,8 @@ import BuildService from 'server/services/build'; import OverrideService from 'server/services/override'; async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) { - const { uuid } = req.query; - try { + const { uuid } = req.query; const buildService = new BuildService(); const build = await buildService.db.models.Build.query() @@ -48,13 +47,13 @@ async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) { ); if (!build) { - getLogger({ buildUuid: uuid as string }).debug('Build not found'); + getLogger().debug('Build not found'); return res.status(404).json({ error: 'Build not found' }); } return res.status(200).json(build); } catch (error) { - getLogger({ buildUuid: uuid as string }).error({ error }, 'Error fetching build'); + getLogger({ error }).error('Error fetching build'); return res.status(500).json({ error: 'An unexpected error occurred' }); } } @@ -64,7 +63,7 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlatio const { uuid: newUuid } = req.body; if (!newUuid || typeof newUuid !== 'string') { - getLogger({ buildUuid: uuid as string }).debug('Missing or invalid uuid in request body'); + getLogger().debug('Missing or invalid uuid in request body'); return res.status(400).json({ error: 'uuid is required' }); } @@ -74,25 +73,25 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlatio const build: Build = await override.db.models.Build.query().findOne({ uuid }).withGraphFetched('pullRequest'); if (!build) { - getLogger({ buildUuid: uuid as string }).debug('Build not found, cannot patch uuid'); + getLogger().debug('Build not found, cannot patch uuid'); return res.status(404).json({ error: 'Build not found' }); } if (newUuid === build.uuid) { - getLogger({ buildUuid: uuid as string }).debug(`Attempted to update UUID to same value: newUuid=${newUuid}`); + getLogger().debug(`Attempted to update UUID to same value: newUuid=${newUuid}`); return res.status(400).json({ error: 'UUID must be different' }); } const validation = await override.validateUuid(newUuid); if (!validation.valid) { - getLogger({ buildUuid: uuid as string }).debug(`UUID validation failed: error=${validation.error}`); + getLogger().debug(`UUID validation failed: error=${validation.error}`); return res.status(400).json({ error: validation.error }); } const result = await override.updateBuildUuid(build, newUuid); if (build.pullRequest?.deployOnUpdate) { - getLogger({ stage: LogStage.BUILD_QUEUED, buildUuid: build.uuid }).info(`Triggering redeploy after UUID update`); + getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Triggering redeploy after UUID update`); await new BuildService().resolveAndDeployBuildQueue.add('resolve-deploy', { buildId: build.id, runUUID: nanoid(), @@ -106,7 +105,7 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlatio }, }); } catch (error) { - getLogger({ buildUuid: uuid as string }).error({ error }, `Error updating UUID to newUuid=${newUuid}`); + getLogger({ error }).error(`Error updating UUID to newUuid=${newUuid}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } } @@ -344,18 +343,19 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(400).json({ error: 'Invalid UUID' }); } 
- // Only PATCH needs correlationId for queue operations - if (req.method === 'PATCH') { - const correlationId = `api-build-update-${Date.now()}-${nanoid(8)}`; - return withLogContext({ correlationId }, async () => { - return updateBuild(req, res, correlationId); - }); - } + return withLogContext({ buildUuid: uuid as string }, async () => { + if (req.method === 'PATCH') { + const correlationId = `api-build-update-${Date.now()}-${nanoid(8)}`; + return withLogContext({ correlationId }, async () => { + return updateBuild(req, res, correlationId); + }); + } - switch (req.method) { - case 'GET': - return retrieveBuild(req, res); - default: - return res.status(405).json({ error: `${req.method} is not allowed` }); - } + switch (req.method) { + case 'GET': + return retrieveBuild(req, res); + default: + return res.status(405).json({ error: `${req.method} is not allowed` }); + } + }); }; diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts index afc059f..8976348 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts @@ -142,7 +142,7 @@ * example: Failed to communicate with Kubernetes. */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; @@ -166,7 +166,7 @@ interface EventsResponse { events: K8sEvent[]; } -async function getJobEvents(jobName: string, namespace: string, buildUuid: string): Promise { +async function getJobEvents(jobName: string, namespace: string): Promise { const kc = new k8s.KubeConfig(); kc.loadFromDefault(); const coreV1Api = kc.makeApiClient(k8s.CoreV1Api); @@ -216,48 +216,51 @@ async function getJobEvents(jobName: string, namespace: string, buildUuid: strin return events; } catch (error) { - getLogger({ buildUuid }).error({ error }, `jobName=${jobName} Error fetching events`); + getLogger().error({ error }, `jobName=${jobName} Error fetching events`); throw error; } } const eventsHandler = async (req: NextApiRequest, res: NextApiResponse) => { const { uuid, jobName } = req.query; - const logger = getLogger({ buildUuid: uuid as string }); - if (req.method !== 'GET') { - logger.warn(`method=${req.method} Method not allowed`); - res.setHeader('Allow', ['GET']); - return res.status(405).json({ error: `${req.method} is not allowed` }); - } + return withLogContext({ buildUuid: uuid as string }, async () => { + const logger = getLogger(); - if (typeof uuid !== 'string' || typeof jobName !== 'string') { - logger.warn(`uuid=${uuid} jobName=${jobName} Missing or invalid query parameters`); - return res.status(400).json({ error: 'Missing or invalid uuid or jobName parameters' }); - } + if (req.method !== 'GET') { + logger.warn(`method=${req.method} Method not allowed`); + res.setHeader('Allow', ['GET']); + return res.status(405).json({ error: `${req.method} is not allowed` }); + } - try { - const namespace = `env-${uuid}`; + if (typeof uuid !== 'string' || typeof jobName !== 'string') { + logger.warn(`uuid=${uuid} jobName=${jobName} Missing or invalid query parameters`); + return res.status(400).json({ error: 'Missing or invalid uuid or jobName parameters' }); + } - const events = await getJobEvents(jobName, namespace, uuid); + try { + const namespace = `env-${uuid}`; - const response: EventsResponse = { - 
events, - }; + const events = await getJobEvents(jobName, namespace); - return res.status(200).json(response); - } catch (error) { - logger.error({ error }, `jobName=${jobName} Error getting events`); + const response: EventsResponse = { + events, + }; + + return res.status(200).json(response); + } catch (error) { + logger.error({ error }, `jobName=${jobName} Error getting events`); - if (error instanceof HttpError) { - if (error.response?.statusCode === 404) { - return res.status(404).json({ error: 'Environment or job not found.' }); + if (error instanceof HttpError) { + if (error.response?.statusCode === 404) { + return res.status(404).json({ error: 'Environment or job not found.' }); + } + return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' }); } - return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' }); - } - return res.status(500).json({ error: 'Internal server error occurred.' }); - } + return res.status(500).json({ error: 'Internal server error occurred.' }); + } + }); }; export default eventsHandler; diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts index 181938f..ecfdcf4 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import unifiedLogStreamHandler from '../../services/[name]/logs/[jobName]'; /** @@ -98,13 +98,14 @@ import unifiedLogStreamHandler from '../../services/[name]/logs/[jobName]'; */ export default async function handler(req: NextApiRequest, res: NextApiResponse) { const { uuid, jobName } = req.query; - getLogger({ buildUuid: uuid as string }).info( - `method=${req.method} jobName=${jobName} Job logs endpoint called, delegating to unified handler` - ); - req.query.type = 'webhook'; + return withLogContext({ buildUuid: uuid as string }, async () => { + getLogger().info(`method=${req.method} jobName=${jobName} Job logs endpoint called, delegating to unified handler`); - req.query.name = undefined; + req.query.type = 'webhook'; - return unifiedLogStreamHandler(req, res); + req.query.name = undefined; + + return unifiedLogStreamHandler(req, res); + }); } diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts index daf6b04..9cf6d46 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts @@ -100,7 +100,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const { uuid, name } = req.query; const correlationId = `api-service-redeploy-${Date.now()}-${nanoid(8)}`; - return withLogContext({ correlationId }, async () => { + return withLogContext({ correlationId, buildUuid: uuid as string }, async () => { try { const githubService = new GithubService(); const build: Build = await githubService.db.models.Build.query() @@ -112,14 +112,14 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { const buildId = build.id; if (!build) { - getLogger({ buildUuid: uuid as string }).debug(`Build not found`); + getLogger().debug(`Build not found`); return res.status(404).json({ error: `Build not found for ${uuid}` }); } const deploy = build.deploys.find((deploy) => deploy.deployable.name === name); if (!deploy) { 
- getLogger({ buildUuid: uuid as string }).debug(`Deployable not found: service=${name}`); + getLogger().debug(`Deployable not found: service=${name}`); res.status(404).json({ error: `${name} service is not found in ${uuid} build.` }); return; } @@ -135,9 +135,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { ...extractContextForQueue(), }); - getLogger({ stage: LogStage.BUILD_QUEUED, buildUuid: uuid as string }).info( - `Service redeploy queued: service=${name}` - ); + getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Build: service redeploy queued service=${name}`); const deployService = new DeployService(); diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts index 9a7ab3f..a8e6811 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import { HttpError } from '@kubernetes/client-node'; import { BuildJobInfo, getNativeBuildJobs } from 'server/lib/kubernetes/getNativeBuildJobs'; @@ -143,39 +143,40 @@ interface BuildLogsListResponse { // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { const { uuid, name } = req.query; - const logger = getLogger({ buildUuid: uuid as string }); - if (req.method !== 'GET') { - logger.warn(`API: method not allowed method=${req.method}`); - res.setHeader('Allow', ['GET']); - return res.status(405).json({ error: `${req.method} is not allowed` }); - } + return withLogContext({ buildUuid: uuid as string }, async () => { + if (req.method !== 'GET') { + getLogger().warn(`API: method not allowed method=${req.method}`); + res.setHeader('Allow', ['GET']); + return res.status(405).json({ error: `${req.method} is not allowed` }); + } - if (typeof uuid !== 'string' || typeof name !== 'string') { - logger.warn(`API: invalid parameters uuid=${uuid} name=${name}`); - return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' }); - } + if (typeof uuid !== 'string' || typeof name !== 'string') { + getLogger().warn(`API: invalid parameters uuid=${uuid} name=${name}`); + return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' }); + } - try { - const namespace = `env-${uuid}`; + try { + const namespace = `env-${uuid}`; - const buildJobs = await getNativeBuildJobs(name, namespace); + const buildJobs = await getNativeBuildJobs(name, namespace); - const response: BuildLogsListResponse = { - builds: buildJobs, - }; + const response: BuildLogsListResponse = { + builds: buildJobs, + }; - return res.status(200).json(response); - } catch (error) { - logger.error({ error }, `API: build logs fetch failed service=${name}`); + return res.status(200).json(response); + } catch (error) { + getLogger().error({ error }, `API: build logs fetch failed service=${name}`); - if (error instanceof HttpError) { - if (error.response?.statusCode === 404) { - return res.status(404).json({ error: 'Environment or service not found.' }); + if (error instanceof HttpError) { + if (error.response?.statusCode === 404) { + return res.status(404).json({ error: 'Environment or service not found.' }); + } + return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' 
}); } - return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' }); - } - return res.status(500).json({ error: 'Internal server error occurred.' }); - } + return res.status(500).json({ error: 'Internal server error occurred.' }); + } + }); }; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts index 501049a..0fc84f4 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import unifiedLogStreamHandler from '../logs/[jobName]'; /** @@ -98,11 +98,12 @@ import unifiedLogStreamHandler from '../logs/[jobName]'; */ export default async function handler(req: NextApiRequest, res: NextApiResponse) { const { uuid, jobName } = req.query; - getLogger({ buildUuid: uuid as string }).info( - `method=${req.method} jobName=${jobName} Build logs endpoint called, delegating to unified handler` - ); - req.query.type = 'build'; + return withLogContext({ buildUuid: uuid as string }, async () => { + getLogger().info(`API: build logs endpoint called method=${req.method} jobName=${jobName}`); - return unifiedLogStreamHandler(req, res); + req.query.type = 'build'; + + return unifiedLogStreamHandler(req, res); + }); } diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts index d21d04c..04964df 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts @@ -131,18 +131,21 @@ * example: Failed to communicate with Kubernetes. 
*/ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import unifiedLogStreamHandler from '../logs/[jobName]'; const deployLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => { const { uuid, jobName } = req.query; - getLogger({ buildUuid: uuid as string }).info( - `method=${req.method} jobName=${jobName} Deploy logs endpoint called, delegating to unified handler` - ); - req.query.type = 'deploy'; + return withLogContext({ buildUuid: uuid as string }, async () => { + getLogger().info( + `method=${req.method} jobName=${jobName} Deploy logs endpoint called, delegating to unified handler` + ); - return unifiedLogStreamHandler(req, res); + req.query.type = 'deploy'; + + return unifiedLogStreamHandler(req, res); + }); }; export default deployLogStreamHandler; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts index 86011c4..4931874 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; import { Deploy } from 'server/models'; @@ -245,52 +245,52 @@ async function getGitHubDeploymentDetails( const handler = async (req: NextApiRequest, res: NextApiResponse) => { const { uuid, name } = req.query; - if (req.method !== 'GET') { - getLogger({ buildUuid: uuid as string }).warn(`Method not allowed: method=${req.method}`); - res.setHeader('Allow', ['GET']); - return res.status(405).json({ error: `${req.method} is not allowed` }); - } + return withLogContext({ buildUuid: uuid as string }, async () => { + if (req.method !== 'GET') { + getLogger().warn(`Method not allowed: method=${req.method}`); + res.setHeader('Allow', ['GET']); + return res.status(405).json({ error: `${req.method} is not allowed` }); + } - if (typeof uuid !== 'string' || typeof name !== 'string') { - getLogger({ buildUuid: uuid as string }).warn(`Missing or invalid query parameters: uuid=${uuid} name=${name}`); - return res.status(400).json({ error: 'Missing or invalid parameters' }); - } + if (typeof uuid !== 'string' || typeof name !== 'string') { + getLogger().warn(`Missing or invalid query parameters: uuid=${uuid} name=${name}`); + return res.status(400).json({ error: 'Missing or invalid parameters' }); + } - const deployUuid = `${name}-${uuid}`; + const deployUuid = `${name}-${uuid}`; - try { - const namespace = `env-${uuid}`; + try { + const namespace = `env-${uuid}`; - getLogger({ buildUuid: uuid }).debug( - `Fetching deployment details: deployUuid=${deployUuid} namespace=${namespace} service=${name}` - ); + getLogger().debug(`Fetching deployment details: deployUuid=${deployUuid} namespace=${namespace} service=${name}`); - const helmDetails = await getHelmDeploymentDetails(namespace, deployUuid); - if (helmDetails) { - getLogger({ buildUuid: uuid }).debug(`Found Helm deployment details: deployUuid=${deployUuid}`); - return res.status(200).json(helmDetails); - } + const helmDetails = await getHelmDeploymentDetails(namespace, deployUuid); + if (helmDetails) { + getLogger().debug(`Found Helm deployment details: 
deployUuid=${deployUuid}`); + return res.status(200).json(helmDetails); + } - const githubDetails = await getGitHubDeploymentDetails(namespace, deployUuid); - if (githubDetails) { - getLogger({ buildUuid: uuid }).debug(`Found GitHub-type deployment details: deployUuid=${deployUuid}`); - return res.status(200).json(githubDetails); - } + const githubDetails = await getGitHubDeploymentDetails(namespace, deployUuid); + if (githubDetails) { + getLogger().debug(`Found GitHub-type deployment details: deployUuid=${deployUuid}`); + return res.status(200).json(githubDetails); + } - getLogger({ buildUuid: uuid }).warn(`No deployment details found: deployUuid=${deployUuid}`); - return res.status(404).json({ error: 'Deployment not found' }); - } catch (error) { - getLogger({ buildUuid: uuid }).error({ error }, `Error getting deployment details: deployUuid=${deployUuid}`); + getLogger().warn(`No deployment details found: deployUuid=${deployUuid}`); + return res.status(404).json({ error: 'Deployment not found' }); + } catch (error) { + getLogger().error({ error }, `Error getting deployment details: deployUuid=${deployUuid}`); - if (error instanceof HttpError) { - if (error.response?.statusCode === 404) { - return res.status(404).json({ error: 'Deployment not found' }); + if (error instanceof HttpError) { + if (error.response?.statusCode === 404) { + return res.status(404).json({ error: 'Deployment not found' }); + } + return res.status(502).json({ error: 'Failed to communicate with Kubernetes' }); } - return res.status(502).json({ error: 'Failed to communicate with Kubernetes' }); - } - return res.status(500).json({ error: 'Internal server error' }); - } + return res.status(500).json({ error: 'Internal server error' }); + } + }); }; export default handler; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts index 8c6611f..ba882cd 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts @@ -159,56 +159,57 @@ * example: Failed to communicate with Kubernetes. 
*/ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import { LogStreamingService } from 'server/services/logStreaming'; import { HttpError } from '@kubernetes/client-node'; const unifiedLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => { const { uuid, name, jobName, type } = req.query; - const logger = getLogger({ buildUuid: uuid as string }); - if (req.method !== 'GET') { - logger.warn(`API: method not allowed method=${req.method}`); - res.setHeader('Allow', ['GET']); - return res.status(405).json({ error: `${req.method} is not allowed` }); - } + return withLogContext({ buildUuid: uuid as string }, async () => { + if (req.method !== 'GET') { + getLogger().warn(`API: method not allowed method=${req.method}`); + res.setHeader('Allow', ['GET']); + return res.status(405).json({ error: `${req.method} is not allowed` }); + } + + const isWebhookRequest = type === 'webhook'; - const isWebhookRequest = type === 'webhook'; + if (typeof uuid !== 'string' || typeof jobName !== 'string' || (!isWebhookRequest && typeof name !== 'string')) { + getLogger().warn(`API: invalid parameters uuid=${uuid} name=${name} jobName=${jobName} type=${type}`); + return res.status(400).json({ error: 'Missing or invalid parameters' }); + } - if (typeof uuid !== 'string' || typeof jobName !== 'string' || (!isWebhookRequest && typeof name !== 'string')) { - logger.warn(`API: invalid parameters uuid=${uuid} name=${name} jobName=${jobName} type=${type}`); - return res.status(400).json({ error: 'Missing or invalid parameters' }); - } + if (type && (typeof type !== 'string' || !['build', 'deploy', 'webhook'].includes(type))) { + getLogger().warn(`API: invalid type parameter type=${type}`); + return res.status(400).json({ error: 'Invalid type parameter. Must be "build", "deploy", or "webhook"' }); + } - if (type && (typeof type !== 'string' || !['build', 'deploy', 'webhook'].includes(type))) { - logger.warn(`API: invalid type parameter type=${type}`); - return res.status(400).json({ error: 'Invalid type parameter. Must be "build", "deploy", or "webhook"' }); - } + try { + const logService = new LogStreamingService(); - try { - const logService = new LogStreamingService(); + const response = await logService.getLogStreamInfo( + uuid, + jobName, + name as string | undefined, + type as string | undefined + ); - const response = await logService.getLogStreamInfo( - uuid, - jobName, - name as string | undefined, - type as string | undefined - ); + return res.status(200).json(response); + } catch (error: any) { + getLogger().error({ error }, `API: log stream info fetch failed jobName=${jobName} service=${name}`); - return res.status(200).json(response); - } catch (error: any) { - logger.error({ error }, `API: log stream info fetch failed jobName=${jobName} service=${name}`); + if (error.message === 'Build not found') { + return res.status(404).json({ error: 'Build not found' }); + } - if (error.message === 'Build not found') { - return res.status(404).json({ error: 'Build not found' }); - } + if (error instanceof HttpError || error.message?.includes('Kubernetes') || error.statusCode === 502) { + return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' }); + } - if (error instanceof HttpError || error.message?.includes('Kubernetes') || error.statusCode === 502) { - return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' 
}); + return res.status(500).json({ error: 'Internal server error occurred.' }); } - - return res.status(500).json({ error: 'Internal server error occurred.' }); - } + }); }; export default unifiedLogStreamHandler; diff --git a/src/pages/api/v1/builds/[uuid]/torndown.ts b/src/pages/api/v1/builds/[uuid]/torndown.ts index 7236cf5..bfcbe07 100644 --- a/src/pages/api/v1/builds/[uuid]/torndown.ts +++ b/src/pages/api/v1/builds/[uuid]/torndown.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger/index'; import { Build } from 'server/models'; import { BuildStatus, DeployStatus } from 'shared/constants'; @@ -97,51 +97,54 @@ import BuildService from 'server/services/build'; // eslint-disable-next-line import/no-anonymous-default-export export default async (req: NextApiRequest, res: NextApiResponse) => { if (req.method !== 'PATCH') { - getLogger({}).debug(`Method not allowed: method=${req.method}`); + getLogger().debug(`Method not allowed: method=${req.method}`); return res.status(405).json({ error: `${req.method} is not allowed` }); } const uuid = req.query?.uuid; - try { - if (!uuid) { - getLogger({}).debug('The uuid is required'); - return res.status(500).json({ error: 'The uuid is required' }); - } - const buildService = new BuildService(); + if (!uuid) { + getLogger().debug('The uuid is required'); + return res.status(500).json({ error: 'The uuid is required' }); + } - const build: Build = await buildService.db.models.Build.query() - .findOne({ - uuid, - }) - .withGraphFetched('[deploys]'); + return withLogContext({ buildUuid: uuid as string }, async () => { + try { + const buildService = new BuildService(); - if (build.isStatic || !build) { - getLogger({ buildUuid: uuid as string }).debug('Build does not exist or is static environment'); - return res.status(404).json({ error: `The build doesn't exist or is static environment` }); - } + const build: Build = await buildService.db.models.Build.query() + .findOne({ + uuid, + }) + .withGraphFetched('[deploys]'); - const deploysIds = build.deploys.map((deploy) => deploy.id); + if (build.isStatic || !build) { + getLogger().debug('Build does not exist or is static environment'); + return res.status(404).json({ error: `The build doesn't exist or is static environment` }); + } - await buildService.db.models.Build.query().findById(build.id).patch({ - status: BuildStatus.TORN_DOWN, - statusMessage: 'Namespace was deleted successfully', - }); + const deploysIds = build.deploys.map((deploy) => deploy.id); - await buildService.db.models.Deploy.query() - .whereIn('id', deploysIds) - .patch({ status: DeployStatus.TORN_DOWN, statusMessage: 'Namespace was deleted successfully' }); + await buildService.db.models.Build.query().findById(build.id).patch({ + status: BuildStatus.TORN_DOWN, + statusMessage: 'Namespace was deleted successfully', + }); - const updatedDeploys = await buildService.db.models.Deploy.query() - .whereIn('id', deploysIds) - .select('id', 'uuid', 'status'); + await buildService.db.models.Deploy.query() + .whereIn('id', deploysIds) + .patch({ status: DeployStatus.TORN_DOWN, statusMessage: 'Namespace was deleted successfully' }); - return res.status(200).json({ - status: `The namespace env-${uuid} it was delete sucessfuly`, - namespacesUpdated: updatedDeploys, - }); - } catch (error) { - getLogger({ buildUuid: uuid as string }).error({ error }, 'Error in cleanup API'); - return res.status(500).json({ 
error: 'An unexpected error occurred.' });
-  }
+      const updatedDeploys = await buildService.db.models.Deploy.query()
+        .whereIn('id', deploysIds)
+        .select('id', 'uuid', 'status');
+
+      return res.status(200).json({
+        status: `The namespace env-${uuid} was deleted successfully`,
+        namespacesUpdated: updatedDeploys,
+      });
+    } catch (error) {
+      getLogger().error({ error }, 'Error in cleanup API');
+      return res.status(500).json({ error: 'An unexpected error occurred.' });
+    }
+  });
 };
diff --git a/src/pages/api/v1/builds/[uuid]/webhooks.ts b/src/pages/api/v1/builds/[uuid]/webhooks.ts
index a32fbf9..450bf2f 100644
--- a/src/pages/api/v1/builds/[uuid]/webhooks.ts
+++ b/src/pages/api/v1/builds/[uuid]/webhooks.ts
@@ -195,20 +195,22 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
     return res.status(400).json({ error: 'Invalid UUID' });
   }
 
-  try {
-    switch (req.method) {
-      case 'GET':
-        return retrieveWebhooks(req, res);
-      case 'POST':
-        return invokeWebhooks(req, res);
-      default:
-        res.setHeader('Allow', ['GET', 'POST']);
-        return res.status(405).json({ error: `${req.method} is not allowed.` });
+  return withLogContext({ buildUuid: uuid }, async () => {
+    try {
+      switch (req.method) {
+        case 'GET':
+          return retrieveWebhooks(req, res);
+        case 'POST':
+          return invokeWebhooks(req, res);
+        default:
+          res.setHeader('Allow', ['GET', 'POST']);
+          return res.status(405).json({ error: `${req.method} is not allowed.` });
+      }
+    } catch (error) {
+      getLogger({ error }).error(`Error handling ${req.method} request`);
+      res.status(500).json({ error: 'An unexpected error occurred.' });
     }
-  } catch (error) {
-    getLogger({ buildUuid: uuid as string }).error({ error }, `Error handling ${req.method} request`);
-    res.status(500).json({ error: 'An unexpected error occurred.'
}); - } + }); }; async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) { @@ -225,12 +227,12 @@ async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) { const buildId = build.id; if (!build) { - getLogger({ buildUuid: uuid as string }).debug('Build not found'); + getLogger().debug('Build not found'); return res.status(404).json({ error: `Build not found for ${uuid}` }); } if (!build.webhooksYaml) { - getLogger({ buildUuid: uuid as string }).debug('No webhooks found for build'); + getLogger().debug('No webhooks found for build'); return res.status(204).json({ status: 'no_content', message: `No webhooks found for build ${uuid}.`, @@ -243,17 +245,14 @@ async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) { correlationId, }); - getLogger({ stage: LogStage.WEBHOOK_PROCESSING, buildUuid: uuid as string }).info( - 'Webhook invocation queued via API' - ); + getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).info('Webhook invocation queued via API'); return res.status(200).json({ status: 'success', message: `Webhook for build ${uuid} has been queued`, }); } catch (error) { - getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error( - { error }, + getLogger({ stage: LogStage.WEBHOOK_PROCESSING, error }).error( `Unable to proceed with webhook for build ${uuid}` ); return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` }); @@ -294,7 +293,7 @@ async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) { }, }); } catch (error) { - getLogger({ buildUuid: uuid as string }).error({ error }, 'Failed to retrieve webhooks'); + getLogger({ error }).error('Failed to retrieve webhooks'); return res.status(500).json({ error: `Unable to retrieve webhooks for build ${uuid}.` }); } } diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts index 929c3a4..05b94b3 100644 --- a/src/server/lib/cli.ts +++ b/src/server/lib/cli.ts @@ -18,7 +18,7 @@ import { merge } from 'lodash'; import { Build, Deploy, Service, Deployable } from 'server/models'; import { CLIDeployTypes, DeployTypes } from 'shared/constants'; import { shellPromise } from './shell'; -import { getLogger, withLogContext } from './logger/index'; +import { getLogger, withLogContext, updateLogContext } from './logger/index'; import GlobalConfigService from 'server/services/globalConfig'; import { DatabaseSettings } from 'server/services/types/globalConfig'; @@ -65,7 +65,8 @@ export async function cliDeploy(deploy: Deploy) { */ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Service, deployable: Deployable) { const buildUuid = build?.uuid; - getLogger({ buildUuid }).debug('Invoking the codefresh CLI to deploy this deploy'); + updateLogContext({ buildUuid }); + getLogger().debug('Invoking the codefresh CLI to deploy this deploy'); const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv); @@ -88,9 +89,9 @@ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Ser const command = `codefresh run ${serviceDeployPipelineId} -b "${deploy.branchName}" ${variables.join( ' ' )} ${deployTrigger} -d`; - getLogger({ buildUuid }).debug(`About to run codefresh command: command=${command}`); + getLogger().debug(`About to run codefresh command: command=${command}`); const output = await shellPromise(command); - getLogger({ buildUuid }).debug(`Codefresh run output: output=${output}`); + getLogger().debug(`Codefresh run output: output=${output}`); const id = output.trim(); return id; } @@ -101,7 
+102,8 @@ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Ser */ export async function codefreshDestroy(deploy: Deploy) { const buildUuid = deploy?.build?.uuid; - getLogger({ buildUuid }).debug('Invoking the codefresh CLI to delete this deploy'); + updateLogContext({ buildUuid }); + getLogger().debug('Invoking the codefresh CLI to delete this deploy'); try { /** Reset the SHA so we will re-run the pipelines post destroy */ @@ -141,12 +143,12 @@ export async function codefreshDestroy(deploy: Deploy) { const command = `codefresh run ${destroyPipelineId} -b "${serviceBranchName}" ${variables.join( ' ' )} ${destroyTrigger} -d`; - getLogger({ buildUuid }).debug(`Destroy command: command=${command}`); + getLogger().debug(`Destroy command: command=${command}`); const output = await shellPromise(command); const id = output?.trim(); return id; } catch (error) { - getLogger({ buildUuid, error }).error('Error destroying Codefresh pipeline'); + getLogger({ error }).error('Error destroying Codefresh pipeline'); throw error; } } @@ -172,6 +174,7 @@ export async function waitForCodefresh(id: string) { */ export async function deleteBuild(build: Build) { const buildUuid = build?.uuid; + updateLogContext({ buildUuid }); try { const buildId = build?.id; @@ -197,9 +200,9 @@ export async function deleteBuild(build: Build) { ); }) ); - getLogger({ buildUuid }).info('CLI: deleted'); + getLogger().info('CLI: deleted'); } catch (e) { - getLogger({ buildUuid, error: e }).error('Error deleting CLI resources'); + getLogger({ error: e }).error('Error deleting CLI resources'); } } diff --git a/src/server/services/codefresh.ts b/src/server/services/codefresh.ts index 77d2284..1837874 100644 --- a/src/server/services/codefresh.ts +++ b/src/server/services/codefresh.ts @@ -17,12 +17,13 @@ import BaseService from './_service'; import * as YamlService from 'server/models/yaml'; import { triggerPipeline } from 'server/lib/codefresh'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, updateLogContext } from 'server/lib/logger/index'; export default class CodefreshService extends BaseService { async triggerYamlConfigWebhookPipeline(webhook: YamlService.Webhook, data: Record): Promise { let buildId: string; const buildUuid = data?.buildUUID; + updateLogContext({ buildUuid }); if ( webhook.state !== undefined && webhook.type !== undefined && @@ -31,7 +32,7 @@ export default class CodefreshService extends BaseService { ) { buildId = await triggerPipeline(webhook.pipelineId, webhook.trigger, data); } else { - getLogger({ buildUuid, webhook }).error( + getLogger({ webhook }).error( `Invalid webhook configuration: name=${webhook.name ?? 
''} pipelineId=${webhook.pipelineId} trigger=${ webhook.trigger }` diff --git a/src/server/services/deployable.ts b/src/server/services/deployable.ts index c51337b..8057970 100644 --- a/src/server/services/deployable.ts +++ b/src/server/services/deployable.ts @@ -1028,7 +1028,7 @@ export default class DeployableService extends BaseService { }).error('Failed to upsert deployables'); throw error; } - getLogger({ buildUUID }).info(`Created/Updated ${deployables.length} deployables`); + getLogger({ buildUUID }).info(`Deployable: upserted count=${deployables.length}`); return deployables; } diff --git a/src/server/services/label.ts b/src/server/services/label.ts index 7801650..39a6bf6 100644 --- a/src/server/services/label.ts +++ b/src/server/services/label.ts @@ -18,7 +18,7 @@ import Service from './_service'; import { Queue, Job } from 'bullmq'; import { QUEUE_NAMES } from 'shared/config'; import { redisClient } from 'server/lib/dependencies'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger/index'; import { waitForColumnValue } from 'shared/utils'; import { updatePullRequestLabels } from 'server/lib/github'; import { getDeployLabel } from 'server/lib/utils'; @@ -73,23 +73,24 @@ export default class LabelService extends Service { const { repository, build } = pullRequest; const buildUuid = build?.uuid || 'unknown'; + updateLogContext({ buildUuid }); if (!repository) { throw new Error(`Repository not found for pull request ${pullRequestId}`); } - getLogger({ stage: LogStage.LABEL_PROCESSING, buildUuid }).info( - `Processing label ${action} for PR ${pullRequest.pullRequestNumber}` + getLogger({ stage: LogStage.LABEL_PROCESSING }).info( + `Label: processing action=${action} pr=${pullRequest.pullRequestNumber}` ); if (waitForComment && !pullRequest.commentId) { - getLogger({ stage: LogStage.LABEL_PROCESSING, buildUuid }).debug( + getLogger({ stage: LogStage.LABEL_PROCESSING }).debug( 'Waiting for comment_id to be set before updating labels' ); // 60 attempts * 5 seconds = 5 minutes const updatedPullRequest = await waitForColumnValue(pullRequest, 'commentId', 60, 5000); if (!updatedPullRequest) { - getLogger({ stage: LogStage.LABEL_PROCESSING, buildUuid }).warn( + getLogger({ stage: LogStage.LABEL_PROCESSING }).warn( 'Timeout waiting for comment_id while updating labels after 5 minutes' ); } @@ -102,7 +103,7 @@ export default class LabelService extends Service { if (!currentLabels.includes(deployLabel)) { updatedLabels = [...currentLabels, deployLabel]; } else { - getLogger({ stage: LogStage.LABEL_COMPLETE, buildUuid }).debug( + getLogger({ stage: LogStage.LABEL_COMPLETE }).debug( `Deploy label "${deployLabel}" already exists on PR, skipping update` ); return; @@ -120,8 +121,8 @@ export default class LabelService extends Service { labels: updatedLabels, }); - getLogger({ stage: LogStage.LABEL_COMPLETE, buildUuid }).info( - `Successfully ${action === 'enable' ? 'added' : 'removed'} ${deployLabel} label` + getLogger({ stage: LogStage.LABEL_COMPLETE }).info( + `Label: ${action === 'enable' ? 
'added' : 'removed'} label=${deployLabel}` ); } catch (error) { getLogger({ stage: LogStage.LABEL_FAILED }).error( diff --git a/src/server/services/override.ts b/src/server/services/override.ts index 39bebaf..d493410 100644 --- a/src/server/services/override.ts +++ b/src/server/services/override.ts @@ -15,7 +15,7 @@ */ import BaseService from './_service'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger, updateLogContext } from 'server/lib/logger/index'; import { Build } from 'server/models'; import * as k8s from 'server/lib/kubernetes'; import DeployService from './deploy'; @@ -73,7 +73,8 @@ export default class OverrideService extends BaseService { const oldUuid = build.uuid; const oldNamespace = build.namespace; - getLogger({ buildUuid: oldUuid }).info(`Updating UUID to '${newUuid}'`); + updateLogContext({ buildUuid: oldUuid, newUuid }); + getLogger().info(`Override: updating newUuid=${newUuid}`); try { return await this.db.models.Build.transact(async (trx) => { @@ -107,11 +108,9 @@ export default class OverrideService extends BaseService { const updatedBuild = await this.db.models.Build.query(trx).findById(build.id); k8s.deleteNamespace(oldNamespace).catch((error) => { - getLogger({ buildUuid: oldUuid }).warn({ error }, `Failed to delete old namespace ${oldNamespace}`); + getLogger().warn({ error }, `Failed to delete old namespace ${oldNamespace}`); }); - getLogger({ buildUuid: newUuid }).info( - `Successfully updated UUID from '${oldUuid}' to '${newUuid}', updated ${deploys.length} deploys` - ); + getLogger().info(`Override: updated oldUuid=${oldUuid} newUuid=${newUuid} deploysUpdated=${deploys.length}`); return { build: updatedBuild, @@ -119,7 +118,7 @@ export default class OverrideService extends BaseService { }; }); } catch (error) { - getLogger({ buildUuid: oldUuid }).error({ error }, `Failed to update UUID to '${newUuid}'`); + getLogger().error({ error }, `Failed to update UUID to '${newUuid}'`); throw error; } } diff --git a/src/server/services/ttlCleanup.ts b/src/server/services/ttlCleanup.ts index 0e38a46..2da9af8 100644 --- a/src/server/services/ttlCleanup.ts +++ b/src/server/services/ttlCleanup.ts @@ -18,7 +18,7 @@ import Service from './_service'; import { Queue, Job } from 'bullmq'; import { QUEUE_NAMES } from 'shared/config'; import { redisClient } from 'server/lib/dependencies'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, updateLogContext, getLogger, LogStage } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { updatePullRequestLabels, createOrUpdatePullRequestComment, getPullRequestLabels } from 'server/lib/github'; import { getKeepLabel, getDisabledLabel, getDeployLabel } from 'server/lib/utils'; @@ -79,33 +79,30 @@ export default class TTLCleanupService extends Service { const staleEnvironments = await this.findStaleEnvironments(config.inactivityDays, config.excludedRepositories); getLogger({ stage: LogStage.CLEANUP_STARTING }).info( - `TTL: found ${staleEnvironments.length} stale envs (${config.inactivityDays}d inactive)` + `TTL: found stale environments count=${staleEnvironments.length} inactivityDays=${config.inactivityDays}` ); let successCount = 0; let errorCount = 0; for (const env of staleEnvironments) { - try { - if (dryRun) { - getLogger({ buildUuid: env.buildUUID }).info( - `TTL: [DRY RUN] would cleanup ${env.namespace} PR#${env.pullRequest.pullRequestNumber}` - ); - successCount++; - } else { - getLogger({ buildUuid: env.buildUUID 
}).info( - `TTL: cleaning ${env.namespace} PR#${env.pullRequest.pullRequestNumber}` - ); - await this.cleanupStaleEnvironment(env, config.inactivityDays, config.commentTemplate, dryRun); - successCount++; + await withLogContext({ buildUuid: env.buildUUID }, async () => { + try { + if (dryRun) { + getLogger().info( + `TTL: dry run would cleanup namespace=${env.namespace} pr=${env.pullRequest.pullRequestNumber}` + ); + successCount++; + } else { + getLogger().info(`TTL: cleaning namespace=${env.namespace} pr=${env.pullRequest.pullRequestNumber}`); + await this.cleanupStaleEnvironment(env, config.inactivityDays, config.commentTemplate, dryRun); + successCount++; + } + } catch (error) { + errorCount++; + getLogger().error({ error }, `Failed to cleanup environment: namespace=${env.namespace}`); } - } catch (error) { - errorCount++; - getLogger({ buildUuid: env.buildUUID }).error( - { error }, - `Failed to cleanup environment: namespace=${env.namespace}` - ); - } + }); } getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info( @@ -161,7 +158,7 @@ export default class TTLCleanupService extends Service { const namespaces = namespacesResponse.body.items; - getLogger({ stage: LogStage.CLEANUP_STARTING }).info(`TTL: scanning ${namespaces.length} namespaces`); + getLogger({ stage: LogStage.CLEANUP_STARTING }).info(`TTL: scanning namespaces count=${namespaces.length}`); // Fetch dynamic labels once at the start const keepLabel = await getKeepLabel(); @@ -178,7 +175,7 @@ export default class TTLCleanupService extends Service { const expireAtUnix = labels['lfc/ttl-expireAtUnix']; if (!expireAtUnix) { - getLogger({}).debug(`Namespace ${nsName} has no TTL expiration label, skipping`); + getLogger().debug(`Namespace ${nsName} has no TTL expiration label, skipping`); continue; } @@ -190,53 +187,52 @@ export default class TTLCleanupService extends Service { const daysExpired = Math.floor((now - expireTime) / (1000 * 60 * 60 * 24)); - const buildUUID = labels['lfc/uuid']; // Use lfc/uuid (intentional difference) + const buildUUID = labels['lfc/uuid']; if (!buildUUID) { - getLogger({}).warn(`Namespace ${nsName} has no lfc/uuid label, skipping`); + getLogger().warn(`Namespace ${nsName} has no lfc/uuid label, skipping`); continue; } - getLogger({ buildUuid: buildUUID }).debug(`Namespace ${nsName} expired ${daysExpired} days ago`); + updateLogContext({ buildUuid: buildUUID }); + + getLogger().debug(`Namespace ${nsName} expired ${daysExpired} days ago`); const build = await this.db.models.Build.query() .findOne({ uuid: buildUUID }) .withGraphFetched('[pullRequest.repository]'); if (!build) { - getLogger({ buildUuid: buildUUID }).warn(`No build found for namespace ${nsName}, skipping`); + getLogger().warn(`No build found for namespace ${nsName}, skipping`); continue; } if (build.status === 'torn_down' || build.status === 'pending') { - getLogger({ buildUuid: buildUUID }).debug(`Build is already ${build.status}, skipping`); + getLogger().debug(`Build is already ${build.status}, skipping`); continue; } if (build.isStatic) { - getLogger({ buildUuid: buildUUID }).debug(`Build is static environment, skipping`); + getLogger().debug(`Build is static environment, skipping`); continue; } const pullRequest = build.pullRequest; if (!pullRequest) { - getLogger({ buildUuid: buildUUID }).warn(`No pull request found, skipping`); + getLogger().warn(`No pull request found, skipping`); continue; } if (pullRequest.status !== 'open') { - getLogger({ buildUuid: buildUUID }).debug(`PR is ${pullRequest.status}, skipping`); + 
getLogger().debug(`PR is ${pullRequest.status}, skipping`); continue; } if (excludedRepositories.length > 0 && excludedRepositories.includes(pullRequest.fullName)) { - getLogger({ buildUuid: buildUUID }).debug( - `Repository ${pullRequest.fullName} is excluded from TTL cleanup, skipping` - ); + getLogger().debug(`Repository ${pullRequest.fullName} is excluded from TTL cleanup, skipping`); continue; } - // Fetch current labels from GitHub to avoid stale data due to webhook incidents let currentLabels: string[]; try { currentLabels = await getPullRequestLabels({ @@ -245,37 +241,30 @@ export default class TTLCleanupService extends Service { fullName: pullRequest.fullName, }); - getLogger({ buildUuid: buildUUID }).debug( - `Fetched ${currentLabels.length} labels from GitHub: ${currentLabels.join(', ')}` - ); + getLogger().debug(`Fetched ${currentLabels.length} labels from GitHub: ${currentLabels.join(', ')}`); - // Sync labels back to DB if they differ (self-healing) const dbLabels = this.parseLabels(pullRequest.labels); if (JSON.stringify(currentLabels.sort()) !== JSON.stringify(dbLabels.sort())) { - getLogger({ buildUuid: buildUUID }).debug('TTL: label drift detected, syncing to DB'); + getLogger().debug('TTL: label drift detected, syncing to DB'); await pullRequest.$query().patch({ labels: JSON.stringify(currentLabels) as any, }); } } catch (error) { - getLogger({ buildUuid: buildUUID }).warn({ error }, `Failed to fetch labels from GitHub, falling back to DB`); - // Fallback to DB labels if GitHub API fails + getLogger().warn({ error }, `Failed to fetch labels from GitHub, falling back to DB`); currentLabels = this.parseLabels(pullRequest.labels); } if (currentLabels.includes(keepLabel)) { - getLogger({ buildUuid: buildUUID }).debug(`Has ${keepLabel} label (verified from GitHub), skipping`); + getLogger().debug(`Has ${keepLabel} label (verified from GitHub), skipping`); continue; } if (currentLabels.includes(disabledLabel)) { - getLogger({ buildUuid: buildUUID }).debug( - `Already has ${disabledLabel} label (verified from GitHub), skipping` - ); + getLogger().debug(`Already has ${disabledLabel} label (verified from GitHub), skipping`); continue; } - // Store current labels and drift status for dry-run reporting const dbLabels = this.parseLabels(pullRequest.labels); const hadLabelDrift = JSON.stringify(currentLabels.sort()) !== JSON.stringify(dbLabels.sort()); @@ -313,7 +302,9 @@ export default class TTLCleanupService extends Service { const buildUuid = build.uuid; const repository = pullRequest.repository; - getLogger({ buildUuid }).info(`TTL: cleaning ${namespace} PR#${pullRequest.pullRequestNumber}`); + updateLogContext({ buildUuid }); + + getLogger().info(`TTL: cleaning namespace=${namespace} pr=${pullRequest.pullRequestNumber}`); // Fetch dynamic labels at runtime const deployLabel = await getDeployLabel(); @@ -331,7 +322,7 @@ export default class TTLCleanupService extends Service { labels: updatedLabels, }); - getLogger({ buildUuid }).debug(`TTL: labels updated PR#${pullRequest.pullRequestNumber}`); + getLogger().debug(`TTL: labels updated PR#${pullRequest.pullRequestNumber}`); const commentMessage = await this.generateCleanupComment(inactivityDays, commentTemplate); @@ -344,7 +335,7 @@ export default class TTLCleanupService extends Service { etag: null, }); - getLogger({ buildUuid }).debug(`TTL: cleanup comment posted PR#${pullRequest.pullRequestNumber}`); + getLogger().debug(`TTL: cleanup comment posted PR#${pullRequest.pullRequestNumber}`); await pullRequest.$query().patch({ labels: 
JSON.stringify(updatedLabels) as any, @@ -354,7 +345,7 @@ export default class TTLCleanupService extends Service { const metrics = new Metrics('ttl.cleanup', { repositoryName: pullRequest.fullName }); metrics.increment('total', { dry_run: dryRun.toString() }); } catch (error) { - getLogger({ buildUuid }).error( + getLogger().error( { error }, `Failed to cleanup stale environment: namespace=${namespace} prNumber=${pullRequest.pullRequestNumber}` ); @@ -395,7 +386,7 @@ export default class TTLCleanupService extends Service { const config = await this.getTTLConfig(); if (!config.enabled) { - getLogger({}).debug('TTL: disabled in config'); + getLogger().debug('TTL: disabled in config'); return; } @@ -411,6 +402,6 @@ export default class TTLCleanupService extends Service { } ); - getLogger({}).info(`TTL: scheduled every ${config.checkIntervalMinutes}min`); + getLogger().info(`TTL: scheduled interval=${config.checkIntervalMinutes}min`); } } diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts index 0ceb0a9..42a0bce 100644 --- a/src/server/services/webhook.ts +++ b/src/server/services/webhook.ts @@ -48,6 +48,10 @@ export default class WebhookService extends BaseService { throw new WebhookError('Pull Request and Build cannot be null when upserting webhooks'); } + if (build?.uuid) { + updateLogContext({ buildUuid: build.uuid }); + } + await pullRequest.$fetchGraph('repository'); // if build is in classic mode, we should not proceed with yaml webhooks since db webhooks are not supported anymore @@ -62,12 +66,10 @@ export default class WebhookService extends BaseService { if (yamlConfig?.environment?.webhooks != null) { webhooks = yamlConfig.environment.webhooks; await build.$query().patch({ webhooksYaml: JSON.stringify(webhooks) }); - getLogger({ buildUuid: build.uuid }).info( - `Updated build with webhooks from config: webhooks=${JSON.stringify(webhooks)}` - ); + getLogger().info(`Webhook: config updated webhooks=${JSON.stringify(webhooks)}`); } else { await build.$query().patch({ webhooksYaml: null }); - getLogger({ buildUuid: build.uuid }).info('No webhooks found in config'); + getLogger().info('Webhook: config empty'); } } return webhooks; @@ -78,11 +80,13 @@ export default class WebhookService extends BaseService { * @param build the build for which we want to run webhooks against */ async runWebhooksForBuild(build: Build): Promise { + updateLogContext({ buildUuid: build.uuid }); + // Check feature flag - if disabled, skip all webhooks // Only skips if explicitly set to false. 
If undefined/missing, webhooks execute (default behavior) const { features } = await this.db.services.GlobalConfig.getAllConfigs(); if (features?.webhooks === false) { - getLogger({ buildUuid: build.uuid }).debug('Webhooks feature flag is disabled, skipping webhook execution'); + getLogger().debug('Webhooks feature flag is disabled, skipping webhook execution'); return; } @@ -92,15 +96,13 @@ export default class WebhookService extends BaseService { case BuildStatus.TORN_DOWN: break; default: - getLogger({ buildUuid: build.uuid }).debug(`Skipping Lifecycle Webhooks execution for status: ${build.status}`); + getLogger().debug(`Skipping Lifecycle Webhooks execution for status: ${build.status}`); return; } // if build is not full yaml and no webhooks defined in YAML config, we should not run webhooks (no more db webhook support) if (!build.enableFullYaml && build.webhooksYaml == null) { - getLogger({ buildUuid: build.uuid }).debug( - `Skipping Lifecycle Webhooks (non yaml config build) execution for status: ${build.status}` - ); + getLogger().debug(`Skipping Lifecycle Webhooks (non yaml config build) execution for status: ${build.status}`); return; } const webhooks: YamlService.Webhook[] = JSON.parse(build.webhooksYaml); @@ -112,18 +114,18 @@ export default class WebhookService extends BaseService { const configFileWebhooks: YamlService.Webhook[] = webhooks.filter((webhook) => webhook.state === build.status); // if no webhooks defined in YAML config, we should not run webhooks if (configFileWebhooks != null && configFileWebhooks.length < 1) { - getLogger({ buildUuid: build.uuid }).info(`No webhooks found to be triggered for build status: ${build.status}`); + getLogger().info(`Webhook: skipped reason=noMatch status=${build.status}`); return; } - getLogger({ buildUuid: build.uuid }).info(`Triggering webhooks for build status: ${build.status}`); + getLogger().info(`Webhook: triggering status=${build.status}`); for (const webhook of configFileWebhooks) { await withLogContext({ webhookName: webhook.name, webhookType: webhook.type }, async () => { getLogger().info(`Webhook: running name=${webhook.name}`); await this.runYamlConfigFileWebhookForBuild(webhook, build); }); } - getLogger({ stage: LogStage.WEBHOOK_COMPLETE, buildUuid: build.uuid }).info( - `Webhooks completed: count=${configFileWebhooks.length} status=${build.status}` + getLogger({ stage: LogStage.WEBHOOK_COMPLETE }).info( + `Webhook: completed count=${configFileWebhooks.length} status=${build.status}` ); } From a7f1daa4b6445819d1193e8271f941f0be3eed2e Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 19:06:07 -0800 Subject: [PATCH 11/23] dont log bot events --- src/pages/api/webhooks/github.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts index df9581e..65e1c63 100644 --- a/src/pages/api/webhooks/github.ts +++ b/src/pages/api/webhooks/github.ts @@ -36,16 +36,16 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { if (!isVerified) throw new Error('Webhook not verified'); const event = req.headers['x-github-event'] as string; - getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook received: event=${event}`); const isBot = sender?.includes('[bot]') === true; if (event === 'issue_comment' && isBot) { tracer.scope().active()?.setTag('manual.drop', true); - getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).debug('Skipped: bot-triggered issue_comment'); res.status(200).end(); return; } + getLogger({ stage: 
LogStage.WEBHOOK_RECEIVED }).info(`Webhook received: event=${event}`); + if (!['web', 'all'].includes(LIFECYCLE_MODE)) { getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).info('Skipped: wrong LIFECYCLE_MODE'); return; From 1c2e4fa789000ce44c84511dea83088bab456e30 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 19:17:10 -0800 Subject: [PATCH 12/23] fix service names --- dd-trace.js | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/dd-trace.js b/dd-trace.js index 7e8d65c..dca809e 100644 --- a/dd-trace.js +++ b/dd-trace.js @@ -16,7 +16,13 @@ 'use strict'; -const tracer = require('dd-trace').init(); +const tracer = require('dd-trace').init({ + serviceMapping: { + redis: 'lifecycle-redis', + ioredis: 'lifecycle-redis', + pg: 'lifecycle-postgres', + }, +}); const blocklist = [/^\/api\/health/, /^\/api\/jobs/, /^\/_next\/static/, /^\/_next\/webpack-hmr/]; @@ -35,11 +41,3 @@ tracer.use('next', { tracer.use('net', false); tracer.use('dns', false); - -tracer.use('ioredis', { - service: 'lifecycle-redis', -}); - -tracer.use('pg', { - service: 'lifecycle-postgres', -}); From 8c209f2f9ece14e846d60ed503fa429a70825f91 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 22:49:41 -0800 Subject: [PATCH 13/23] standardize log messages to Category: action key=value format --- src/pages/api/health.ts | 4 +- src/pages/api/v1/admin/ttl/cleanup.ts | 8 +- src/pages/api/v1/ai/chat.ts | 10 +-- src/pages/api/v1/ai/models.ts | 4 +- src/pages/api/v1/builds/[uuid]/deploy.ts | 2 +- src/pages/api/v1/builds/[uuid]/graph.ts | 2 +- src/pages/api/v1/builds/[uuid]/index.ts | 4 +- .../v1/builds/[uuid]/jobs/[jobName]/events.ts | 10 +-- .../v1/builds/[uuid]/jobs/[jobName]/logs.ts | 2 +- .../[uuid]/services/[name]/buildLogs.ts | 2 +- .../[uuid]/services/[name]/deployment.ts | 8 +- .../[uuid]/services/[name]/logs/[jobName].ts | 6 +- src/pages/api/v1/builds/[uuid]/torndown.ts | 2 +- src/pages/api/v1/builds/[uuid]/webhooks.ts | 8 +- src/pages/api/v1/builds/index.ts | 2 +- src/pages/api/v1/config/cache.ts | 4 +- src/pages/api/v1/deploy-summary.ts | 2 +- src/pages/api/v1/deployables.ts | 2 +- src/pages/api/v1/deploys.ts | 2 +- src/pages/api/v1/pull-requests/[id]/builds.ts | 2 +- src/pages/api/v1/pull-requests/[id]/index.ts | 2 +- src/pages/api/v1/pull-requests/index.ts | 2 +- src/pages/api/v1/repos/index.ts | 2 +- src/pages/api/v1/schema/validate.ts | 2 +- src/pages/api/v1/setup/callback.ts | 16 ++-- src/pages/api/v1/setup/index.ts | 4 +- src/pages/api/v1/setup/installed.ts | 12 +-- src/pages/api/v1/users/index.ts | 2 +- src/pages/api/webhooks/github.ts | 8 +- src/server/database.ts | 10 +-- src/server/jobs/index.ts | 16 ++-- src/server/lib/__tests__/utils.test.ts | 15 +++- src/server/lib/buildEnvVariables.ts | 8 +- src/server/lib/cli.ts | 4 +- src/server/lib/codefresh/index.ts | 4 +- .../lib/configFileWebhookEnvVariables.ts | 2 +- src/server/lib/fastly.ts | 4 +- src/server/lib/github/cacheRequest.ts | 17 ++-- src/server/lib/github/deployments.ts | 6 +- src/server/lib/github/index.ts | 46 +++++------ src/server/lib/github/utils/index.ts | 18 ++--- src/server/lib/helm/helm.ts | 2 +- src/server/lib/k8sStreamer.ts | 45 +++++++---- src/server/lib/kubernetes.ts | 40 ++++----- src/server/lib/kubernetes/JobMonitor.ts | 22 ++--- .../lib/kubernetes/common/serviceAccount.ts | 8 +- .../lib/kubernetesApply/applyManifest.ts | 6 +- src/server/lib/kubernetesApply/logs.ts | 8 +- src/server/lib/logStreamingHelper.ts | 50 ++++++------ src/server/lib/nativeBuild/engines.ts | 4 +- 
src/server/lib/nativeHelm/utils.ts | 4 +- src/server/lib/queueManager.ts | 4 +- src/server/lib/redisClient.ts | 2 +- src/server/lib/response.ts | 8 +- src/server/lib/tracer/index.ts | 16 ++-- src/server/lib/utils.ts | 17 ++-- src/server/lib/webhook/index.ts | 2 +- src/server/lib/yamlConfigValidator.ts | 2 +- src/server/models/config/utils.ts | 8 +- src/server/models/yaml/Config.ts | 13 ++- src/server/models/yaml/YamlService.ts | 4 +- src/server/services/activityStream.ts | 81 +++++++++---------- .../services/ai/conversation/manager.ts | 11 +-- .../services/ai/orchestration/orchestrator.ts | 19 +++-- .../services/ai/orchestration/safety.ts | 32 ++++---- src/server/services/ai/providers/gemini.ts | 47 ++++++----- src/server/services/ai/service.ts | 23 ++++-- .../services/ai/streaming/jsonBuffer.ts | 8 +- .../services/ai/streaming/responseHandler.ts | 18 ++--- src/server/services/aiAgent.ts | 9 +-- src/server/services/build.ts | 73 +++++++++-------- src/server/services/deploy.ts | 70 ++++++++-------- src/server/services/deployable.ts | 36 ++++----- src/server/services/github.ts | 26 +++--- src/server/services/globalConfig.ts | 10 +-- src/server/services/ingress.ts | 4 +- src/server/services/override.ts | 6 +- src/server/services/pullRequest.ts | 4 +- src/server/services/repository.ts | 4 +- src/server/services/service.ts | 2 +- src/server/services/ttlCleanup.ts | 12 +-- src/server/services/webhook.ts | 2 +- src/shared/utils.ts | 8 +- 83 files changed, 512 insertions(+), 544 deletions(-) diff --git a/src/pages/api/health.ts b/src/pages/api/health.ts index 3fc8a9e..d25aaaf 100644 --- a/src/pages/api/health.ts +++ b/src/pages/api/health.ts @@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next'; import { defaultDb } from 'server/lib/dependencies'; -import logger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import RedisClient from 'server/lib/redisClient'; export default async function healthHandler(req: NextApiRequest, res: NextApiResponse) { @@ -30,7 +30,7 @@ export default async function healthHandler(req: NextApiRequest, res: NextApiRes await defaultDb.knex.raw('SELECT 1'); res.status(200).json({ status: 'Healthy' }); } catch (error) { - logger.error(`Health check failed. Error:\n ${error}`); + getLogger().error({ error }, 'Health: check failed'); return res.status(500).json({ status: 'Unhealthy', error: `An error occurred while performing health check.` }); } } diff --git a/src/pages/api/v1/admin/ttl/cleanup.ts b/src/pages/api/v1/admin/ttl/cleanup.ts index 50449cc..b9eaad5 100644 --- a/src/pages/api/v1/admin/ttl/cleanup.ts +++ b/src/pages/api/v1/admin/ttl/cleanup.ts @@ -157,7 +157,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - getLogger().error({ error }, 'Error occurred on TTL cleanup operation'); + getLogger().error({ error }, 'TTL: cleanup operation failed'); res.status(500).json({ error: 'An unexpected error occurred.' 
}); } }; @@ -169,13 +169,13 @@ async function getTTLConfig(res: NextApiResponse) { const ttlConfig = globalConfig.ttl_cleanup; if (!ttlConfig) { - getLogger().warn('TTL cleanup configuration not found in global config'); + getLogger().warn('TTL: config not found'); return res.status(404).json({ error: 'TTL cleanup configuration not found' }); } return res.status(200).json({ config: ttlConfig }); } catch (error) { - getLogger().error({ error }, 'Error occurred retrieving TTL cleanup config'); + getLogger().error({ error }, 'TTL: config retrieval failed'); return res.status(500).json({ error: 'Unable to retrieve TTL cleanup configuration' }); } } @@ -206,7 +206,7 @@ async function triggerTTLCleanup(req: NextApiRequest, res: NextApiResponse) { dryRun, }); } catch (error) { - getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Error occurred triggering TTL cleanup'); + getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'TTL: cleanup trigger failed'); return res.status(500).json({ error: 'Unable to trigger TTL cleanup job' }); } }); diff --git a/src/pages/api/v1/ai/chat.ts b/src/pages/api/v1/ai/chat.ts index b1cfdac..1bef50e 100644 --- a/src/pages/api/v1/ai/chat.ts +++ b/src/pages/api/v1/ai/chat.ts @@ -66,7 +66,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) await llmService.initialize(); } } catch (error) { - getLogger().error({ error }, 'Failed to initialize LLM service'); + getLogger().error({ error }, 'AI: init failed'); res.write( `data: ${JSON.stringify({ error: error.message, @@ -87,7 +87,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) try { context = await aiAgentContextService.gatherFullContext(buildUuid); } catch (error) { - getLogger().error({ error }, 'Failed to gather context'); + getLogger().error({ error }, 'AI: context gather failed'); res.write( `data: ${JSON.stringify({ error: `Build not found: ${error.message}`, @@ -161,7 +161,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } catch (e) { getLogger().error( { error: e instanceof Error ? e.message : String(e), responseLength: aiResponse.length }, - 'JSON validation failed for investigation response' + 'AI: JSON validation failed' ); aiResponse = '⚠️ Investigation completed but response formatting failed. 
Please try asking a more specific question.'; @@ -175,7 +175,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } } } catch (error: any) { - getLogger().error({ error }, 'LLM query failed'); + getLogger().error({ error }, 'AI: query failed'); if ( error?.status === 429 || @@ -220,7 +220,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) res.end(); }); } catch (error: any) { - getLogger().error({ error }, 'Unexpected error in AI agent chat'); + getLogger().error({ error }, 'AI: chat request failed'); res.write(`data: ${JSON.stringify({ error: error?.message || 'Internal error' })}\n\n`); res.end(); } diff --git a/src/pages/api/v1/ai/models.ts b/src/pages/api/v1/ai/models.ts index 66505f7..a1e9249 100644 --- a/src/pages/api/v1/ai/models.ts +++ b/src/pages/api/v1/ai/models.ts @@ -32,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) } if (!aiAgentConfig.providers || !Array.isArray(aiAgentConfig.providers)) { - getLogger().warn('aiAgent config missing providers array'); + getLogger().warn('AI: config missing providers array'); return res.status(200).json({ models: [] }); } @@ -56,7 +56,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) return res.status(200).json({ models }); } catch (error: any) { - getLogger().error({ error }, 'Failed to fetch available models'); + getLogger().error({ error }, 'AI: models fetch failed'); return res.status(500).json({ error: 'Failed to fetch available models' }); } } diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts index 52b8adc..7d5b08a 100644 --- a/src/pages/api/v1/builds/[uuid]/deploy.ts +++ b/src/pages/api/v1/builds/[uuid]/deploy.ts @@ -120,7 +120,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { message: `Redeploy for build ${uuid} has been queued`, }); } catch (error) { - getLogger({ stage: LogStage.BUILD_FAILED, error }).error(`Unable to proceed with redeploy for build ${uuid}`); + getLogger({ stage: LogStage.BUILD_FAILED, error }).error(`Build: redeploy failed uuid=${uuid}`); return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` }); } }); diff --git a/src/pages/api/v1/builds/[uuid]/graph.ts b/src/pages/api/v1/builds/[uuid]/graph.ts index cfff0a8..4a1ae19 100644 --- a/src/pages/api/v1/builds/[uuid]/graph.ts +++ b/src/pages/api/v1/builds/[uuid]/graph.ts @@ -107,7 +107,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { dependencyGraph: build.dependencyGraph, }); } catch (error) { - getLogger().error({ error }, 'Error fetching dependency graph'); + getLogger().error({ error }, 'Build: dependency graph fetch failed'); res.status(500).json({ error: 'An unexpected error occurred.' 
}); } }); diff --git a/src/pages/api/v1/builds/[uuid]/index.ts b/src/pages/api/v1/builds/[uuid]/index.ts index e2a6c39..c945250 100644 --- a/src/pages/api/v1/builds/[uuid]/index.ts +++ b/src/pages/api/v1/builds/[uuid]/index.ts @@ -53,7 +53,7 @@ async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) { return res.status(200).json(build); } catch (error) { - getLogger({ error }).error('Error fetching build'); + getLogger({ error }).error('API: build fetch failed'); return res.status(500).json({ error: 'An unexpected error occurred' }); } } @@ -105,7 +105,7 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlatio }, }); } catch (error) { - getLogger({ error }).error(`Error updating UUID to newUuid=${newUuid}`); + getLogger({ error }).error(`API: UUID update failed newUuid=${newUuid}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } } diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts index 8976348..2a06087 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts @@ -216,7 +216,7 @@ async function getJobEvents(jobName: string, namespace: string): Promise { const { uuid, jobName } = req.query; return withLogContext({ buildUuid: uuid as string }, async () => { - const logger = getLogger(); - if (req.method !== 'GET') { - logger.warn(`method=${req.method} Method not allowed`); + getLogger().warn(`API: method not allowed method=${req.method}`); res.setHeader('Allow', ['GET']); return res.status(405).json({ error: `${req.method} is not allowed` }); } if (typeof uuid !== 'string' || typeof jobName !== 'string') { - logger.warn(`uuid=${uuid} jobName=${jobName} Missing or invalid query parameters`); + getLogger().warn(`API: invalid params uuid=${uuid} jobName=${jobName}`); return res.status(400).json({ error: 'Missing or invalid uuid or jobName parameters' }); } @@ -249,7 +247,7 @@ const eventsHandler = async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - logger.error({ error }, `jobName=${jobName} Error getting events`); + getLogger().error({ error }, `API: events fetch failed jobName=${jobName}`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts index ecfdcf4..f6e9bd9 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts @@ -100,7 +100,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) const { uuid, jobName } = req.query; return withLogContext({ buildUuid: uuid as string }, async () => { - getLogger().info(`method=${req.method} jobName=${jobName} Job logs endpoint called, delegating to unified handler`); + getLogger().info(`API: job logs called method=${req.method} jobName=${jobName}`); req.query.type = 'webhook'; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts index a8e6811..236b67b 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts @@ -152,7 +152,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { } if (typeof uuid !== 'string' || typeof name !== 'string') { - 
getLogger().warn(`API: invalid parameters uuid=${uuid} name=${name}`); + getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`); return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' }); } diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts index 4931874..06a388f 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts @@ -247,13 +247,13 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => { return withLogContext({ buildUuid: uuid as string }, async () => { if (req.method !== 'GET') { - getLogger().warn(`Method not allowed: method=${req.method}`); + getLogger().warn(`API: method not allowed method=${req.method}`); res.setHeader('Allow', ['GET']); return res.status(405).json({ error: `${req.method} is not allowed` }); } if (typeof uuid !== 'string' || typeof name !== 'string') { - getLogger().warn(`Missing or invalid query parameters: uuid=${uuid} name=${name}`); + getLogger().warn(`API: invalid query params uuid=${uuid} name=${name}`); return res.status(400).json({ error: 'Missing or invalid parameters' }); } @@ -276,10 +276,10 @@ const handler = async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(githubDetails); } - getLogger().warn(`No deployment details found: deployUuid=${deployUuid}`); + getLogger().warn(`API: deployment not found deployUuid=${deployUuid}`); return res.status(404).json({ error: 'Deployment not found' }); } catch (error) { - getLogger().error({ error }, `Error getting deployment details: deployUuid=${deployUuid}`); + getLogger().error({ error }, `API: deployment details error deployUuid=${deployUuid}`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts index ba882cd..6b07cad 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts @@ -176,12 +176,12 @@ const unifiedLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse const isWebhookRequest = type === 'webhook'; if (typeof uuid !== 'string' || typeof jobName !== 'string' || (!isWebhookRequest && typeof name !== 'string')) { - getLogger().warn(`API: invalid parameters uuid=${uuid} name=${name} jobName=${jobName} type=${type}`); + getLogger().warn(`API: invalid params uuid=${uuid} name=${name} jobName=${jobName} type=${type}`); return res.status(400).json({ error: 'Missing or invalid parameters' }); } if (type && (typeof type !== 'string' || !['build', 'deploy', 'webhook'].includes(type))) { - getLogger().warn(`API: invalid type parameter type=${type}`); + getLogger().warn(`API: invalid type param type=${type}`); return res.status(400).json({ error: 'Invalid type parameter. 
Must be "build", "deploy", or "webhook"' }); } @@ -197,7 +197,7 @@ const unifiedLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse return res.status(200).json(response); } catch (error: any) { - getLogger().error({ error }, `API: log stream info fetch failed jobName=${jobName} service=${name}`); + getLogger().error({ error }, `API: log streaming info failed jobName=${jobName} service=${name}`); if (error.message === 'Build not found') { return res.status(404).json({ error: 'Build not found' }); diff --git a/src/pages/api/v1/builds/[uuid]/torndown.ts b/src/pages/api/v1/builds/[uuid]/torndown.ts index bfcbe07..07d93bf 100644 --- a/src/pages/api/v1/builds/[uuid]/torndown.ts +++ b/src/pages/api/v1/builds/[uuid]/torndown.ts @@ -143,7 +143,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { namespacesUpdated: updatedDeploys, }); } catch (error) { - getLogger().error({ error }, 'Error in cleanup API'); + getLogger().error({ error }, 'Build: teardown failed'); return res.status(500).json({ error: 'An unexpected error occurred.' }); } }); diff --git a/src/pages/api/v1/builds/[uuid]/webhooks.ts b/src/pages/api/v1/builds/[uuid]/webhooks.ts index 450bf2f..d1d1c0f 100644 --- a/src/pages/api/v1/builds/[uuid]/webhooks.ts +++ b/src/pages/api/v1/builds/[uuid]/webhooks.ts @@ -207,7 +207,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - getLogger({ error }).error(`Error handling ${req.method} request`); + getLogger({ error }).error(`API: webhook request failed method=${req.method}`); res.status(500).json({ error: 'An unexpected error occurred.' }); } }); @@ -252,9 +252,7 @@ async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) { message: `Webhook for build ${uuid} has been queued`, }); } catch (error) { - getLogger({ stage: LogStage.WEBHOOK_PROCESSING, error }).error( - `Unable to proceed with webhook for build ${uuid}` - ); + getLogger({ stage: LogStage.WEBHOOK_PROCESSING, error }).error(`Webhook: invoke failed uuid=${uuid}`); return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` }); } }); @@ -293,7 +291,7 @@ async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) { }, }); } catch (error) { - getLogger({ error }).error('Failed to retrieve webhooks'); + getLogger({ error }).error('API: webhooks fetch failed'); return res.status(500).json({ error: `Unable to retrieve webhooks for build ${uuid}.` }); } } diff --git a/src/pages/api/v1/builds/index.ts b/src/pages/api/v1/builds/index.ts index 2e442ad..179a8e4 100644 --- a/src/pages/api/v1/builds/index.ts +++ b/src/pages/api/v1/builds/index.ts @@ -204,7 +204,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - getLogger({ error }).error('Failed to fetch builds'); + getLogger({ error }).error('API: builds fetch failed'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/config/cache.ts b/src/pages/api/v1/config/cache.ts index 7c00b88..f1ea090 100644 --- a/src/pages/api/v1/config/cache.ts +++ b/src/pages/api/v1/config/cache.ts @@ -107,7 +107,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(405).json({ error: `${req.method} is not allowed.` }); } } catch (error) { - getLogger().error({ error }, 'Error occurred on config cache operation'); + 
getLogger().error({ error }, 'Config: cache operation failed'); res.status(500).json({ error: 'An unexpected error occurred.' }); } }; @@ -118,7 +118,7 @@ async function getCachedConfig(res: NextApiResponse, refresh: boolean = false) { const configs = await configService.getAllConfigs(refresh); return res.status(200).json({ configs }); } catch (error) { - getLogger().error({ error }, 'Error occurred retrieving cache config'); + getLogger().error({ error }, 'Config: cache retrieval failed'); return res.status(500).json({ error: `Unable to retrieve global config values` }); } } diff --git a/src/pages/api/v1/deploy-summary.ts b/src/pages/api/v1/deploy-summary.ts index dfe467f..2b5891b 100644 --- a/src/pages/api/v1/deploy-summary.ts +++ b/src/pages/api/v1/deploy-summary.ts @@ -158,7 +158,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(result.rows); } catch (error) { - getLogger({ error }).error(`Failed to fetch deploy summary: buildId=${parsedBuildId}`); + getLogger({ error }).error(`API: deploy summary fetch failed buildId=${parsedBuildId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/deployables.ts b/src/pages/api/v1/deployables.ts index 34e96b0..f8e7d4a 100644 --- a/src/pages/api/v1/deployables.ts +++ b/src/pages/api/v1/deployables.ts @@ -159,7 +159,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(deployables); } catch (error) { - getLogger({ error }).error(`Failed to fetch deployables: buildId=${parsedBuildId}`); + getLogger({ error }).error(`API: deployables fetch failed buildId=${parsedBuildId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/deploys.ts b/src/pages/api/v1/deploys.ts index bfb0b11..4270cfd 100644 --- a/src/pages/api/v1/deploys.ts +++ b/src/pages/api/v1/deploys.ts @@ -222,7 +222,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(deploys); } catch (error) { - getLogger({ error }).error(`Failed to fetch deploys: buildId=${parsedBuildId}`); + getLogger({ error }).error(`API: deploys fetch failed buildId=${parsedBuildId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/[id]/builds.ts b/src/pages/api/v1/pull-requests/[id]/builds.ts index 9ab2e43..161050f 100644 --- a/src/pages/api/v1/pull-requests/[id]/builds.ts +++ b/src/pages/api/v1/pull-requests/[id]/builds.ts @@ -155,7 +155,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(builds); } catch (error) { - getLogger().error({ error }, `Failed to fetch builds for pull request: id=${parsedId}`); + getLogger().error({ error }, `API: builds fetch failed pullRequestId=${parsedId}`); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/[id]/index.ts b/src/pages/api/v1/pull-requests/[id]/index.ts index 7f1bfea..f5d0ead 100644 --- a/src/pages/api/v1/pull-requests/[id]/index.ts +++ b/src/pages/api/v1/pull-requests/[id]/index.ts @@ -144,7 +144,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(pullRequest); } catch (error) { - getLogger().error({ error }, `Failed to fetch pull request: id=${parsedId}`); + getLogger().error({ error }, `API: pull request fetch failed id=${parsedId}`); return res.status(500).json({ 
error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/pull-requests/index.ts b/src/pages/api/v1/pull-requests/index.ts index 486dabc..0002410 100644 --- a/src/pages/api/v1/pull-requests/index.ts +++ b/src/pages/api/v1/pull-requests/index.ts @@ -271,7 +271,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - getLogger().error({ error }, 'Failed to fetch pull requests'); + getLogger().error({ error }, 'API: pull requests fetch failed'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/repos/index.ts b/src/pages/api/v1/repos/index.ts index b849602..6f45b27 100644 --- a/src/pages/api/v1/repos/index.ts +++ b/src/pages/api/v1/repos/index.ts @@ -180,7 +180,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return res.status(200).json(response); } catch (error) { - getLogger().error({ error }, 'Error fetching repos'); + getLogger().error({ error }, 'API: repos fetch failed'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/v1/schema/validate.ts b/src/pages/api/v1/schema/validate.ts index af30f62..2237508 100644 --- a/src/pages/api/v1/schema/validate.ts +++ b/src/pages/api/v1/schema/validate.ts @@ -130,7 +130,7 @@ const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse { return res.status(200).json(response); } catch (error) { - getLogger().error({ error }, 'Error fetching users'); + getLogger().error({ error }, 'API: users fetch failed'); return res.status(500).json({ error: 'An unexpected error occurred' }); } }; diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts index 65e1c63..9f3c699 100644 --- a/src/pages/api/webhooks/github.ts +++ b/src/pages/api/webhooks/github.ts @@ -44,10 +44,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { return; } - getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook received: event=${event}`); + getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook: received event=${event}`); if (!['web', 'all'].includes(LIFECYCLE_MODE)) { - getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).info('Skipped: wrong LIFECYCLE_MODE'); + getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).info('Webhook: skipped reason=wrongMode'); return; } @@ -60,10 +60,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { ...extractContextForQueue(), }); - getLogger({ stage: LogStage.WEBHOOK_QUEUED }).info('Webhook queued for processing'); + getLogger({ stage: LogStage.WEBHOOK_QUEUED }).info('Webhook: queued'); res.status(200).end(); } catch (error) { - getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).error({ error }, 'Webhook failure'); + getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).error({ error }, 'Webhook: processing failed'); res.status(500).end(); } }); diff --git a/src/server/database.ts b/src/server/database.ts index 0024f71..cdbd9fd 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -21,11 +21,7 @@ import { IServices } from 'server/services/types'; import Model from 'server/models/_Model'; import knexfile from '../../knexfile'; -import rootLogger from 'server/lib/logger'; - -const initialLogger = rootLogger.child({ - filename: 'server/database.ts', -}); +import { getLogger } from 'server/lib/logger/index'; export default class Database { models: models.IModels; @@ -52,8 +48,8 @@ export default class Database { this.knexConfig = 
merge({}, knexfile, knexConfig); } - setLifecycleConfig(config: any = {}, logger = initialLogger) { - logger.debug('setLifecycleConfig: setting config', { config }); + setLifecycleConfig(config: any = {}) { + getLogger().debug('Database: setting lifecycle config'); this.config = merge({}, this.config, config); } diff --git a/src/server/jobs/index.ts b/src/server/jobs/index.ts index 770fb13..025d926 100644 --- a/src/server/jobs/index.ts +++ b/src/server/jobs/index.ts @@ -15,7 +15,7 @@ */ import { IServices } from 'server/services/types'; -import rootLogger from '../lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { defaultDb, redisClient } from 'server/lib/dependencies'; import RedisClient from 'server/lib/redisClient'; import QueueManager from 'server/lib/queueManager'; @@ -23,16 +23,12 @@ import { MAX_GITHUB_API_REQUEST, GITHUB_API_REQUEST_INTERVAL, QUEUE_NAMES } from let isBootstrapped = false; -const logger = rootLogger.child({ - filename: 'jobs/index.ts', -}); - export default function bootstrapJobs(services: IServices) { if (defaultDb.services) { return; } - logger.info(`Bootstrapping jobs...... Yes`); + getLogger().info('Jobs: bootstrapping'); const queueManager = QueueManager.getInstance(); queueManager.registerWorker(QUEUE_NAMES.WEBHOOK_PROCESSING, services.GithubService.processWebhooks, { @@ -115,7 +111,7 @@ export default function bootstrapJobs(services: IServices) { // This function is used to handle graceful shutdowns add things as needed. const handleExit = async (signal: string) => { - logger.info(` ✍️Shutting down (${signal})`); + getLogger().info(`Jobs: shutting down signal=${signal}`); try { const redisClient = RedisClient.getInstance(); const queueManager = QueueManager.getInstance(); @@ -123,15 +119,15 @@ export default function bootstrapJobs(services: IServices) { await redisClient.close(); process.exit(0); } catch (error) { - logger.info(`Unable to shutdown gracefully: ${error}`); + getLogger().error({ error }, 'Jobs: shutdown failed'); process.exit(0); } }; process.on('SIGINT', () => handleExit('SIGINT')); process.on('SIGTERM', () => handleExit('SIGTERM')); - logger.info(' ✍️Signal handlers registered'); + getLogger().info('Jobs: signal handlers registered'); } } - logger.info('Bootstrapping complete'); + getLogger().info('Jobs: bootstrap complete'); } diff --git a/src/server/lib/__tests__/utils.test.ts b/src/server/lib/__tests__/utils.test.ts index 2abb59f..bdefab0 100644 --- a/src/server/lib/__tests__/utils.test.ts +++ b/src/server/lib/__tests__/utils.test.ts @@ -54,8 +54,15 @@ jest.mock('server/services/globalConfig', () => { }; }); -jest.mock('server/lib/logger'); -import logger from 'server/lib/logger'; +jest.mock('server/lib/logger/index', () => ({ + getLogger: jest.fn().mockReturnValue({ + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + debug: jest.fn(), + }), +})); +import { getLogger } from 'server/lib/logger/index'; describe('exec', () => { test('exec success', async () => { @@ -68,8 +75,8 @@ describe('exec', () => { test('exec failure', async () => { const execCmd = jest.fn().mockRejectedValue(new Error('error')); - await exec('cmd', ['arg1', 'arg2'], { logger, execCmd }); - expect(logger.error).toHaveBeenCalledWith('exec: error executing {}'); + await exec('cmd', ['arg1', 'arg2'], { execCmd }); + expect(getLogger().error).toHaveBeenCalledWith('Exec: error executing runner=cmd error={}'); }); test('exec no stdout', async () => { diff --git a/src/server/lib/buildEnvVariables.ts b/src/server/lib/buildEnvVariables.ts index 
7fde122..e0bc7db 100644 --- a/src/server/lib/buildEnvVariables.ts +++ b/src/server/lib/buildEnvVariables.ts @@ -59,7 +59,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { error.uuid = deploy.uuid; throw error; } else { - getLogger().warn({ error }, 'Fallback using database Environment Variables'); + getLogger().warn({ error }, 'EnvVars: fallback to database'); } } } @@ -106,7 +106,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { error.uuid = deploy.uuid; throw error; } else { - getLogger().warn({ error }, 'Fallback using database Init Environment Variables'); + getLogger().warn({ error }, 'EnvVars: init fallback to database'); } } } @@ -155,7 +155,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { ), }) .catch((error) => { - getLogger().error({ error }, 'Problem when preparing env variable'); + getLogger().error({ error }, 'EnvVars: preparation failed'); }); if (deploy.deployable?.initDockerfilePath || deploy.service?.initDockerfilePath) { @@ -174,7 +174,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables { ), }) .catch((error) => { - getLogger().error({ error }, 'Problem when preparing init env variable'); + getLogger().error({ error }, 'EnvVars: init preparation failed'); }); } }); diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts index 05b94b3..1aa5218 100644 --- a/src/server/lib/cli.ts +++ b/src/server/lib/cli.ts @@ -148,7 +148,7 @@ export async function codefreshDestroy(deploy: Deploy) { const id = output?.trim(); return id; } catch (error) { - getLogger({ error }).error('Error destroying Codefresh pipeline'); + getLogger({ error }).error('Codefresh: pipeline destroy failed'); throw error; } } @@ -202,7 +202,7 @@ export async function deleteBuild(build: Build) { ); getLogger().info('CLI: deleted'); } catch (e) { - getLogger({ error: e }).error('Error deleting CLI resources'); + getLogger({ error: e }).error('CLI: delete failed'); } } diff --git a/src/server/lib/codefresh/index.ts b/src/server/lib/codefresh/index.ts index 9954d0f..ed0ce0d 100644 --- a/src/server/lib/codefresh/index.ts +++ b/src/server/lib/codefresh/index.ts @@ -55,7 +55,7 @@ export const buildImage = async (options: ContainerBuildOptions) => { metrics .increment('total', { error: 'error_with_cli_output', result: 'error', codefreshBuildId: '' }) .event(eventDetails.title, eventDetails.description); - getLogger().error({ output }, `buildImage noCodefreshBuildOutput: no output from Codefresh for ${suffix}`); + getLogger().error({ output }, `Codefresh: build output missing suffix=${suffix}`); if (!hasOutput) throw Error('no output from Codefresh'); } const codefreshBuildId = getCodefreshPipelineIdFromOutput(output); @@ -70,7 +70,7 @@ export const buildImage = async (options: ContainerBuildOptions) => { .event(eventDetails.title, eventDetails.description); return codefreshBuildId; } catch (error) { - getLogger().error({ error }, `buildImage failed for ${suffix}`); + getLogger().error({ error }, `Codefresh: build failed suffix=${suffix}`); throw error; } }; diff --git a/src/server/lib/configFileWebhookEnvVariables.ts b/src/server/lib/configFileWebhookEnvVariables.ts index 41ef63d..6cabc08 100644 --- a/src/server/lib/configFileWebhookEnvVariables.ts +++ b/src/server/lib/configFileWebhookEnvVariables.ts @@ -54,7 +54,7 @@ export class ConfigFileWebhookEnvironmentVariables extends EnvironmentVariables await build?.$fetchGraph('[services, deploys.service.repository]'); } else { - getLogger().fatal("Build and Webhook shouldn't 
be undefined"); + getLogger().fatal('Webhook: build and webhook undefined'); } return result; diff --git a/src/server/lib/fastly.ts b/src/server/lib/fastly.ts index 78c0afb..fb39818 100644 --- a/src/server/lib/fastly.ts +++ b/src/server/lib/fastly.ts @@ -79,7 +79,7 @@ class Fastly { this.redis.expire(cacheKey, 86400); return id; } catch (error) { - getLogger().warn({ error }, `Fastly lookup failed: serviceName=${name}`); + getLogger().warn({ error }, `Fastly: lookup failed service=${name}`); } } @@ -107,7 +107,7 @@ class Fastly { if (!serviceId) throw new Error('Service ID is missing'); await fastlyPurge.purgeAll({ service_id: serviceId }); } catch (error) { - getLogger().info({ error }, `Fastly cache purge failed: serviceId=${serviceId} serviceType=${fastlyServiceType}`); + getLogger().warn({ error }, `Fastly: purge failed serviceId=${serviceId} type=${fastlyServiceType}`); } } diff --git a/src/server/lib/github/cacheRequest.ts b/src/server/lib/github/cacheRequest.ts index be30b78..b82cb38 100644 --- a/src/server/lib/github/cacheRequest.ts +++ b/src/server/lib/github/cacheRequest.ts @@ -15,24 +15,19 @@ */ import { cloneDeep, merge } from 'lodash'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { GITHUB_API_CACHE_EXPIRATION_SECONDS } from 'shared/constants'; import { createOctokitClient } from 'server/lib/github/client'; import { CacheRequestData } from 'server/lib/github/types'; import { redisClient } from 'server/lib/dependencies'; -const initialLogger = rootLogger.child({ - filename: 'lib/github/cacheRequest.ts', -}); - export async function cacheRequest( endpoint: string, requestData = {} as CacheRequestData, - { logger = initialLogger, cache = redisClient.getRedis(), ignoreCache = false } = {} + { cache = redisClient.getRedis(), ignoreCache = false } = {} ) { const cacheKey = `github:req_cache:${endpoint}`; - const text = `[GITHUB ${cacheKey}][cacheRequest]`; let cached; try { const octokit = await createOctokitClient({ caller: 'cacheRequest' }); @@ -70,16 +65,16 @@ export async function cacheRequest( const data = JSON.parse(cached?.data); return { data }; } catch (error) { - return cacheRequest(endpoint, requestData, { logger, cache, ignoreCache: true }); + return cacheRequest(endpoint, requestData, { cache, ignoreCache: true }); } } else if (error?.status === 404) { - const msg = '[retryCacheRequest] The requested resource was not found. Maybe the branch was deleted?'; - logger.child({ error }).info(`${text} ${msg}`); + const msg = 'The requested resource was not found. 
Maybe the branch was deleted?'; + getLogger().info(`GitHub: cache request not found endpoint=${endpoint}`); throw new Error(error?.message || msg); } else { const msg = 'cache request request error'; const message = error?.message || msg; - logger.child({ error }).error(`${text} ${msg}`); + getLogger().error(`GitHub: cache request error endpoint=${endpoint} error=${message}`); throw new Error(message); } } diff --git a/src/server/lib/github/deployments.ts b/src/server/lib/github/deployments.ts index add150b..1951de1 100644 --- a/src/server/lib/github/deployments.ts +++ b/src/server/lib/github/deployments.ts @@ -41,7 +41,7 @@ function lifecycleToGithubStatus(status: string) { export async function createOrUpdateGithubDeployment(deploy: Deploy) { getLogger().debug('Creating or updating github deployment'); try { - getLogger().info('Deploy status update'); + getLogger().info('GitHub: deployment status updated'); await deploy.$fetchGraph('build.pullRequest.repository'); const githubDeploymentId = deploy?.githubDeploymentId; const build = deploy?.build; @@ -68,7 +68,7 @@ export async function createOrUpdateGithubDeployment(deploy: Deploy) { await updateDeploymentStatus(deploy, githubDeploymentId); } } catch (error) { - getLogger({ error }).error('Error creating or updating github deployment'); + getLogger({ error }).error('GitHub: deployment update failed'); throw error; } } @@ -104,7 +104,7 @@ export async function createGithubDeployment(deploy: Deploy, ref: string) { getLogger({ error, repo: fullName, - }).error('Error creating github deployment'); + }).error('GitHub: deployment create failed'); throw error; } } diff --git a/src/server/lib/github/index.ts b/src/server/lib/github/index.ts index e189855..a899efa 100644 --- a/src/server/lib/github/index.ts +++ b/src/server/lib/github/index.ts @@ -46,13 +46,12 @@ export async function createOrUpdatePullRequestComment({ headers: { etag }, }); } catch (error) { - const msg = 'Unable to create or update pull request comment'; getLogger({ error, repo: fullName, pr: pullRequestNumber, - }).error(msg); - throw new Error(error?.message || msg); + }).error('GitHub: comment update failed'); + throw new Error(error?.message || 'Unable to create or update pull request comment'); } } @@ -79,7 +78,7 @@ export async function updatePullRequestLabels({ repo: fullName, pr: pullRequestNumber, labels: labels.toString(), - }).error('Unable to update pull request labels'); + }).error('GitHub: labels update failed'); throw error; } } @@ -88,13 +87,12 @@ export async function getPullRequest(owner: string, name: string, pullRequestNum try { return await cacheRequest(`GET /repos/${owner}/${name}/pulls/${pullRequestNumber}`); } catch (error) { - const msg = 'Unable to retrieve pull request'; getLogger({ error, repo: `${owner}/${name}`, pr: pullRequestNumber, - }).error(msg); - throw new Error(error?.message || msg); + }).error('GitHub: pull request fetch failed'); + throw new Error(error?.message || 'Unable to retrieve pull request'); } } @@ -102,13 +100,12 @@ export async function getPullRequestByRepositoryFullName(fullName: string, pullR try { return await cacheRequest(`GET /repos/${fullName}/pulls/${pullRequestNumber}`); } catch (error) { - const msg = 'Unable to retrieve pull request'; getLogger({ error, repo: fullName, pr: pullRequestNumber, - }).error(msg); - throw new Error(error?.message || msg); + }).error('GitHub: pull request fetch failed'); + throw new Error(error?.message || 'Unable to retrieve pull request'); } } @@ -137,7 +134,7 @@ export async function 
getPullRequestLabels({ error, repo: fullName, pr: pullRequestNumber, - }).error('Unable to fetch labels'); + }).error('GitHub: labels fetch failed'); throw error; } } @@ -152,13 +149,12 @@ export async function createDeploy({ owner, name, branch, installationId }: Repo }, }); } catch (error) { - const msg = 'Unable to create deploy'; getLogger({ error, repo: `${owner}/${name}`, branch, - }).error(msg); - throw new Error(error?.message || msg); + }).error('GitHub: deploy create failed'); + throw new Error(error?.message || 'Unable to create deploy'); } } @@ -199,13 +195,12 @@ export async function getSHAForBranch(branchName: string, owner: string, name: s const ref = await getRefForBranchName(owner, name, branchName); return ref?.data?.object?.sha; } catch (error) { - const msg = 'Unable to retrieve SHA from branch'; getLogger({ error, repo: `${owner}/${name}`, branch: branchName, - }).warn(msg); - throw new Error(error?.message || msg); + }).warn('GitHub: SHA fetch failed'); + throw new Error(error?.message || 'Unable to retrieve SHA from branch'); } } @@ -243,9 +238,8 @@ export async function getYamlFileContent({ fullName, branch = '', sha = '', isJS return configData; } catch (error) { - const msg = 'No lifecycle yaml found or parsed'; - getLogger({ error, repo: fullName, branch }).warn(msg); - throw new ConfigFileNotFound(error?.message || msg); + getLogger({ error, repo: fullName, branch }).warn('GitHub: yaml fetch failed'); + throw new ConfigFileNotFound(error?.message || 'No lifecycle yaml found or parsed'); } } @@ -258,13 +252,12 @@ export async function getYamlFileContentFromPullRequest(fullName: string, pullRe if (!config) throw new Error('Unable to get config from pull request'); return config; } catch (error) { - const msg = 'Unable to retrieve YAML file content from pull request'; getLogger({ error, repo: fullName, pr: pullRequestNumber, - }).warn(msg); - throw new ConfigFileNotFound(error?.message || msg); + }).warn('GitHub: yaml fetch failed'); + throw new ConfigFileNotFound(error?.message || 'Unable to retrieve YAML file content from pull request'); } } @@ -276,13 +269,12 @@ export async function getYamlFileContentFromBranch( const config = await getYamlFileContent({ fullName, branch: branchName }); return config; } catch (error) { - const msg = 'Unable to retrieve YAML file content from branch'; getLogger({ error, repo: fullName, branch: branchName, - }).warn(msg); - throw new ConfigFileNotFound(error?.message || msg); + }).warn('GitHub: yaml fetch failed'); + throw new ConfigFileNotFound(error?.message || 'Unable to retrieve YAML file content from branch'); } } @@ -301,7 +293,7 @@ export async function checkIfCommentExists({ error, repo: fullName, pr: pullRequestNumber, - }).error('Unable to check for comments'); + }).error('GitHub: comments check failed'); return false; } } diff --git a/src/server/lib/github/utils/index.ts b/src/server/lib/github/utils/index.ts index ccb425d..025f679 100644 --- a/src/server/lib/github/utils/index.ts +++ b/src/server/lib/github/utils/index.ts @@ -15,22 +15,20 @@ */ import { Octokit } from '@octokit/core'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { cacheRequest } from 'server/lib/github/cacheRequest'; import { ConstructOctokitClientOptions, GetAppTokenOptions } from 'server/lib/github/types'; -const initialLogger = rootLogger.child({ - filename: 'lib/github/utils.ts', -}); - -export const getAppToken = async ({ installationId, app, logger = initialLogger }: GetAppTokenOptions) 
=> { +export const getAppToken = async ({ installationId, app }: Omit) => { try { const resp = await app({ type: 'installation', installationId }); return resp?.token; } catch (error) { const msg = 'Unable to get App Token'; - logger.child({ error }).error(`[GITHUB createOctokitClient] Unable to create a new client`); + getLogger().error( + `GitHub: unable to get app token installationId=${installationId} error=${error?.message || msg}` + ); throw new Error(error?.message || msg); } }; @@ -45,12 +43,14 @@ export const constructOctokitClient = ({ token }: ConstructOctokitClientOptions) }); }; -export async function getRefForBranchName(owner: string, name: string, branchName: string, logger = initialLogger) { +export async function getRefForBranchName(owner: string, name: string, branchName: string) { try { return await cacheRequest(`GET /repos/${owner}/${name}/git/ref/heads/${branchName}`); } catch (error) { const msg = 'Unable to get ref for Branch Name'; - logger.child({ error }).error(`[GITHUB ${owner}/${name}:${branchName}][getRefForBranchName] ${msg}`); + getLogger().error( + `GitHub: unable to get ref for branch repo=${owner}/${name} branch=${branchName} error=${error?.message || msg}` + ); throw new Error(error?.message || msg); } } diff --git a/src/server/lib/helm/helm.ts b/src/server/lib/helm/helm.ts index 2ca0eac..3ae50d4 100644 --- a/src/server/lib/helm/helm.ts +++ b/src/server/lib/helm/helm.ts @@ -287,7 +287,7 @@ export async function generateCodefreshRunCommand(deploy: Deploy): Promise void; } @@ -52,7 +48,6 @@ export function streamK8sLogs( ): AbortHandle { const { podName, namespace, containerName: rawContainerName, follow, tailLines, timestamps } = params; const containerName = rawContainerName.startsWith('[init] ') ? rawContainerName.substring(7) : rawContainerName; - const logCtx = { podName, namespace, containerName, follow, tailLines }; const kc = new KubeConfig(); kc.loadFromDefault(); @@ -77,7 +72,10 @@ export function streamK8sLogs( } } } catch (e: any) { - logger.error({ ...logCtx, err: e }, 'Error processing log stream data chunk'); + getLogger().error( + { error: e }, + `K8sStream: data chunk processing failed podName=${podName} namespace=${namespace} containerName=${containerName}` + ); } }); @@ -91,7 +89,10 @@ export function streamK8sLogs( } callbacks.onEnd(); } catch (e: any) { - logger.error({ ...logCtx, err: e }, 'Error during log stream end processing'); + getLogger().error( + { error: e }, + `K8sStream: end processing failed podName=${podName} namespace=${namespace} containerName=${containerName}` + ); callbacks.onError(e instanceof Error ? 
e : new Error(String(e))); } }); @@ -99,7 +100,10 @@ export function streamK8sLogs( stream.on('error', (err) => { if (streamEnded) return; streamEnded = true; - logger.error({ ...logCtx, err }, 'K8s log stream encountered an error event.'); + getLogger().error( + { error: err }, + `K8sStream: error event received podName=${podName} namespace=${namespace} containerName=${containerName}` + ); buffer = ''; callbacks.onError(err); }); @@ -115,12 +119,17 @@ export function streamK8sLogs( k8sRequest = await k8sLog.log(namespace, podName, containerName, stream as Writable, logOptions); - logger.debug(logCtx, 'k8sLog.log promise resolved (stream likely ended or follow=false).'); + getLogger().debug( + `K8sStream: promise resolved podName=${podName} namespace=${namespace} containerName=${containerName} follow=${follow}` + ); if (k8sRequest) { k8sRequest.on('error', (err: Error) => { if (streamEnded) return; - logger.error({ ...logCtx, err }, 'K8s request object emitted error.'); + getLogger().error( + { error: err }, + `K8sStream: request error emitted podName=${podName} namespace=${namespace} containerName=${containerName}` + ); if (stream.writable) { stream.emit('error', err); } else { @@ -137,7 +146,10 @@ export function streamK8sLogs( } catch (err: any) { if (streamEnded) return; if (err.name !== 'AbortError') { - logger.error({ ...logCtx, err }, 'Failed to establish K8s log stream connection.'); + getLogger().error( + { error: err }, + `K8sStream: connection failed podName=${podName} namespace=${namespace} containerName=${containerName}` + ); buffer = ''; if (stream.writable) { stream.emit('error', err); @@ -158,10 +170,15 @@ export function streamK8sLogs( try { k8sRequest.abort(); } catch (abortErr) { - logger.error({ ...logCtx, err: abortErr }, 'Error calling abort() on K8s request.'); + getLogger().error( + { error: abortErr }, + `K8sStream: abort call failed podName=${podName} namespace=${namespace} containerName=${containerName}` + ); } } else { - logger.warn(logCtx, "Abort requested, but K8s request object not available or doesn't have abort method."); + getLogger().warn( + `K8sStream: abort requested but request unavailable podName=${podName} namespace=${namespace} containerName=${containerName}` + ); } stream.destroy(); streamEnded = true; diff --git a/src/server/lib/kubernetes.ts b/src/server/lib/kubernetes.ts index 77d49d3..730cc9a 100644 --- a/src/server/lib/kubernetes.ts +++ b/src/server/lib/kubernetes.ts @@ -47,7 +47,7 @@ async function namespaceExists(client: k8s.CoreV1Api, name: string): Promise setTimeout(r, 5000)); } else { - getLogger(logCtx).warn('No pods found within 5 minutes'); + getLogger(logCtx).warn('Pod: not found timeout=5m'); break; } } @@ -545,7 +545,7 @@ export async function waitForPodReady(build: Build) { return conditions.some((condition) => condition?.type === 'Ready' && condition?.status === 'True'); }); } catch (error) { - getLogger({ ...logCtx, error, isReady }).warn('Error checking pod readiness'); + getLogger({ ...logCtx, error, isReady }).warn('Pod: readiness check failed'); } if (isReady) { @@ -577,7 +577,7 @@ export async function deleteBuild(build: Build) { getLogger({ namespace: build.namespace, error: e, - }).error('Error deleting kubernetes resources'); + }).error('Resources: delete failed'); } } @@ -595,7 +595,7 @@ export async function deleteNamespace(name: string) { if (e.includes('Error from server (NotFound): namespaces')) { getLogger({ namespace: name }).info('Deploy: namespace skipped reason=notFound'); } else { - getLogger({ namespace: 
name, error: e }).error('Error deleting namespace'); + getLogger({ namespace: name, error: e }).error('Namespace: delete failed'); } } } @@ -1112,7 +1112,7 @@ export function generateDeployManifests( }); break; default: - getLogger({ medium: disk.medium }).warn('Unknown disk medium type'); + getLogger({ medium: disk.medium }).warn(`Disk: unknown medium medium=${disk.medium}`); } }); } @@ -1590,7 +1590,7 @@ async function getExistingIngress(ingressName: string, namespace: string): Promi const response = await k8sApi.readNamespacedIngress(ingressName, namespace); return response.body; } catch (error) { - getLogger({ ingressName, namespace, error }).warn('Failed to get existing ingress'); + getLogger({ ingressName, namespace, error }).warn('Ingress: fetch failed'); return null; } } @@ -1641,7 +1641,7 @@ export async function patchIngress(ingressName: string, bannerSnippet: any, name getLogger({ ingressName, namespace }).info('Deploy: ingress patched'); } catch (error) { - getLogger({ ingressName, namespace, error }).warn('Unable to patch ingress, banner might not work'); + getLogger({ ingressName, namespace, error }).warn('Ingress: patch failed (banner may not work)'); throw error; } } @@ -1669,7 +1669,7 @@ export async function updateSecret(secretName: string, secretData: Record { } if (retries >= 60) { - getLogger(logCtx).warn('No pods found within 5 minutes'); + getLogger(logCtx).warn('Pod: not found timeout=5m'); return false; } @@ -2204,7 +2204,7 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { const pods = allPods.filter((pod) => !pod.metadata?.name?.includes('-deploy-')); if (pods.length === 0) { - getLogger(logCtx).warn('No deployment pods found'); + getLogger(logCtx).warn('Pod: deployment pods not found'); return false; } @@ -2223,6 +2223,6 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise { await new Promise((r) => setTimeout(r, 5000)); } - getLogger(logCtx).warn('Pods not ready within 15 minutes'); + getLogger(logCtx).warn('Pod: not ready timeout=15m'); return false; } diff --git a/src/server/lib/kubernetes/JobMonitor.ts b/src/server/lib/kubernetes/JobMonitor.ts index 52a3fe3..420dbb8 100644 --- a/src/server/lib/kubernetes/JobMonitor.ts +++ b/src/server/lib/kubernetes/JobMonitor.ts @@ -70,7 +70,7 @@ export class JobMonitor { status, }; } catch (error) { - getLogger().error(`Error monitoring job ${this.jobName}: ${error.message}`); + getLogger().error({ error }, `Job: monitor failed name=${this.jobName}`); return { logs: logs || `Job monitoring failed: ${error.message}`, success: false, @@ -146,12 +146,14 @@ export class JobMonitor { ); logs += `\n=== Init Container Logs (${initName}) ===\n${initLogs}\n`; } catch (err: any) { - getLogger().debug(`Could not get logs for init container ${initName}: ${err.message || 'Unknown error'}`); + getLogger().debug( + `K8s: init container logs failed container=${initName} error=${err.message || 'Unknown error'}` + ); } } } } catch (error: any) { - getLogger().debug(`No init containers found for pod ${podName}: ${error.message || 'Unknown error'}`); + getLogger().debug(`K8s: no init containers found pod=${podName} error=${error.message || 'Unknown error'}`); } return logs; @@ -176,7 +178,7 @@ export class JobMonitor { const waiting = statuses.find((s: any) => s.state.waiting); if (waiting && waiting.state.waiting.reason) { getLogger().info( - `Container ${waiting.name} is waiting: ${waiting.state.waiting.reason} - ${ + `Container: waiting name=${waiting.name} reason=${waiting.state.waiting.reason} 
message=${ waiting.state.waiting.message || 'no message' }` ); @@ -209,7 +211,7 @@ export class JobMonitor { containerNames = containerNames.filter((name) => containerFilters.includes(name)); } } catch (error) { - getLogger().warn(`Could not get container names: ${error}`); + getLogger().warn({ error }, `Container: names fetch failed`); } for (const containerName of containerNames) { @@ -223,7 +225,7 @@ export class JobMonitor { logs += `\n=== Container Logs (${containerName}) ===\n${containerLog}\n`; } } catch (error: any) { - getLogger().warn(`Error getting logs from container ${containerName}: ${error.message}`); + getLogger().warn({ error }, `Container: logs fetch failed name=${containerName}`); logs += `\n=== Container Logs (${containerName}) ===\nError retrieving logs: ${error.message}\n`; } } @@ -278,7 +280,7 @@ export class JobMonitor { ); if (failedStatus.trim() === 'True') { - getLogger().error(`Job ${this.jobName} failed`); + getLogger().error(`Job: failed name=${this.jobName}`); // Check if job was superseded try { @@ -288,13 +290,13 @@ export class JobMonitor { ); if (annotations === 'superseded-by-retry') { - getLogger().info(`${logPrefix || ''} Job ${this.jobName} superseded by newer deployment`); + getLogger().info(`K8s: job superseded name=${this.jobName}`); success = true; status = 'superseded'; } } catch (annotationError: any) { getLogger().debug( - `Could not check supersession annotation for job ${this.jobName}: ${ + `K8s: supersession annotation check failed job=${this.jobName} error=${ annotationError.message || 'Unknown error' }` ); @@ -304,7 +306,7 @@ export class JobMonitor { status = 'succeeded'; } } catch (error) { - getLogger().error(`Failed to check job status for ${this.jobName}:`, error); + getLogger().error({ error }, `Job: status check failed name=${this.jobName}`); } return { success, status }; diff --git a/src/server/lib/kubernetes/common/serviceAccount.ts b/src/server/lib/kubernetes/common/serviceAccount.ts index 3081126..975d14a 100644 --- a/src/server/lib/kubernetes/common/serviceAccount.ts +++ b/src/server/lib/kubernetes/common/serviceAccount.ts @@ -15,11 +15,9 @@ */ import GlobalConfigService from 'server/services/globalConfig'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import { setupServiceAccountInNamespace } from '../../nativeHelm/utils'; -const logger = rootLogger.child({ filename: 'lib/kubernetes/serviceAccount.ts' }); - export async function ensureServiceAccountForJob( namespace: string, jobType: 'build' | 'deploy' | 'webhook' @@ -28,8 +26,8 @@ export async function ensureServiceAccountForJob( const serviceAccountName = serviceAccount?.name || 'default'; const role = serviceAccount?.role || 'default'; - logger.info( - `Setting up service account for ${jobType} job: namespace=${namespace} serviceAccount=${serviceAccountName} role=${role}` + getLogger().info( + `ServiceAccount: setting up for job type=${jobType} namespace=${namespace} serviceAccount=${serviceAccountName} role=${role}` ); await setupServiceAccountInNamespace(namespace, serviceAccountName, role); diff --git a/src/server/lib/kubernetesApply/applyManifest.ts b/src/server/lib/kubernetesApply/applyManifest.ts index 24e3b15..940c5b9 100644 --- a/src/server/lib/kubernetesApply/applyManifest.ts +++ b/src/server/lib/kubernetesApply/applyManifest.ts @@ -38,7 +38,7 @@ export async function createKubernetesApplyJob({ const jobName = `${deploy.uuid}-deploy-${jobId}-${shortSha}`.substring(0, 63); const serviceName = 
deploy.deployable?.name || deploy.service?.name || ''; - getLogger().info(`Creating Kubernetes apply job: jobName=${jobName} service=${serviceName}`); + getLogger().info(`Job: creating name=${jobName} service=${serviceName}`); const configMapName = `${jobName}-manifest`; await createManifestConfigMap(deploy, configMapName, namespace); @@ -132,7 +132,7 @@ export async function createKubernetesApplyJob({ }; const createdJob = await batchApi.createNamespacedJob(namespace, job); - getLogger().info(`Created Kubernetes apply job: jobName=${jobName} jobId=${jobId}`); + getLogger().info(`Job: created name=${jobName} jobId=${jobId}`); return createdJob.body; } @@ -218,7 +218,7 @@ export async function monitorKubernetesJob( await new Promise((resolve) => setTimeout(resolve, 5000)); attempts++; } catch (error) { - getLogger().error({ error }, `Error monitoring job: jobName=${jobName}`); + getLogger({ error }).error(`Job: monitor failed name=${jobName}`); throw error; } } diff --git a/src/server/lib/kubernetesApply/logs.ts b/src/server/lib/kubernetesApply/logs.ts index 4a7b20d..857fc6f 100644 --- a/src/server/lib/kubernetesApply/logs.ts +++ b/src/server/lib/kubernetesApply/logs.ts @@ -103,14 +103,14 @@ export async function getKubernetesApplyLogs(deploy: Deploy, tail?: number): Pro allLogs.push(`=== Logs from pod ${podName} ===\n${podLogs.body}`); } } catch (podError) { - getLogger({ error: podError }).error(`Failed to fetch logs from pod: podName=${podName}`); + getLogger({ error: podError }).error(`Pod: log fetch failed name=${podName}`); allLogs.push(`=== Error fetching logs from pod ${podName} ===\n${(podError as Error).message || podError}`); } } return allLogs.join('\n\n') || 'No logs available'; } catch (error) { - getLogger({ error }).error('Failed to fetch logs'); + getLogger({ error }).error('Logs: fetch failed'); return `Failed to fetch logs: ${(error as Error).message || error}`; } } @@ -243,7 +243,7 @@ export async function streamKubernetesApplyLogs( onClose(); } } catch (error) { - getLogger({ error }).error('Error polling logs'); + getLogger({ error }).error('Logs: poll failed'); if ((error as any).response?.statusCode === 404) { // Pod was deleted, stop polling isActive = false; @@ -261,7 +261,7 @@ export async function streamKubernetesApplyLogs( clearInterval(pollInterval); }; } catch (error) { - getLogger({ error }).error('Failed to start log stream'); + getLogger({ error }).error('Logs: stream start failed'); onError(error as Error); onClose(); return () => {}; diff --git a/src/server/lib/logStreamingHelper.ts b/src/server/lib/logStreamingHelper.ts index 17980bd..6ab6e1a 100644 --- a/src/server/lib/logStreamingHelper.ts +++ b/src/server/lib/logStreamingHelper.ts @@ -14,15 +14,11 @@ * limitations under the License. */ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import * as k8s from '@kubernetes/client-node'; import { StreamingInfo, LogSourceStatus, K8sPodInfo, K8sContainerInfo } from 'shared/types'; import { HttpError, V1ContainerStatus } from '@kubernetes/client-node'; -const logger = rootLogger.child({ - filename: __filename, -}); - /** * Reusable logic to get log streaming info for a specific Kubernetes job name, * using the provided namespace. @@ -32,7 +28,7 @@ export async function getLogStreamingInfoForJob( namespace: string ): Promise { if (!jobName) { - logger.warn(`Job name not provided. 
Cannot get logs.`);
+    getLogger().warn('LogStreaming: job name not provided');
     const statusResponse: LogSourceStatus = {
       status: 'Unavailable',
       streamingRequired: false,
@@ -45,7 +41,7 @@ export async function getLogStreamingInfoForJob(
   try {
     podInfo = await getK8sJobStatusAndPod(jobName, namespace);
   } catch (k8sError: any) {
-    logger.error({ k8sError }, `Error calling getK8sJobStatusAndPod for ${jobName}.`);
+    getLogger().error(`LogStreaming: error fetching job status jobName=${jobName} error=${k8sError.message}`);
     const errorStatus: LogSourceStatus = {
       status: 'Unknown',
       streamingRequired: false,
@@ -125,30 +121,28 @@ export async function getLogStreamingInfoForJob(
  * @returns A promise resolving to K8sPodInfo containing status and container info, or null if not found/error.
  */
 export async function getK8sJobStatusAndPod(jobName: string, namespace: string): Promise<K8sPodInfo | null> {
-  const logCtx = { jobName, namespace };
-
   const kc = new k8s.KubeConfig();
   kc.loadFromDefault();
   const coreV1Api = kc.makeApiClient(k8s.CoreV1Api);
   const batchV1Api = kc.makeApiClient(k8s.BatchV1Api);
 
   try {
-    logger.debug(logCtx, `Reading Job details for namespace: ${namespace} and jobName: ${jobName}`);
+    getLogger().debug(`LogStreaming: reading job details namespace=${namespace} jobName=${jobName}`);
     const jobResponse = await batchV1Api.readNamespacedJob(jobName, namespace);
     const job = jobResponse.body;
 
     if (!job?.spec?.selector?.matchLabels) {
       if (job?.status?.succeeded) {
-        logger.warn(logCtx, 'Job succeeded but selector missing.');
+        getLogger().warn(`LogStreaming: job succeeded but selector missing jobName=${jobName} namespace=${namespace}`);
         return { podName: null, namespace, status: 'Succeeded', containers: [] };
       }
       if (job?.status?.failed) {
-        logger.warn(logCtx, 'Job failed but selector missing.');
+        getLogger().warn(`LogStreaming: job failed but selector missing jobName=${jobName} namespace=${namespace}`);
         const failedCondition = job.status.conditions?.find((c) => c.type === 'Failed' && c.status === 'True');
         const failureMessage = failedCondition?.message || 'Job failed';
         return { podName: null, namespace, status: 'Failed', containers: [], message: failureMessage };
       }
-      logger.error(logCtx, 'Job found, but missing spec.selector.matchLabels.
Cannot find associated pods.'); + getLogger().error(`LogStreaming: job found but missing selector jobName=${jobName} namespace=${namespace}`); return { podName: null, namespace, status: 'Unknown', containers: [] }; } @@ -156,7 +150,9 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string): .map(([key, value]) => `${key}=${value}`) .join(','); - logger.debug({ ...logCtx, labelSelector }, 'Listing Pods with label selector'); + getLogger().debug( + `LogStreaming: listing pods jobName=${jobName} namespace=${namespace} labelSelector=${labelSelector}` + ); const podListResponse = await coreV1Api.listNamespacedPod( namespace, @@ -169,7 +165,7 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string): const pods = podListResponse.body.items; if (!pods || pods.length === 0) { - logger.warn(logCtx, 'No pods found matching the job selector.'); + getLogger().warn(`LogStreaming: no pods found matching job selector jobName=${jobName} namespace=${namespace}`); const jobStatus = job.status; if (jobStatus?.succeeded && jobStatus.succeeded > 0) { return { podName: null, namespace, status: 'Succeeded', containers: [] }; @@ -178,7 +174,9 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string): const failedCondition = jobStatus.conditions?.find((c) => c.type === 'Failed' && c.status === 'True'); const failureReason = failedCondition?.reason || 'Failed'; const failureMessage = failedCondition?.message || 'Job failed'; - logger.warn({ ...logCtx, failureReason }, 'Job indicates failure, but no pods found.'); + getLogger().warn( + `LogStreaming: job indicates failure but no pods found jobName=${jobName} namespace=${namespace} reason=${failureReason}` + ); return { podName: null, namespace, status: 'Failed', containers: [], message: failureMessage }; } return { podName: null, namespace, status: 'NotFound', containers: [] }; @@ -190,11 +188,11 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string): const latestPod = pods[0]; if (!latestPod?.metadata?.name || !latestPod?.status) { - logger.error(logCtx, 'Found pod(s), but latest pod is missing metadata or status.'); + getLogger().error(`LogStreaming: pod missing metadata or status jobName=${jobName} namespace=${namespace}`); return null; } const podName = latestPod.metadata.name; - logger.debug({ ...logCtx, podName }, 'Found latest pod'); + getLogger().debug(`LogStreaming: found latest pod jobName=${jobName} namespace=${namespace} podName=${podName}`); let podStatus: K8sPodInfo['status'] = 'Unknown'; const phase = latestPod.status.phase; @@ -259,7 +257,7 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string): return result; } catch (error: any) { if (error instanceof HttpError && error.response?.statusCode === 404) { - logger.warn(logCtx, `Job or associated resource not found (404) ${error.message}`); + getLogger().warn(`LogStreaming: job not found jobName=${jobName} namespace=${namespace} error=${error.message}`); return { podName: null, namespace, @@ -268,7 +266,9 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string): message: 'Job no longer exists. 
Logs have been cleaned up after 24 hours.',
       };
     }
-    logger.error({ ...logCtx, err: error }, 'Error getting K8s job/pod status');
+    getLogger().error(
+      `LogStreaming: error getting job/pod status jobName=${jobName} namespace=${namespace} error=${error.message}`
+    );
     return null;
   }
 }
@@ -280,9 +280,7 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
  * @returns A promise resolving to K8sPodInfo containing pod status and container info.
  */
 export async function getK8sPodContainers(podName: string, namespace: string = 'lifecycle-app'): Promise<K8sPodInfo> {
-  const logCtx = { podName, namespace };
-
-  logger.debug(logCtx, 'Fetching container information for pod');
+  getLogger().debug(`LogStreaming: fetching container info podName=${podName} namespace=${namespace}`);
   const kc = new k8s.KubeConfig();
   kc.loadFromDefault();
   const coreV1Api = kc.makeApiClient(k8s.CoreV1Api);
@@ -351,7 +349,7 @@ export async function getK8sPodContainers(podName: string, namespace: string = '
     };
   } catch (error: any) {
     if (error instanceof HttpError && error.response?.statusCode === 404) {
-      logger.warn(logCtx, `Pod not found (404): ${error.message}`);
+      getLogger().warn(`LogStreaming: pod not found podName=${podName} namespace=${namespace} error=${error.message}`);
       return {
         podName: null,
         namespace,
@@ -361,7 +359,9 @@ export async function getK8sPodContainers(podName: string, namespace: string = '
       };
     }
 
-    logger.error({ ...logCtx, err: error }, 'Error getting container information');
+    getLogger().error(
+      `LogStreaming: error getting container info podName=${podName} namespace=${namespace} error=${error.message}`
+    );
     throw error;
   }
 }
diff --git a/src/server/lib/nativeBuild/engines.ts b/src/server/lib/nativeBuild/engines.ts
index 161ef1d..38e245f 100644
--- a/src/server/lib/nativeBuild/engines.ts
+++ b/src/server/lib/nativeBuild/engines.ts
@@ -399,7 +399,7 @@ EOF`);
     const { logs, success } = await waitForJobAndGetLogs(jobName, options.namespace, jobTimeout);
     return { success, logs, jobName };
   } catch (error) {
-    getLogger().error(`Job: log retrieval failed job=${jobName} error=${error.message}`);
+    getLogger({ error }).error(`Job: log retrieval failed name=${jobName}`);
 
     try {
       const jobStatus = await shellPromise(
@@ -412,7 +412,7 @@ EOF`);
         return { success: true, logs: 'Log retrieval failed but job completed successfully', jobName };
       }
     } catch (statusError) {
-      getLogger().error(`Job: status check failed job=${jobName} error=${statusError.message}`);
+      getLogger({ error: statusError }).error(`Job: status check failed name=${jobName}`);
     }
 
     return { success: false, logs: `Build failed: ${error.message}`, jobName };
diff --git a/src/server/lib/nativeHelm/utils.ts b/src/server/lib/nativeHelm/utils.ts
index cfbd61e..59ab824 100644
--- a/src/server/lib/nativeHelm/utils.ts
+++ b/src/server/lib/nativeHelm/utils.ts
@@ -50,7 +50,7 @@ export async function getHelmReleaseStatus(releaseName: string, namespace: strin
     if (error.message?.includes('release: not found')) {
       return null;
     }
-    getLogger().warn({ error }, `Failed to get status for release: releaseName=${releaseName}`);
+    getLogger().warn({ error }, `Helm: release status fetch failed name=${releaseName}`);
     return null;
   }
 }
@@ -175,7 +175,7 @@ export async function checkIfJobWasSuperseded(jobName: string, namespace: string
     return annotations === 'superseded-by-retry';
   } catch (error) {
-    getLogger().debug({ error }, `Could not check job supersession status: jobName=${jobName}`);
+    getLogger().debug({ error }, `Helm: job supersession check failed
jobName=${jobName}`); return false; } } diff --git a/src/server/lib/queueManager.ts b/src/server/lib/queueManager.ts index b4f8faf..ead747a 100644 --- a/src/server/lib/queueManager.ts +++ b/src/server/lib/queueManager.ts @@ -109,7 +109,7 @@ export default class QueueManager { try { await worker.close(); } catch (error) { - getLogger().warn({ error: error.message }, `Error closing worker: queueName=${worker.name}`); + getLogger().warn({ error: error.message }, `Queue: worker close failed name=${worker.name}`); } } @@ -118,7 +118,7 @@ export default class QueueManager { try { await queue.close(); } catch (error) { - getLogger().warn({ error: error.message }, `Error closing queue: queueName=${queue.name}`); + getLogger().warn({ error: error.message }, `Queue: close failed name=${queue.name}`); } } } diff --git a/src/server/lib/redisClient.ts b/src/server/lib/redisClient.ts index 727c9a8..2b1520f 100644 --- a/src/server/lib/redisClient.ts +++ b/src/server/lib/redisClient.ts @@ -95,7 +95,7 @@ export class RedisClient { await Promise.all([this.redis.quit(), this.subscriber.quit(), this.bullConn.quit()]); getLogger().info('Redis: closed'); } catch (error) { - getLogger().warn({ error }, 'Error closing Redis connections, forcing disconnect'); + getLogger().warn({ error }, 'Redis: close failed forcing=true'); this.redis.disconnect(); this.subscriber.disconnect(); this.bullConn.disconnect(); diff --git a/src/server/lib/response.ts b/src/server/lib/response.ts index 25b2132..c4e6f64 100644 --- a/src/server/lib/response.ts +++ b/src/server/lib/response.ts @@ -16,11 +16,7 @@ import { NextRequest, NextResponse } from 'next/server'; import { PaginationMetadata } from './paginate'; -import rootLogger from './logger'; - -const logger = rootLogger.child({ - filename: 'server/lib/response.ts', -}); +import { getLogger } from 'server/lib/logger/index'; interface Metadata { pagination?: PaginationMetadata; @@ -79,7 +75,7 @@ export function errorResponse(error: unknown, options: ErrorResponseOptions, req errorStack = error.stack || ''; } - logger.error(`API Error: ${errorMessage}`, { stack: errorStack }); + getLogger().error({ error, stack: errorStack }, `API: error message=${errorMessage}`); const { status } = options; diff --git a/src/server/lib/tracer/index.ts b/src/server/lib/tracer/index.ts index 2b088ac..1f08bd3 100644 --- a/src/server/lib/tracer/index.ts +++ b/src/server/lib/tracer/index.ts @@ -15,11 +15,7 @@ */ import { Span, tracer, TracerOptions } from 'dd-trace'; -import rootLogger from 'server/lib/logger'; - -export const logger = rootLogger.child({ - filename: 'lib/tracer/index.ts', -}); +import { getLogger } from 'server/lib/logger/index'; // Refer to the readme for insights @@ -31,7 +27,7 @@ export class Tracer { private constructor() { if (Tracer.instance) { const errorMsg = 'This class is a singleton!'; - logger.error(errorMsg); + getLogger().error(`Tracer: singleton violation`); throw new Error(errorMsg); } Tracer.instance = this; @@ -65,7 +61,7 @@ export class Tracer { } return this; } catch (error) { - logger.error(`[Tracer][initialize] error: ${error}`); + getLogger().error(`Tracer: initialization error error=${error}`); return this; } } @@ -93,7 +89,7 @@ export class Tracer { } public static Trace(): Function { - return function (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor): any { + return function (_target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor): any { const originalMethod = descriptor?.value; const profiler = Tracer.getInstance(); 
       descriptor.value = function (...args: any[]) {
@@ -108,9 +104,7 @@ export class Tracer {
           if (typeof tracer?.scope === 'function') {
             tracer.scope().active()?.setTag('error', true);
           }
-          logger
-            .child({ target, descriptor, error })
-            .error(`[Tracer][Trace] error decorating ${propertyKey.toString()}`);
+          getLogger().error(`Tracer: error decorating method=${propertyKey.toString()} error=${error}`);
           throw error;
         }
       });
diff --git a/src/server/lib/utils.ts b/src/server/lib/utils.ts
index afde983..0b81f71 100644
--- a/src/server/lib/utils.ts
+++ b/src/server/lib/utils.ts
@@ -20,21 +20,17 @@ import { GithubPullRequestActions, PullRequestStatus, FallbackLabels } from 'sha
 import GlobalConfigService from 'server/services/globalConfig';
 import { GenerateDeployTagOptions, WaitUntilOptions, EnableKillswitchOptions } from 'server/lib/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger/index';
 import { ENVIRONMENT } from 'shared/config';
 
-const initialLogger = rootLogger.child({
-  filename: 'lib/utils.ts',
-});
-
 const execFilePromise = promisify(execFile);
 
-export const exec = async (runner: string, cmd: string[], { logger = initialLogger, execCmd = execFilePromise }) => {
+export const exec = async (runner: string, cmd: string[], { execCmd = execFilePromise } = {}) => {
   try {
     const out = await execCmd(runner, cmd);
     return out?.stdout || '';
   } catch (err) {
-    logger.error(`exec: error executing ${JSON.stringify(err)}`);
+    getLogger().error(`Exec: error executing runner=${runner} error=${JSON.stringify(err)}`);
     return '';
   }
 };
@@ -153,11 +149,10 @@ export const enableKillSwitch = async ({
   action = '',
   branch = '',
   fullName = '',
-  logger = initialLogger,
   isBotUser = false,
   labels = [],
   status = '',
-}: EnableKillswitchOptions) => {
+}: Omit<EnableKillswitchOptions, 'logger'>) => {
   try {
     const isOpened = [GithubPullRequestActions.OPENED, GithubPullRequestActions.REOPENED].includes(
       action as GithubPullRequestActions
@@ -194,7 +189,7 @@ export const enableKillSwitch = async ({
     const isUnallowed = organizations.includes(owner?.toLowerCase());
     return isIgnore || isReleaseBranch || isUnallowed;
   } catch (error) {
-    logger.warn(`[UTIL ${fullName}/${branch}][enableKillswitch] ${error}`);
+    getLogger().warn(`Killswitch: error checking fullName=${fullName} branch=${branch} error=${error}`);
     return false;
   }
 };
@@ -293,7 +288,7 @@ export const isControlCommentsEnabled = async (): Promise<boolean> => {
   try {
     const labelsConfig = await GlobalConfigService.getInstance().getLabels();
     return labelsConfig.defaultControlComments ?? true;
   } catch (error) {
-    initialLogger.warn('[isControlCommentsEnabled] Error retrieving config, defaulting to true', error);
+    getLogger().warn(`Config: error retrieving control comments config error=${error}`);
     return true;
   }
 };
diff --git a/src/server/lib/webhook/index.ts b/src/server/lib/webhook/index.ts
index c0e5483..6b632bd 100644
--- a/src/server/lib/webhook/index.ts
+++ b/src/server/lib/webhook/index.ts
@@ -126,7 +126,7 @@ async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Pro
       metadata: {},
     };
   } catch (error) {
-    getLogger().error({ error }, `Webhook execution failed: webhookName=${jobConfig.webhookName}`);
+    getLogger().error({ error }, `Webhook: execution failed name=${jobConfig.webhookName}`);
 
     const errorMessage = error instanceof Error ?
error.message : String(error); return { diff --git a/src/server/lib/yamlConfigValidator.ts b/src/server/lib/yamlConfigValidator.ts index a5839a4..e196afb 100644 --- a/src/server/lib/yamlConfigValidator.ts +++ b/src/server/lib/yamlConfigValidator.ts @@ -85,7 +85,7 @@ export class YamlConfigValidator { throw new ValidationError('Config file is empty.'); } - getLogger().debug(`Validating config file with version: ${version}`); + getLogger().debug(`Config: validating version=${version}`); switch (version.toLowerCase()) { case '1.0.0': case 'latest': diff --git a/src/server/models/config/utils.ts b/src/server/models/config/utils.ts index 4fe4339..769a026 100644 --- a/src/server/models/config/utils.ts +++ b/src/server/models/config/utils.ts @@ -36,7 +36,7 @@ export const resolveRepository = async (repositoryFullName: string) => { const repositories = await Repository.query() .where(raw('LOWER(??)', [key]), '=', name) .catch((error) => { - getLogger().error({ error }, `Unable to find ${repositoryFullName} from Lifecycle Database`); + getLogger().error({ error }, `Repository: not found name=${repositoryFullName}`); return null; }); if (!repositories || repositories?.length === 0) { @@ -44,7 +44,7 @@ export const resolveRepository = async (repositoryFullName: string) => { } return repositories[0]; } catch (err) { - getLogger().error({ error: err }, `Problem resolving repository ${repositoryFullName}`); + getLogger().error({ error: err }, `Repository: resolution failed name=${repositoryFullName}`); } }; @@ -61,7 +61,7 @@ export const fetchLifecycleConfigByRepository = async (repository: Repository, b const validator = new YamlConfigValidator(); const isConfigValid = validator.validate(configVersion, config); if (!isConfigValid) { - getLogger().error(`YAML Config validation failed for ${name}/${branchName} version=${configVersion}`); + getLogger().error(`Config: validation failed repo=${name}/${branchName} version=${configVersion}`); // TODO: This is a temporary fix to allow the UI to display the config // throw new Error( // `YAML Config validation failed for ${name}/${branchName} using version Lifecyle Yaml version=${configVersion}` @@ -69,7 +69,7 @@ export const fetchLifecycleConfigByRepository = async (repository: Repository, b } return config; } catch (err) { - getLogger().error({ error: err }, `fetchLifecycleConfigByRepository failed`); + getLogger().error({ error: err }, `Config: fetch failed`); return null; } }; diff --git a/src/server/models/yaml/Config.ts b/src/server/models/yaml/Config.ts index 2f05505..f431d2f 100644 --- a/src/server/models/yaml/Config.ts +++ b/src/server/models/yaml/Config.ts @@ -66,10 +66,7 @@ export async function fetchLifecycleConfigByRepository( try { config = await new YamlConfigParser().parseYamlConfigFromBranch(repository.fullName, branchName); } catch (error) { - getLogger({ repository: repository.fullName, branch: branchName }).warn( - { error }, - 'Unable to fetch configuration' - ); + getLogger({ repository: repository.fullName, branch: branchName }).warn({ error }, 'Config: fetch failed'); if (error instanceof EmptyFileError) { config = null; @@ -84,7 +81,7 @@ export async function fetchLifecycleConfigByRepository( } catch (error) { getLogger({ repository: repository.fullName, branch: branchName, version: config.version }).error( { error }, - 'YAML config validation failed' + 'Config: validation failed' ); throw new ValidationError(error); } @@ -109,7 +106,7 @@ export function getDeployingServicesByName(config: LifecycleConfig, serviceName: } } } catch 
(error) { - getLogger({ serviceName }).error({ error }, 'Failed to get service by name'); + getLogger({ serviceName }).error({ error }, 'Service: lookup failed'); throw error; } @@ -129,7 +126,7 @@ export async function resolveRepository(repositoryFullName: string): Promise { `Unspported Chart: helmChart with name: ${helmService?.chart?.name} is not currently supported` ); getLogger({ chartName: helmService?.chart?.name }).warn( - 'Helm chart not currently supported, proceed with caution' + `Helm: chart not supported name=${helmService?.chart?.name}` ); } @@ -576,7 +576,7 @@ export function getRepositoryName(service: Service): string { break; } } catch (error) { - getLogger({ serviceName: service?.name }).error({ error }, 'Failed to get repository name for service'); + getLogger({ serviceName: service?.name }).error({ error }, 'Service: repository name lookup failed'); throw error; } diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts index 14afc8e..d24f7c3 100644 --- a/src/server/services/activityStream.ts +++ b/src/server/services/activityStream.ts @@ -82,7 +82,7 @@ export default class ActivityStream extends BaseService { await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]'); const { build } = pullRequest; if (!build) { - getLogger({ stage: LogStage.COMMENT_FAILED }).warn(`Build id not found for pull request with id: ${id}`); + getLogger({ stage: LogStage.COMMENT_FAILED }).warn(`Build: id not found pullRequestId=${id}`); return; } @@ -101,7 +101,10 @@ export default class ActivityStream extends BaseService { getLogger({ stage: LogStage.COMMENT_COMPLETE }).debug(`Comment updated for PR ${id}`); } catch (error) { - getLogger({ stage: LogStage.COMMENT_FAILED }).error({ error }, `Error processing comment for PR ${id}`); + getLogger({ stage: LogStage.COMMENT_FAILED }).error( + { error }, + `Comment: processing failed pullRequestId=${id}` + ); } }); }; @@ -170,7 +173,7 @@ export default class ActivityStream extends BaseService { null, true ).catch((error) => { - getLogger().warn({ error }, 'Failed to update the activity feed for comment edit'); + getLogger().warn({ error }, 'ActivityFeed: comment edit update failed'); }); } } @@ -189,7 +192,7 @@ export default class ActivityStream extends BaseService { runUuid: string; }) { if (!build.id) { - getLogger().error('No build provided to apply overrides from comment edit'); + getLogger().error('Build: missing for comment edit overrides'); return; } @@ -234,7 +237,7 @@ export default class ActivityStream extends BaseService { : deploys.find((d) => d.service.name === serviceName); if (!deploy) { - getLogger().warn(`No deploy found for service: ${serviceName}`); + getLogger().warn(`Deploy: not found service=${serviceName}`); return; } @@ -252,7 +255,7 @@ export default class ActivityStream extends BaseService { active, }) .catch((error) => { - getLogger().error({ error }, `Failed to patch deploy for service=${serviceName} with external URL`); + getLogger().error({ error }, `Deploy: patch failed service=${serviceName} field=externalUrl`); }); } else { getLogger().debug(`Setting branch override: ${branchOrExternalUrl} for deployable: ${deployable?.name}`); @@ -260,7 +263,7 @@ export default class ActivityStream extends BaseService { .$query() .patch({ commentBranchName: branchOrExternalUrl }) .catch((error) => { - getLogger().error({ error }, `Failed to patch deployable for service=${serviceName} with branch`); + getLogger().error({ error }, `Deployable: patch failed 
service=${serviceName} field=branch`); }); await deploy @@ -273,7 +276,7 @@ export default class ActivityStream extends BaseService { active, }) .catch((error) => { - getLogger().error({ error }, `Failed to patch deploy for service=${serviceName} with branch`); + getLogger().error({ error }, `Deploy: patch failed service=${serviceName} field=branch`); }); } @@ -309,7 +312,7 @@ export default class ActivityStream extends BaseService { }); if (hasGithubMissionControlComment && !pullRequest?.commentId) { - getLogger().error('Status comment already exists but no mission control comment ID found'); + getLogger().error('Comment: mission control id missing'); return; } @@ -329,7 +332,7 @@ export default class ActivityStream extends BaseService { const commentId = response?.data?.id; await pullRequest.$query().patch({ commentId, etag }); } catch (error) { - getLogger().error({ error }, `Failed to update Github mission control comment for ${fullName}/${branchName}`); + getLogger().error({ error }, `GitHub: mission control update failed repo=${fullName}/${branchName}`); } } @@ -347,7 +350,7 @@ export default class ActivityStream extends BaseService { }); if (hasStatusComment && !commentId) { - getLogger().warn('Status comment already exists but no status comment ID found'); + getLogger().warn('Comment: status id missing'); return; } const message = await this.generateStatusCommentForBuild(build, deploys, pullRequest); @@ -392,7 +395,7 @@ export default class ActivityStream extends BaseService { const isDefaultStatusEnabled = await isDefaultStatusCommentsEnabled(); const isShowingStatusComment = isStatic || hasStatusComment || isDefaultStatusEnabled; if (!buildId) { - getLogger().error(`No build ID found for ${fullName}/${branchName}`); + getLogger().error(`Build: id not found repo=${fullName}/${branchName}`); throw new Error('No build ID found for this build!'); } const resource = `build.${buildId}`; @@ -431,7 +434,7 @@ export default class ActivityStream extends BaseService { await this.updateMissionControlComment(build, deploys, pullRequest, repository).catch((error) => { getLogger().warn( { error }, - `Unable to update ${queued} mission control comment fullYaml=${isFullYaml} for ${fullName}/${branchName}` + `Comment: mission control update failed repo=${fullName}/${branchName} fullYaml=${isFullYaml} queued=${queued}` ); }); } else { @@ -443,12 +446,12 @@ export default class ActivityStream extends BaseService { await this.updateStatusComment(build, deploys, pullRequest, repository).catch((error) => { getLogger().warn( { error }, - `Unable to update ${queued} status comment fullYaml=${isFullYaml} for ${fullName}/${branchName}` + `Comment: status update failed repo=${fullName}/${branchName} fullYaml=${isFullYaml} queued=${queued}` ); }); } } catch (error) { - getLogger().error({ error }, `Failed to update the activity feed for ${fullName}/${branchName}`); + getLogger().error({ error }, `ActivityFeed: update failed repo=${fullName}/${branchName}`); } finally { if (lock) { try { @@ -464,7 +467,7 @@ export default class ActivityStream extends BaseService { try { await this.redis.del(resource); } catch (error) { - getLogger().error({ error }, `Failed to forcefully unlock ${resource} for ${fullName}/${branchName}`); + getLogger().error({ error }, `Lock: force unlock failed resource=${resource} repo=${fullName}/${branchName}`); } } @@ -696,7 +699,7 @@ export default class ActivityStream extends BaseService { } message += await this.editCommentForBuild(build, deploys).catch((error) => { - 
getLogger().error({ error }, `Unable to generate mission control fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: mission control generation failed fullYaml=${build.enableFullYaml}`); return ''; }); @@ -704,7 +707,7 @@ export default class ActivityStream extends BaseService { message += '\n---\n\n'; message += `## 📦 Deployments\n\n`; message += await this.environmentBlock(build).catch((error) => { - getLogger().error({ error }, `Unable to generate environment comment block fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`); return ''; }); } @@ -712,7 +715,7 @@ export default class ActivityStream extends BaseService { message += `\n\nmission control ${isStaging() ? 'stg ' : ''}comment: enabled \n`; return message; } catch (error) { - getLogger().error({ error }, `Failed to generate mission control comment for ${fullName}/${branchName}`); + getLogger().error({ error }, `Comment: mission control generation failed repo=${fullName}/${branchName}`); return message; } } @@ -804,13 +807,13 @@ export default class ActivityStream extends BaseService { message += 'We are busy building your code...\n'; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += `\nHere's where you can find your services after they're deployed:\n`; message += await this.environmentBlock(build).catch((error) => { - getLogger().error({ error }, `Unable to generate environment comment block fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`); return ''; }); @@ -824,26 +827,26 @@ export default class ActivityStream extends BaseService { message += `We're deploying your code. Please stand by....\n\n`; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += `\nHere's where you can find your services after they're deployed:\n`; message += await this.environmentBlock(build).catch((e) => { - getLogger().error( - { error: e }, - `Unable to generate environment comment block fullYaml=${build.enableFullYaml}` - ); + getLogger().error({ error: e }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += await this.dashboardBlock(build, deploys).catch((e) => { - getLogger().error({ error: e }, `Unable to generate dashboard fullYaml=${build.enableFullYaml}`); + getLogger().error({ error: e }, `Comment: dashboard generation failed fullYaml=${build.enableFullYaml}`); return ''; }); } else if (isReadyToDeployBuild) { message += '## 🚀 Ready to deploy\n'; message += `Your code is built. 
We're ready to deploy whenever you are.\n`; message += await this.deployingBlock(build).catch((e) => { - getLogger().error({ error: e }, `Unable to generate deployment status fullYaml=${build.enableFullYaml}`); + getLogger().error( + { error: e }, + `Comment: deployment status generation failed fullYaml=${build.enableFullYaml}` + ); return ''; }); message += await createDeployMessage(); @@ -854,18 +857,15 @@ export default class ActivityStream extends BaseService { message += `There was a problem deploying your code. Some services may have not rolled out successfully. Here are the URLs for your services:\n\n`; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += await this.environmentBlock(build).catch((e) => { - getLogger().error( - { error: e }, - `Unable to generate environment comment block fullYaml=${build.enableFullYaml}` - ); + getLogger().error({ error: e }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += await this.dashboardBlock(build, deploys).catch((e) => { - getLogger().error({ error: e }, `Unable to generate dashboard fullYaml=${build.enableFullYaml}`); + getLogger().error({ error: e }, `Comment: dashboard generation failed fullYaml=${build.enableFullYaml}`); return ''; }); } else if (build.status === BuildStatus.CONFIG_ERROR) { @@ -875,19 +875,16 @@ export default class ActivityStream extends BaseService { message += '## ✅ Deployed\n'; message += '## Build Status\n'; message += await this.buildStatusBlock(build, deploys, null).catch((error) => { - getLogger().error({ error }, `Unable to generate build status fullYaml=${build.enableFullYaml}`); + getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += `\nWe've deployed your code. 
Here's where you can find your services:\n`; message += await this.environmentBlock(build).catch((e) => { - getLogger().error( - { error: e }, - `Unable to generate environment comment block fullYaml=${build.enableFullYaml}` - ); + getLogger().error({ error: e }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`); return ''; }); message += await this.dashboardBlock(build, deploys).catch((e) => { - getLogger().error({ error: e }, `Unable to generate dashboard fullYaml=${build.enableFullYaml}`); + getLogger().error({ error: e }, `Comment: dashboard generation failed fullYaml=${build.enableFullYaml}`); return ''; }); } else { @@ -1168,7 +1165,7 @@ export default class ActivityStream extends BaseService { { deployId, action: 'create', ...extractContextForQueue() }, { delay: 10000, jobId: `deploy-${deployId}` } ) - .catch((error) => getLogger().warn({ error }, `manageDeployments error with deployId=${deployId}`)); + .catch((error) => getLogger().warn({ error }, `Deploy: management failed deployId=${deployId}`)); } ); }) @@ -1199,7 +1196,7 @@ export default class ActivityStream extends BaseService { } getLogger().info(`Fastly: purged serviceId=${fastlyServiceId}`); } catch (error) { - getLogger().error({ error }, 'Fastly purgeFastlyServiceCache error'); + getLogger().error({ error }, 'Fastly: cache purge failed'); } } } diff --git a/src/server/services/ai/conversation/manager.ts b/src/server/services/ai/conversation/manager.ts index 29dc5f0..0b5ee14 100644 --- a/src/server/services/ai/conversation/manager.ts +++ b/src/server/services/ai/conversation/manager.ts @@ -15,7 +15,7 @@ */ import { LLMProvider, Message, StreamChunk } from '../types/provider'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; export interface ConversationState { summary: string; @@ -41,10 +41,7 @@ export class ConversationManager { } async compress(messages: Message[], llmProvider: LLMProvider, buildUuid?: string): Promise { - const logger = buildUuid - ? rootLogger.child({ component: 'AIAgentConversationManager', buildUuid }) - : rootLogger.child({ component: 'AIAgentConversationManager' }); - logger.info(`Starting conversation compression for ${messages.length} messages`); + getLogger().info(`AI: compression starting messageCount=${messages.length} buildUuid=${buildUuid || 'none'}`); const compressionPrompt = ` Analyze this debugging conversation and create a structured summary. 
@@ -79,8 +76,8 @@ ${this.formatMessages(messages)} state.messageCount = messages.length; state.compressionLevel = 1; - logger.info( - `Compression complete: ${messages.length} messages -> ${state.tokenCount} tokens, identified ${state.identifiedIssues.length} issues, investigated ${state.investigatedServices.length} services` + getLogger().info( + `AIAgentConversationManager: compression complete messageCount=${messages.length} tokenCount=${state.tokenCount} issueCount=${state.identifiedIssues.length} serviceCount=${state.investigatedServices.length}` ); return state; diff --git a/src/server/services/ai/orchestration/orchestrator.ts b/src/server/services/ai/orchestration/orchestrator.ts index 644b025..53f62a2 100644 --- a/src/server/services/ai/orchestration/orchestrator.ts +++ b/src/server/services/ai/orchestration/orchestrator.ts @@ -20,7 +20,7 @@ import { StreamCallbacks } from '../types/stream'; import { ToolRegistry } from '../tools/registry'; import { ToolSafetyManager } from './safety'; import { LoopDetector } from './loopProtection'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; export interface OrchestrationResult { success: boolean; @@ -55,9 +55,6 @@ export class ToolOrchestrator { let totalToolCalls = 0; let fullResponse = ''; const protection = this.loopDetector.getProtection(); - const logger = buildUuid - ? rootLogger.child({ component: 'AIAgentOrchestrator', buildUuid }) - : rootLogger.child({ component: 'AIAgentOrchestrator' }); this.loopDetector.reset(); @@ -85,7 +82,7 @@ export class ToolOrchestrator { } } } catch (error: any) { - logger.error(`Stream error: ${error.message}`, error); + getLogger().error(`AI: stream error message=${error.message} buildUuid=${buildUuid || 'none'}`); return { success: false, error: error.message || 'Provider error', @@ -106,7 +103,11 @@ export class ToolOrchestrator { totalToolCalls += toolCalls.length; if (totalToolCalls > protection.maxToolCalls) { - logger.warn(`Tool call limit exceeded: ${totalToolCalls} > ${protection.maxToolCalls}`); + getLogger().warn( + `AI: tool call limit exceeded totalToolCalls=${totalToolCalls} maxToolCalls=${ + protection.maxToolCalls + } buildUuid=${buildUuid || 'none'}` + ); return { success: false, error: @@ -187,8 +188,10 @@ export class ToolOrchestrator { }); } - logger.warn( - `Tool loop hit iteration limit: ${iteration}/${protection.maxIterations}, totalToolCalls=${totalToolCalls}` + getLogger().warn( + `AI: iteration limit reached iteration=${iteration} maxIterations=${ + protection.maxIterations + } totalToolCalls=${totalToolCalls} buildUuid=${buildUuid || 'none'}` ); return { success: false, diff --git a/src/server/services/ai/orchestration/safety.ts b/src/server/services/ai/orchestration/safety.ts index a70a3a1..07bc09d 100644 --- a/src/server/services/ai/orchestration/safety.ts +++ b/src/server/services/ai/orchestration/safety.ts @@ -17,7 +17,7 @@ import JsonSchema from 'jsonschema'; import { Tool, ToolResult, ToolSafetyLevel } from '../types/tool'; import { StreamCallbacks } from '../types/stream'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; export class ToolSafetyManager { private requireConfirmation: boolean; @@ -35,13 +35,13 @@ export class ToolSafetyManager { signal?: AbortSignal, buildUuid?: string ): Promise { - const logger = buildUuid - ? 
rootLogger.child({ component: 'AIAgentSafetyManager', buildUuid }) - : rootLogger.child({ component: 'AIAgentSafetyManager' }); - const validation = this.validateArgs(tool.parameters, args); if (!validation.valid) { - logger.warn(`Tool ${tool.name} failed validation:`, validation.errors); + getLogger().warn( + `AI: validation failed tool=${tool.name} errors=${validation.errors.join(', ')} buildUuid=${ + buildUuid || 'none' + }` + ); return { success: false, error: { @@ -57,7 +57,7 @@ export class ToolSafetyManager { if (confirmDetails) { if (!callbacks.onToolConfirmation) { - logger.error(`Tool ${tool.name} requires confirmation but no confirmation callback provided`); + getLogger().error(`AI: confirmation callback missing tool=${tool.name} buildUuid=${buildUuid || 'none'}`); return { success: false, error: { @@ -91,7 +91,7 @@ export class ToolSafetyManager { return result; } catch (error: any) { if (error.message === 'Tool execution timeout') { - logger.warn(`Tool ${tool.name} timed out after 30 seconds`); + getLogger().warn(`AI: tool timeout tool=${tool.name} timeout=30s buildUuid=${buildUuid || 'none'}`); return { success: false, error: { @@ -103,7 +103,9 @@ export class ToolSafetyManager { }; } - logger.error(`Tool ${tool.name} execution error:`, error); + getLogger().error( + `AI: tool execution failed tool=${tool.name} error=${error?.message} buildUuid=${buildUuid || 'none'}` + ); return { success: false, error: { @@ -150,13 +152,11 @@ export class ToolSafetyManager { private logToolExecution(name: string, args: Record, result: ToolResult, buildUuid?: string): void { if (!result.success && !result.error?.recoverable) { - const logger = buildUuid - ? rootLogger.child({ component: 'AIAgentSafetyManager', buildUuid }) - : rootLogger.child({ component: 'AIAgentSafetyManager' }); - logger.error(`Tool ${name} failed with non-recoverable error: ${result.error?.message}`, { - errorCode: result.error?.code, - recoverable: result.error?.recoverable, - }); + getLogger().error( + `AI: non-recoverable tool error tool=${name} error=${result.error?.message} errorCode=${ + result.error?.code + } buildUuid=${buildUuid || 'none'}` + ); } } } diff --git a/src/server/services/ai/providers/gemini.ts b/src/server/services/ai/providers/gemini.ts index 6ce3af5..ff19a73 100644 --- a/src/server/services/ai/providers/gemini.ts +++ b/src/server/services/ai/providers/gemini.ts @@ -18,9 +18,7 @@ import { GoogleGenerativeAI, SchemaType } from '@google/generative-ai'; import { BaseLLMProvider } from './base'; import { ModelInfo, CompletionOptions, StreamChunk, Message } from '../types/provider'; import { Tool, ToolCall } from '../types/tool'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ component: 'GeminiProvider' }); +import { getLogger } from 'server/lib/logger/index'; export class GeminiProvider extends BaseLLMProvider { name = 'gemini'; @@ -83,9 +81,12 @@ export class GeminiProvider extends BaseLLMProvider { } try { JSON.parse(responseContent); - } catch (e) { - logger.warn( - `Tool response is not valid JSON, sanitizing: ${responseContent.substring(0, 100)}...` + } catch (e: any) { + getLogger().warn( + `GeminiProvider: tool response not valid JSON, sanitizing preview=${responseContent.substring( + 0, + 100 + )}` ); responseContent = JSON.stringify({ content: responseContent }); } @@ -105,8 +106,8 @@ export class GeminiProvider extends BaseLLMProvider { } return messages; } - } catch (e) { - logger.warn(`Failed to parse tool results, treating as text: ${e.message}`); + } 
catch (e: any) { + getLogger().warn(`GeminiProvider: failed to parse tool results error=${e.message}`); } } return [ @@ -140,10 +141,8 @@ export class GeminiProvider extends BaseLLMProvider { lastCandidate = candidate; if (candidate.finishReason === 'STOP' && (!candidate.content?.parts || candidate.content.parts.length === 0)) { - logger.error( - `Gemini returned STOP with no content. Safety ratings: ${JSON.stringify( - candidate.safetyRatings - )}, full candidate: ${JSON.stringify(candidate)}` + getLogger().error( + `GeminiProvider: returned STOP with no content safetyRatings=${JSON.stringify(candidate.safetyRatings)}` ); } @@ -167,22 +166,22 @@ export class GeminiProvider extends BaseLLMProvider { const response = await result.response; if (accumulatedText.length === 0 && functionCalls.length === 0) { - let responseText = 'N/A'; + let _responseText = 'N/A'; try { - responseText = (response as any).text(); - } catch (e) { - responseText = `Error getting text: ${e.message}`; + _responseText = (response as any).text(); + } catch (e: any) { + _responseText = `Error getting text: ${e.message}`; } - logger.error( - `Gemini returned empty response. Last candidate: ${JSON.stringify( - lastCandidate - )}, promptFeedback: ${JSON.stringify((response as any).promptFeedback)}, response.text: ${responseText}` - ); - logger.error( - `Full response object keys: ${Object.keys(response)}, candidates: ${JSON.stringify( - (response as any).candidates + getLogger().error( + `GeminiProvider: empty response finishReason=${lastCandidate?.finishReason} promptFeedback=${JSON.stringify( + (response as any).promptFeedback )}` ); + getLogger().error( + `GeminiProvider: debug info responseKeys=${Object.keys(response).join(',')} candidatesCount=${ + (response as any).candidates?.length || 0 + }` + ); throw new Error( `Gemini returned an empty response. 
This may be due to: ` + diff --git a/src/server/services/ai/service.ts b/src/server/services/ai/service.ts index d167957..6537820 100644 --- a/src/server/services/ai/service.ts +++ b/src/server/services/ai/service.ts @@ -39,7 +39,7 @@ import { GitHubClient, } from './tools'; import { DebugContext, DebugMessage } from '../types/aiAgent'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; export interface AIAgentConfig { provider: ProviderType; @@ -101,7 +101,6 @@ export class AIAgentCore { signal: AbortSignal ): Promise { const startTime = Date.now(); - const logger = rootLogger.child({ component: 'AIAgentCore', buildUuid: context.buildUuid }); try { if (context.lifecycleContext.pullRequest.branch) { @@ -121,14 +120,18 @@ export class AIAgentCore { })); if (await this.conversationManager.shouldCompress(messages)) { - logger.info(`Compressing conversation history from ${messages.length} messages`); + getLogger().info( + `AIAgentCore: compressing conversation fromMessageCount=${messages.length} buildUuid=${context.buildUuid}` + ); const state = await this.conversationManager.compress(messages, this.provider, context.buildUuid); messages.splice(0, messages.length - 1); messages.unshift({ role: 'user', content: this.conversationManager.buildPromptFromState(state), }); - logger.info(`Conversation compressed to ${messages.length} messages`); + getLogger().info( + `AIAgentCore: conversation compressed toMessageCount=${messages.length} buildUuid=${context.buildUuid}` + ); } const conversationHistoryForBuilder: DebugMessage[] = messages.map((m) => ({ @@ -172,10 +175,12 @@ export class AIAgentCore { const duration = Date.now() - startTime; - logger.info( - `Query processing ${result.success ? 'completed' : 'failed'}: iterations=${ + getLogger().info( + `AIAgentCore: query processing ${result.success ? 'completed' : 'failed'} iterations=${ result.metrics.iterations - } toolCalls=${result.metrics.toolCalls} duration=${duration}ms isJson=${finalResult.isJson}` + } toolCalls=${result.metrics.toolCalls} duration=${duration}ms isJson=${finalResult.isJson} buildUuid=${ + context.buildUuid + }` ); return { @@ -186,7 +191,9 @@ export class AIAgentCore { } catch (error: any) { const duration = Date.now() - startTime; - logger.error(`Query processing error after ${duration}ms:`, error); + getLogger().error( + `AIAgentCore: query processing error duration=${duration}ms error=${error?.message} buildUuid=${context.buildUuid}` + ); throw error; } diff --git a/src/server/services/ai/streaming/jsonBuffer.ts b/src/server/services/ai/streaming/jsonBuffer.ts index de1d2df..c6d67ea 100644 --- a/src/server/services/ai/streaming/jsonBuffer.ts +++ b/src/server/services/ai/streaming/jsonBuffer.ts @@ -14,9 +14,7 @@ * limitations under the License. 
*/ -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ component: 'JSONBuffer' }); +import { getLogger } from 'server/lib/logger/index'; export class JSONBuffer { private buffer: string = ''; @@ -46,8 +44,8 @@ export class JSONBuffer { try { return JSON.parse(this.buffer); - } catch (error) { - logger.error({ error, bufferLength: this.buffer.length }, 'Failed to parse JSON buffer'); + } catch (error: any) { + getLogger().error(`JSONBuffer: parse failed bufferLength=${this.buffer.length} error=${error?.message}`); return null; } } diff --git a/src/server/services/ai/streaming/responseHandler.ts b/src/server/services/ai/streaming/responseHandler.ts index d172c32..9ba8d6f 100644 --- a/src/server/services/ai/streaming/responseHandler.ts +++ b/src/server/services/ai/streaming/responseHandler.ts @@ -16,27 +16,23 @@ import { StreamCallbacks } from '../types/stream'; import { JSONBuffer } from './jsonBuffer'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ component: 'AIAgentResponseHandler' }); +import { getLogger } from 'server/lib/logger/index'; export class ResponseHandler { private jsonBuffer: JSONBuffer; private isJsonResponse: boolean = false; private textBuffer: string = ''; - private logger: typeof logger; + private buildUuid?: string; constructor(private callbacks: StreamCallbacks, buildUuid?: string) { this.jsonBuffer = new JSONBuffer(); - this.logger = buildUuid - ? rootLogger.child({ component: 'AIAgentResponseHandler', buildUuid }) - : rootLogger.child({ component: 'AIAgentResponseHandler' }); + this.buildUuid = buildUuid; } handleChunk(text: string): void { if (!this.isJsonResponse && this.isJsonStart(text)) { this.isJsonResponse = true; - this.logger.info('Detected JSON response start, switching to JSON buffering mode'); + getLogger().info(`AI: JSON response detected buildUuid=${this.buildUuid || 'none'}`); this.callbacks.onThinking('Generating structured report...'); this.jsonBuffer.append(text); return; @@ -46,13 +42,13 @@ export class ResponseHandler { this.jsonBuffer.append(text); if (this.jsonBuffer.isComplete()) { - this.logger.info('JSON response complete, parsing structured output'); + getLogger().info(`AI: JSON response complete buildUuid=${this.buildUuid || 'none'}`); const parsed = this.jsonBuffer.parse(); if (parsed) { - this.logger.info(`Parsed structured output of type: ${parsed.type}`); + getLogger().info(`AI: structured output parsed type=${parsed.type} buildUuid=${this.buildUuid || 'none'}`); this.callbacks.onStructuredOutput(parsed); } else { - this.logger.warn('Failed to parse completed JSON buffer'); + getLogger().warn(`AI: JSON parse failed buildUuid=${this.buildUuid || 'none'}`); } } return; diff --git a/src/server/services/aiAgent.ts b/src/server/services/aiAgent.ts index 0cb0ac6..3860623 100644 --- a/src/server/services/aiAgent.ts +++ b/src/server/services/aiAgent.ts @@ -31,9 +31,7 @@ import { PatchK8sResourceTool, GetIssueCommentTool, } from './ai/tools'; -import rootLogger from 'server/lib/logger'; - -const logger = rootLogger.child({ component: 'AIAgentService' }); +import { getLogger } from 'server/lib/logger/index'; export default class AIAgentService extends BaseService { private service: AIAgentCore | null = null; @@ -239,10 +237,7 @@ Respond with ONLY the word INVESTIGATE or FIX, nothing else.`; return 'investigate'; } } catch (error: any) { - logger.error( - { error, errorMessage: error?.message, errorStack: error?.stack }, - 'Failed to classify user intent, defaulting to investigate' - 
); + getLogger().error(`AI: classifyUserIntent failed error=${error?.message}`); return 'investigate'; } } diff --git a/src/server/services/build.ts b/src/server/services/build.ts index db16bac..fa5078f 100644 --- a/src/server/services/build.ts +++ b/src/server/services/build.ts @@ -81,14 +81,14 @@ export default class BuildService extends BaseService { // Enqueue a deletion job const buildId = build?.id; if (!buildId) { - getLogger().error('No build ID found for cleanup'); + getLogger().error('Build: id missing for=cleanup'); } getLogger().info('Build: queuing action=delete'); await this.db.services.BuildService.deleteQueue.add('delete', { buildId, ...extractContextForQueue() }); } } } catch (e) { - getLogger().error({ error: e }, 'Cleanup build failed'); + getLogger().error({ error: e }, 'Build: cleanup failed'); } } } @@ -381,7 +381,7 @@ export default class BuildService extends BaseService { const environments = await this.getEnvironmentsToBuild(environmentId, repositoryId); if (!environments.length) { - getLogger().debug('No matching environments'); + getLogger().debug('Build: no matching environments'); return; } @@ -400,7 +400,7 @@ export default class BuildService extends BaseService { }); await Promise.all(promises); } catch (err) { - getLogger().fatal({ error: err }, 'Failed to create and deploy build'); + getLogger().fatal({ error: err }, 'Build: create and deploy failed'); } } @@ -413,15 +413,15 @@ export default class BuildService extends BaseService { await this.db.services.Webhook.upsertWebhooksWithYaml(build, build.pullRequest); } catch (error) { if (error instanceof ParsingError) { - getLogger().error({ error }, 'Invalid Lifecycle Config File (parsing error)'); + getLogger().error({ error }, 'Config: parsing failed'); throw error; } else if (error instanceof ValidationError) { - getLogger().error({ error }, 'Invalid Lifecycle Config File (validation error)'); + getLogger().error({ error }, 'Config: validation failed'); throw error; } else { - getLogger().warn({ error }, 'Non-critical error during YAML config import'); + getLogger().warn({ error }, 'Config: import warning'); } } } @@ -490,7 +490,7 @@ export default class BuildService extends BaseService { throw new Error('Missing build or deployment options from environment.'); } } catch (error) { - getLogger().fatal({ error }, 'Failed to create build and deploys'); + getLogger().fatal({ error }, 'Build: create deploys failed'); } } @@ -539,7 +539,7 @@ export default class BuildService extends BaseService { dependencyGraph, }); } catch (error) { - getLogger().warn({ error }, 'Unable to generate dependency graph'); + getLogger().warn({ error }, 'Graph: generation failed'); } // Build Docker Images & Deploy CLI Based Infra At the Same Time @@ -569,12 +569,12 @@ export default class BuildService extends BaseService { } } else { getLogger().warn( - `Build in errored state, not commencing rollout: fullName=${fullName} branchName=${branchName} latestCommit=${latestCommit}` + `Build: errored skipping=rollout fullName=${fullName} branchName=${branchName} latestCommit=${latestCommit}` ); await this.updateStatusAndComment(build, BuildStatus.ERROR, runUUID, true, true); } } catch (error) { - getLogger().error({ error }, 'Failed to deploy build'); + getLogger().error({ error }, 'Build: deploy failed'); await this.updateStatusAndComment(build, BuildStatus.ERROR, runUUID, true, true, error); } @@ -660,11 +660,11 @@ export default class BuildService extends BaseService { ): Promise { const buildId = build?.id; if (!buildId) { - 
getLogger().error('No build ID found for createBuildServiceOverride'); + getLogger().error('Build: id missing for=createBuildServiceOverride'); } const serviceId = service?.id; if (!serviceId) { - getLogger().error('No service ID found for createBuildServiceOverride'); + getLogger().error('Service: id missing for=createBuildServiceOverride'); } const buildServiceOverride = (await this.db.models.BuildServiceOverride.findOne({ @@ -690,13 +690,13 @@ export default class BuildService extends BaseService { updateLogContext({ buildUuid: build.uuid }); } - getLogger().debug('Triggering cleanup'); + getLogger().debug('Build: triggering cleanup'); await this.updateStatusAndComment(build, BuildStatus.TEARING_DOWN, build.runUUID, true, true).catch((error) => { - getLogger().warn({ error }, `Failed to update status to ${BuildStatus.TEARING_DOWN}`); + getLogger().warn({ error }, `Build: status update failed status=${BuildStatus.TEARING_DOWN}`); }); await Promise.all([k8s.deleteBuild(build), cli.deleteBuild(build), uninstallHelmReleases(build)]).catch( - (error) => getLogger().error({ error }, 'Failed to cleanup build') + (error) => getLogger().error({ error }, 'Build: cleanup failed') ); await Promise.all( @@ -718,10 +718,10 @@ export default class BuildService extends BaseService { }); getLogger().info('Build: deleted'); await this.updateStatusAndComment(build, BuildStatus.TORN_DOWN, build.runUUID, true, true).catch((error) => { - getLogger().warn({ error }, `Failed to update status to ${BuildStatus.TORN_DOWN}`); + getLogger().warn({ error }, `Build: status update failed status=${BuildStatus.TORN_DOWN}`); }); } catch (e) { - getLogger().error({ error: e instanceof LifecycleError ? e.getMessage() : e }, 'Error deleting build'); + getLogger().error({ error: e instanceof LifecycleError ? e.getMessage() : e }, 'Build: delete failed'); } } } @@ -767,7 +767,7 @@ export default class BuildService extends BaseService { dashboardLinks = insertBuildLink(dashboardLinks, 'Fastly Dashboard', fastlyDashboardUrl.href); } } catch (err) { - getLogger().error({ error: err }, 'Unable to get Fastly dashboard URL'); + getLogger().error({ error: err }, 'Fastly: dashboard URL fetch failed'); } } await build.$query().patch({ dashboardLinks }); @@ -781,7 +781,7 @@ export default class BuildService extends BaseService { updateStatus, error ).catch((e) => { - getLogger().error({ error: e }, 'Unable to update pull request activity stream'); + getLogger().error({ error: e }, 'ActivityStream: update failed'); }); } } finally { @@ -814,7 +814,7 @@ export default class BuildService extends BaseService { const configUUIDs = configDeploys.map((deploy) => deploy?.uuid).join(','); getLogger().info(`Build: config deploys marked built uuids=${configUUIDs}`); } catch (error) { - getLogger().error({ error }, 'Failed to update configuration type deploy as built'); + getLogger().error({ error }, 'Config: deploy update failed'); } } @@ -827,7 +827,7 @@ export default class BuildService extends BaseService { }); const buildId = build?.id; if (!buildId) { - getLogger().error('No build ID found for deployCLIServices'); + getLogger().error('Build: id missing for=deployCLIServices'); } const deploys = await Deploy.query() .where({ buildId, ...(githubRepositoryId ? 
{ githubRepositoryId } : {}) }) @@ -848,7 +848,7 @@ export default class BuildService extends BaseService { const result = await this.db.services.Deploy.deployCLI(deploy); return result; } catch (err) { - getLogger().error({ error: err }, `CLI deploy failed: deployUuid=${deploy?.uuid}`); + getLogger().error({ error: err }, `CLI: deploy failed uuid=${deploy?.uuid}`); return false; } }) @@ -864,18 +864,18 @@ export default class BuildService extends BaseService { getLogger().debug(`Deploy is undefined in deployCLIServices: deploysLength=${deploys.length}`); } const result = await this.db.services.Deploy.deployCLI(deploy).catch((error) => { - getLogger().error({ error }, 'CLI deploy failed'); + getLogger().error({ error }, 'CLI: deploy failed'); return false; }); - if (!result) getLogger().info(`CLI: deploy failed deployUuid=${deploy.uuid}`); + if (!result) getLogger().info(`CLI: deploy failed uuid=${deploy.uuid}`); return result; }) ) ); } } catch (error) { - getLogger().error({ error }, 'CLI build failed'); + getLogger().error({ error }, 'CLI: build failed'); return false; } } @@ -888,7 +888,7 @@ export default class BuildService extends BaseService { async buildImages(build: Build, githubRepositoryId = null): Promise { const buildId = build?.id; if (!buildId) { - getLogger().error('No build ID found for buildImages'); + getLogger().error('Build: id missing for=buildImages'); } const deploys = await Deploy.query() @@ -935,7 +935,7 @@ export default class BuildService extends BaseService { getLogger().debug(`Build results: results=${results.join(',')} final=${finalResult}`); return finalResult; } catch (error) { - getLogger().error({ error }, 'Uncaught Docker Build Error'); + getLogger().error({ error }, 'Docker: build error'); return false; } } else { @@ -960,7 +960,7 @@ export default class BuildService extends BaseService { ); return _.every(results); } catch (error) { - getLogger().error({ error }, 'Uncaught Docker Build Error'); + getLogger().error({ error }, 'Docker: build error'); return false; } } @@ -1068,14 +1068,14 @@ export default class BuildService extends BaseService { await this.updateDeploysImageDetails(build, githubRepositoryId); return true; } catch (e) { - getLogger().warn({ error: e }, 'Problem deploying services to Kubernetes cluster'); + getLogger().warn({ error: e }, 'K8s: deploy failed'); throw e; } } else { try { const buildId = build?.id; if (!buildId) { - getLogger().error('No build ID found for generateAndApplyManifests'); + getLogger().error('Build: id missing for=generateAndApplyManifests'); } const { serviceAccount } = await GlobalConfigService.getInstance().getAllConfigs(); @@ -1129,7 +1129,7 @@ export default class BuildService extends BaseService { return true; } catch (e) { - getLogger().warn({ error: e }, 'Problem deploying services to Kubernetes cluster'); + getLogger().warn({ error: e }, 'K8s: deploy failed'); return false; } } @@ -1161,7 +1161,7 @@ export default class BuildService extends BaseService { await Promise.all( deploys.map((deploy) => deploy.$query().patch({ isRunningLatest: true, runningImage: deploy?.dockerImage })) ); - getLogger().debug('Updated deploys with running image and latest status'); + getLogger().debug('Deploy: updated running image and status'); } /** @@ -1223,7 +1223,7 @@ export default class BuildService extends BaseService { } catch (error) { getLogger({ stage: LogStage.CLEANUP_FAILED }).error( { error }, - `Error processing delete queue for build ${buildId}` + `Queue: delete processing failed buildId=${buildId}` ); } }); 
@@ -1267,7 +1267,7 @@ export default class BuildService extends BaseService { if (error instanceof ParsingError || error instanceof ValidationError) { this.updateStatusAndComment(build, BuildStatus.CONFIG_ERROR, build?.runUUID, true, true, error); } else { - getLogger({ stage: LogStage.BUILD_FAILED }).fatal({ error }, `Uncaught exception`); + getLogger({ stage: LogStage.BUILD_FAILED }).fatal({ error }, 'Build: uncaught exception'); } } }); @@ -1315,8 +1315,7 @@ export default class BuildService extends BaseService { ...extractContextForQueue(), }); } catch (error) { - const text = `[BUILD ${buildId}][processResolveAndDeployBuildQueue] error processing buildId with the jobId, ${jobId}`; - getLogger().error({ error }, text); + getLogger().error({ error }, `Queue: processing failed buildId=${buildId} jobId=${jobId}`); } }); }; diff --git a/src/server/services/deploy.ts b/src/server/services/deploy.ts index e51daee..afc84c0 100644 --- a/src/server/services/deploy.ts +++ b/src/server/services/deploy.ts @@ -77,7 +77,7 @@ export default class DeployService extends BaseService { const buildId = build?.id; if (!buildId) { - getLogger().error('findOrCreateDeploys: No build ID found for this build'); + getLogger().error('Deploy: build id missing for=findOrCreateDeploys'); return []; } @@ -96,11 +96,11 @@ export default class DeployService extends BaseService { deployableId: deployable.id, buildId, }).catch((error) => { - getLogger().warn({ error, serviceId: deployable.id }, 'Failed to find deploy'); + getLogger().warn({ error, serviceId: deployable.id }, 'Deploy: find failed'); return null; }); if (deploy) { - getLogger().warn(`Deploy not in batch result but found via fallback: deployableId=${deployable.id}`); + getLogger().warn(`Deploy: fallback find succeeded deployableId=${deployable.id}`); } } @@ -138,7 +138,7 @@ export default class DeployService extends BaseService { const sha = await getShaForDeploy(deploy); patchFields.sha = sha; } catch (error) { - getLogger().debug({ error }, 'Unable to get SHA, continuing'); + getLogger().debug({ error }, 'Deploy: SHA fetch failed continuing=true'); } } @@ -150,7 +150,7 @@ export default class DeployService extends BaseService { await deploy.$query().patch(patchFields); }) ).catch((error) => { - getLogger().error({ error }, 'Failed to create deploys from deployables'); + getLogger().error({ error }, 'Deploy: create from deployables failed'); }); getLogger().info('Deploy: initialized'); } else { @@ -189,12 +189,12 @@ export default class DeployService extends BaseService { environment.defaultServices.map((service) => serviceInitFunc(service, true)), environment.optionalServices.map((service) => serviceInitFunc(service, false)), ]).catch((error) => { - getLogger().error({ error }, 'Failed to create/update deploys'); + getLogger().error({ error }, 'Deploy: create/update failed'); }); } const buildId = build?.id; if (!buildId) { - getLogger().error('findOrCreateDeploy: No build ID found for this build'); + getLogger().error('Deploy: build id missing for=findOrCreateDeploys'); } await this.db.models.Deploy.query().where({ buildId }); @@ -222,18 +222,18 @@ export default class DeployService extends BaseService { const uuid = `${service.name}-${build?.uuid}`; const buildId = build?.id; if (!buildId) { - getLogger().error('findOrCreateDeploy: No build ID found for this build'); + getLogger().error('Deploy: build id missing for=findOrCreateDeploy'); } const serviceId = service?.id; if (!serviceId) { - getLogger().error('findOrCreateDeploy: No service ID found for 
this service'); + getLogger().error('Deploy: service id missing for=findOrCreateDeploy'); } // Deployable should be found at this point; otherwise, something is very wrong. const deployable: Deployable = await this.db.models.Deployable.query() .findOne({ buildId, serviceId }) .catch((error) => { - getLogger().error({ error, serviceId }, 'Failed to find deployable'); + getLogger().error({ error, serviceId }, 'Deployable: find failed'); return null; }); @@ -241,7 +241,7 @@ export default class DeployService extends BaseService { serviceId, buildId, }).catch((error) => { - getLogger().warn({ error, serviceId }, 'Failed to find deploy'); + getLogger().warn({ error, serviceId }, 'Deploy: find failed'); return null; }); if (deploy != null) { @@ -256,11 +256,11 @@ export default class DeployService extends BaseService { } else { const buildId = build?.id; if (!buildId) { - getLogger().error('findOrCreateDeploy: No build ID found for this build'); + getLogger().error('Deploy: build id missing for=findOrCreateDeploy'); } const serviceId = service?.id; if (!serviceId) { - getLogger().error('findOrCreateDeploy: No service ID found for this service'); + getLogger().error('Deploy: service id missing for=findOrCreateDeploy'); } // Create deploy object if this is a new deployment deploy = await this.db.models.Deploy.create({ @@ -364,7 +364,7 @@ export default class DeployService extends BaseService { } return null; } catch (error) { - getLogger().debug({ error }, 'Error checking for existing Aurora database'); + getLogger().debug({ error }, 'Aurora: check failed'); return null; } } @@ -378,7 +378,7 @@ export default class DeployService extends BaseService { await deploy.$fetchGraph('[build, deployable]'); if (!deploy.deployable) { - getLogger().error('Missing deployable for Aurora restore'); + getLogger().error('Aurora: deployable missing for=restore'); return false; } @@ -422,7 +422,7 @@ export default class DeployService extends BaseService { getLogger().info('Aurora: restored'); return true; } catch (e) { - getLogger().error({ error: e }, 'Aurora cluster restore failed'); + getLogger().error({ error: e }, 'Aurora: cluster restore failed'); await deploy.$query().patch({ status: DeployStatus.ERROR, }); @@ -457,7 +457,7 @@ export default class DeployService extends BaseService { }); if (!fullSha) { - getLogger().warn({ owner, name, branch: deploy.branchName }, 'Commit SHA cannot be falsy'); + getLogger().warn({ owner, name, branch: deploy.branchName }, 'Git: SHA missing'); result = false; } else { @@ -468,7 +468,7 @@ export default class DeployService extends BaseService { if (deploy?.sha === buildSha) { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT, sha: buildSha }, runUUID).catch( (error) => { - getLogger().warn({ error }, 'Failed to update activity feed'); + getLogger().warn({ error }, 'ActivityFeed: update failed'); } ); getLogger().info('Codefresh: skipped reason=noChanges status=built'); @@ -486,7 +486,7 @@ export default class DeployService extends BaseService { }); codefreshBuildId = await cli.codefreshDeploy(deploy, build, service, deployable).catch((error) => { - getLogger().error({ error }, 'Failed to receive codefresh build id'); + getLogger().error({ error }, 'Codefresh: build id missing'); return null; }); getLogger().info('Codefresh: triggered'); @@ -503,7 +503,7 @@ export default class DeployService extends BaseService { }, runUUID ).catch((error) => { - getLogger().warn({ error }, 'ActivityFeed: 
update failed'); }); getLogger().info(`Codefresh: waiting url=${buildLogs}`); await cli.waitForCodefresh(codefreshBuildId); @@ -519,12 +519,12 @@ export default class DeployService extends BaseService { }, runUUID ).catch((error) => { - getLogger().warn({ error }, 'Failed to update activity feed'); + getLogger().warn({ error }, 'ActivityFeed: update failed'); }); result = true; } } catch (error) { - getLogger().error({ error, url: buildLogs }, 'Codefresh build failed'); + getLogger().error({ error, url: buildLogs }, 'Codefresh: build failed'); await this.patchAndUpdateActivityFeed( deploy, { @@ -669,10 +669,10 @@ export default class DeployService extends BaseService { (await codefresh.tagExists({ tag, ecrRepo, uuid })) && (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid }))); - getLogger().debug({ tagsExist }, 'Tags exist check'); + getLogger().debug({ tagsExist }, 'Build: tags exist check'); const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG'; if (!ecrDomain || !registry) { - getLogger().error({ lifecycleDefaults }, 'Missing ECR config to build image'); + getLogger().error({ lifecycleDefaults }, 'ECR: config missing for build'); await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); return false; } @@ -723,7 +723,7 @@ export default class DeployService extends BaseService { } } } else { - getLogger().debug({ type: service.type }, 'Build type not recognized'); + getLogger().debug({ type: service.type }, 'Build: type not recognized'); return false; } return true; @@ -775,17 +775,17 @@ export default class DeployService extends BaseService { ); return true; } catch (error) { - getLogger().warn({ error }, 'Error processing Helm deployment'); + getLogger().warn({ error }, 'Helm: deployment processing failed'); return false; } } default: - getLogger().debug({ type: deployable.type }, 'Build type not recognized'); + getLogger().debug({ type: deployable.type }, 'Build: type not recognized'); return false; } } } catch (e) { - getLogger().error({ error: e }, 'Uncaught error building docker image'); + getLogger().error({ error: e }, 'Docker: build error'); return false; } } @@ -823,7 +823,7 @@ export default class DeployService extends BaseService { targetGithubRepositoryId ); } catch (error) { - getLogger().warn({ error }, 'Failed to update the activity feeds'); + getLogger().warn({ error }, 'ActivityFeed: update failed'); } } @@ -846,7 +846,7 @@ export default class DeployService extends BaseService { initDockerImage, }) .catch((error) => { - getLogger().warn({ error }, 'patchDeployWithTag failed'); + getLogger().warn({ error }, 'Deploy: tag patch failed'); }); } @@ -912,7 +912,7 @@ export default class DeployService extends BaseService { // Verify we actually have a SHA from github before proceeding if (!fullSha) { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID); - getLogger().error({ owner, name, branch: deploy.branchName }, 'Failed to retrieve SHA to build'); + getLogger().error({ owner, name, branch: deploy.branchName }, 'Git: SHA fetch failed'); return false; } @@ -936,7 +936,7 @@ export default class DeployService extends BaseService { const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG'; if (!ecrDomain || !registry) { - getLogger().error({ lifecycleDefaults }, 'Missing ECR config to build image'); + getLogger().error({ lifecycleDefaults }, 'ECR: config missing for build'); await this.patchAndUpdateActivityFeed(deploy, { status: 
DeployStatus.ERROR }, runUUID); return false; } @@ -945,7 +945,7 @@ export default class DeployService extends BaseService { (await codefresh.tagExists({ tag, ecrRepo, uuid })) && (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid }))); - getLogger().debug({ tagsExist }, 'Tags exist check'); + getLogger().debug({ tagsExist }, 'Build: tags exist check'); // Check for and skip duplicates if (!tagsExist) { @@ -1039,7 +1039,7 @@ export default class DeployService extends BaseService { return true; } else { await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID); - getLogger().warn({ url: buildLogs }, 'Error building image'); + getLogger().warn({ url: buildLogs }, 'Build: image failed'); return false; } } else { @@ -1125,7 +1125,7 @@ export default class DeployService extends BaseService { } }); } catch (error) { - getLogger().error({ error, pipelineId, serviceName }, 'Error processing pipeline'); + getLogger().error({ error, pipelineId, serviceName }, 'Pipeline: processing failed'); throw error; } } diff --git a/src/server/services/deployable.ts b/src/server/services/deployable.ts index 8057970..b34482c 100644 --- a/src/server/services/deployable.ts +++ b/src/server/services/deployable.ts @@ -211,7 +211,7 @@ export default class DeployableService extends BaseService { buildUUID, service: service.name, error, - }).error('Failed to generate deployable attributes from database configuration'); + }).error('Deployable: generate attributes from DB failed'); throw error; } @@ -383,7 +383,7 @@ export default class DeployableService extends BaseService { buildUUID, service: service.name, error, - }).error('Failed to generate deployable attributes from yaml configuration'); + }).error('Deployable: generate attributes from YAML failed'); throw error; } @@ -419,7 +419,7 @@ export default class DeployableService extends BaseService { buildUUID, service: service.name, error, - }).error('Failed to merge deployable attributes from database with yaml configuration'); + }).error('Deployable: merge attributes failed'); throw error; } @@ -475,7 +475,7 @@ export default class DeployableService extends BaseService { buildUUID, service: service.name, error, - }).error('Failed to overwrite deployable configuration with yaml configuration'); + }).error('Deployable: overwrite config with YAML failed'); throw error; } } @@ -539,7 +539,7 @@ export default class DeployableService extends BaseService { buildUUID, service: service.name, error, - }).error('Failed to create or update deployable attributes from database configuration'); + }).error('Deployable: upsert attributes from DB failed'); throw error; } } @@ -643,7 +643,7 @@ export default class DeployableService extends BaseService { buildUUID, service: service.name, error, - }).error('Failed to create or update deployable attributes from yaml configuration'); + }).error('Deployable: upsert attributes from YAML failed'); throw error; } } @@ -716,7 +716,7 @@ export default class DeployableService extends BaseService { buildUUID, service: dbEnvService.name, error, - }).error('Failed during attribution while using database configuration'); + }).error('Deployable: attribution failed source=db'); throw error; } }) @@ -736,7 +736,7 @@ export default class DeployableService extends BaseService { buildUUID, environment: environment.name, error, - }).error('Failed to create or update deployable from database configuration'); + }).error('Deployable: upsert from DB config failed'); throw error; } } @@ -775,7 
+775,7 @@ export default class DeployableService extends BaseService { id: yamlEnvService.serviceId, }) .catch((error) => { - getLogger({ buildUUID, error }).warn('Query error'); + getLogger({ buildUUID, error }).warn('Query: failed'); return null; }); @@ -804,7 +804,7 @@ export default class DeployableService extends BaseService { buildUUID, service: yamlEnvService.name, error, - }).error('Failed to create or update deployable from yaml configuration when using service ID'); + }).error('Deployable: create/update from yaml failed source=serviceId'); throw error; } } else { @@ -895,7 +895,7 @@ export default class DeployableService extends BaseService { buildUUID, service: yamlEnvService.name, error, - }).error('Failed to create or update deployable from yaml configuration'); + }).error('Deployable: create/update from yaml failed'); throw error; } } @@ -904,7 +904,7 @@ export default class DeployableService extends BaseService { buildUUID, service: yamlEnvService.name, error, - }).error('Failed to create or update deployable from yaml configuration'); + }).error('Deployable: create/update from yaml failed'); throw error; } }) @@ -963,10 +963,10 @@ export default class DeployableService extends BaseService { } } } else { - getLogger({ buildUUID }).warn('Missing PR branch name'); + getLogger({ buildUUID }).warn('PR: branch name missing'); } } catch (error) { - getLogger({ buildUUID, error }).error('Failed to create or update deployable from yaml configuration'); + getLogger({ buildUUID, error }).error('Deployable: create/update from yaml failed'); throw error; } } @@ -1025,7 +1025,7 @@ export default class DeployableService extends BaseService { buildUUID, environment: environment.name, error, - }).error('Failed to upsert deployables'); + }).error('Deployable: upsert failed'); throw error; } getLogger({ buildUUID }).info(`Deployable: upserted count=${deployables.length}`); @@ -1059,7 +1059,7 @@ export default class DeployableService extends BaseService { buildUUID, service: deployableAttr.name, error, - }).error('Unable to search deployable'); + }).error('Deployable: search failed'); return undefined; }); @@ -1072,7 +1072,7 @@ export default class DeployableService extends BaseService { buildUUID, service: deployableAttr.name, error, - }).error('Unable to patch deployable'); + }).error('Deployable: patch failed'); }); } else { deployable = await this.db.models.Deployable.create(deployableAttr as object).catch((error) => { @@ -1080,7 +1080,7 @@ export default class DeployableService extends BaseService { buildUUID, service: deployableAttr.name, error, - }).error('Unable to create new deployable'); + }).error('Deployable: create failed'); return undefined; }); } diff --git a/src/server/services/github.ts b/src/server/services/github.ts index 6bf8572..6879db2 100644 --- a/src/server/services/github.ts +++ b/src/server/services/github.ts @@ -75,7 +75,7 @@ export default class GithubService extends Service { isJSON: true, })) as LifecycleYamlConfigOptions; } catch (error) { - getLogger({}).warn({ error }, `Unable to fetch lifecycle config for ${fullName}/${branch}`); + getLogger({}).warn({ error }, `Config: fetch failed repo=${fullName}/${branch}`); } } repository = await this.db.services.Repository.findRepository(ownerId, repositoryId, installationId); @@ -148,7 +148,7 @@ export default class GithubService extends Service { pullRequestId, }); if (!build) { - getLogger({}).warn(`No build found for closed pull request ${fullName}/${branch}, skipping deletion`); + getLogger({}).warn(`Build: not 
found for closed PR repo=${fullName}/${branch}`); return; } await this.db.services.BuildService.deleteBuild(build); @@ -162,7 +162,7 @@ export default class GithubService extends Service { }); } } catch (error) { - getLogger().fatal({ error }, `Unable to handle Github pull request event for ${fullName}/${branch}`); + getLogger().fatal({ error }, `Github: PR event handling failed repo=${fullName} branch=${branch}`); } } @@ -184,7 +184,7 @@ export default class GithubService extends Service { getLogger().info(`PR: edited by=${commentCreatorUsername}`); await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body); } catch (error) { - getLogger().error({ error }, `Unable to handle Github Issue Comment event`); + getLogger().error({ error }, `GitHub: issue comment handling failed`); } }; @@ -229,14 +229,14 @@ export default class GithubService extends Service { const buildId = build?.id; if (!buildId) { - getLogger().error(`No build ID found for this pull request in handleLabelWebhook`); + getLogger().error(`Build: id not found for=handleLabelWebhook`); } await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { buildId, ...extractContextForQueue(), }); } catch (error) { - getLogger().error({ error }, `Error processing label webhook`); + getLogger().error({ error }, `Label: webhook processing failed`); } }; @@ -292,7 +292,7 @@ export default class GithubService extends Service { for (const build of buildsToDeploy) { const buildId = build?.id; if (!buildId) { - getLogger().error(`No build ID found for this build in handlePushWebhook`); + getLogger().error(`Build: id not found for=handlePushWebhook`); } // Only check for failed deploys on PR environments, not static environments let hasFailedDeploys = false; @@ -322,7 +322,7 @@ export default class GithubService extends Service { }); } } catch (error) { - getLogger({}).error({ error }, `Error processing push webhook`); + getLogger({}).error({ error }, `Push: webhook processing failed`); } }; @@ -367,7 +367,7 @@ export default class GithubService extends Service { } catch (error) { getLogger({}).error( { error }, - `Error processing push webhook for static env for branch=${branchName} repositoryId=${githubRepositoryId}` + `Push: static env webhook failed branch=${branchName} repositoryId=${githubRepositoryId}` ); } }; @@ -395,21 +395,21 @@ export default class GithubService extends Service { if (hasLabelChange) return await this.handleLabelWebhook(body); else return await this.handlePullRequestHook(body); } catch (e) { - getLogger({}).error({ error: e }, `Error handling PULL_REQUEST event`); + getLogger({}).error({ error: e }, `GitHub: PULL_REQUEST event handling failed`); throw e; } case GithubWebhookTypes.PUSH: try { return await this.handlePushWebhook(body); } catch (e) { - getLogger({}).error({ error: e }, `Error handling PUSH event`); + getLogger({}).error({ error: e }, `GitHub: PUSH event handling failed`); throw e; } case GithubWebhookTypes.ISSUE_COMMENT: try { return await this.handleIssueCommentWebhook(body); } catch (e) { - getLogger({}).error({ error: e }, `Error handling ISSUE_COMMENT event`); + getLogger({}).error({ error: e }, `GitHub: ISSUE_COMMENT event handling failed`); throw e; } default: @@ -500,7 +500,7 @@ export default class GithubService extends Service { labels: JSON.stringify(labelNames), }); } catch (error) { - getLogger().error({ error }, `Error patching pull request for ${pullRequest?.fullName}/${branch}`); + getLogger().error({ error }, `PR: patch failed 
repo=${pullRequest?.fullName}/${branch}`); } }; diff --git a/src/server/services/globalConfig.ts b/src/server/services/globalConfig.ts index 78c71a2..5cb49dd 100644 --- a/src/server/services/globalConfig.ts +++ b/src/server/services/globalConfig.ts @@ -142,7 +142,7 @@ export default class GlobalConfigService extends BaseService { if (!labels) throw new Error('Labels configuration not found in global config'); return labels; } catch (error) { - getLogger().error({ error }, 'Error retrieving labels configuration, using fallback defaults'); + getLogger().error({ error }, 'Config: labels fetch failed using=defaults'); // Return fallback defaults on error return { deploy: ['lifecycle-deploy!'], @@ -161,7 +161,7 @@ export default class GlobalConfigService extends BaseService { try { deserializedConfigs[key as keyof GlobalConfig] = JSON.parse(value as string); } catch (e) { - getLogger().error({ error: e }, `Error deserializing config: key=${key}`); + getLogger().error({ error: e }, `Config: deserialize failed key=${key}`); } } return deserializedConfigs as GlobalConfig; @@ -194,7 +194,7 @@ export default class GlobalConfigService extends BaseService { try { await this.getGithubClientToken(true); } catch (error) { - getLogger().error({ error }, 'Error refreshing GlobalConfig cache during boot'); + getLogger().error({ error }, 'Config: cache refresh failed during=boot'); } } @@ -221,7 +221,7 @@ export default class GlobalConfigService extends BaseService { await this.getGithubClientToken(true); getLogger({ stage: LogStage.CONFIG_REFRESH }).debug('GlobalConfig and Github cache refreshed successfully'); } catch (error) { - getLogger({ stage: LogStage.CONFIG_FAILED }).error({ error }, 'Error refreshing GlobalConfig cache'); + getLogger({ stage: LogStage.CONFIG_FAILED }).error({ error }, 'Config: cache refresh failed'); } }); }; @@ -238,7 +238,7 @@ export default class GlobalConfigService extends BaseService { await this.db.knex('global_config').insert({ key, config: value }).onConflict('key').merge(); getLogger().info(`Config: set key=${key}`); } catch (err: any) { - getLogger().error({ error: err }, `Error setting global config value: key=${key}`); + getLogger().error({ error: err }, `Config: set failed key=${key}`); throw err; } } diff --git a/src/server/services/ingress.ts b/src/server/services/ingress.ts index 7a124f7..963d151 100644 --- a/src/server/services/ingress.ts +++ b/src/server/services/ingress.ts @@ -81,7 +81,7 @@ export default class IngressService extends BaseService { }); getLogger({ stage: LogStage.INGRESS_COMPLETE }).info('Ingress: cleaned up'); } catch (e) { - getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error: e }, 'Error cleaning up ingress'); + getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error: e }, 'Ingress: cleanup failed'); } }); }; @@ -200,7 +200,7 @@ export default class IngressService extends BaseService { await fs.promises.writeFile(localPath, manifest, 'utf8'); await shellPromise(`kubectl apply -f ${localPath} --namespace ${namespace}`); } catch (error) { - getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error }, 'Failed to apply ingress manifest'); + getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error }, 'Ingress: manifest apply failed'); } }; } diff --git a/src/server/services/override.ts b/src/server/services/override.ts index d493410..1ae6842 100644 --- a/src/server/services/override.ts +++ b/src/server/services/override.ts @@ -56,7 +56,7 @@ export default class OverrideService extends BaseService { return { valid: false, error: 'UUID is 
not available' }; } } catch (error) { - getLogger().error({ error }, 'Error checking UUID uniqueness'); + getLogger().error({ error }, 'UUID: uniqueness check failed'); return { valid: false, error: 'Unable to validate UUID' }; } @@ -108,7 +108,7 @@ export default class OverrideService extends BaseService { const updatedBuild = await this.db.models.Build.query(trx).findById(build.id); k8s.deleteNamespace(oldNamespace).catch((error) => { - getLogger().warn({ error }, `Failed to delete old namespace ${oldNamespace}`); + getLogger().warn({ error }, `Namespace: delete failed name=${oldNamespace}`); }); getLogger().info(`Override: updated oldUuid=${oldUuid} newUuid=${newUuid} deploysUpdated=${deploys.length}`); @@ -118,7 +118,7 @@ export default class OverrideService extends BaseService { }; }); } catch (error) { - getLogger().error({ error }, `Failed to update UUID to '${newUuid}'`); + getLogger().error({ error }, `UUID: update failed newUuid=${newUuid}`); throw error; } } diff --git a/src/server/services/pullRequest.ts b/src/server/services/pullRequest.ts index 4db7f7b..d6933a0 100644 --- a/src/server/services/pullRequest.ts +++ b/src/server/services/pullRequest.ts @@ -74,7 +74,7 @@ export default class PullRequestService extends BaseService { ); } } else { - getLogger({ fullName, pullRequestNumber }).error({ error }, 'Failed to create pull request'); + getLogger({ fullName, pullRequestNumber }).error({ error }, 'PR: create failed'); throw error; } } @@ -169,7 +169,7 @@ export default class PullRequestService extends BaseService { await this.db.services.BuildService.cleanupBuilds(); getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info('Cleanup: closed PRs completed'); } catch (error) { - getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Error processing cleanup closed PRs'); + getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Cleanup: closed PRs processing failed'); } }); }; diff --git a/src/server/services/repository.ts b/src/server/services/repository.ts index 0213b15..a7ba36e 100644 --- a/src/server/services/repository.ts +++ b/src/server/services/repository.ts @@ -55,7 +55,7 @@ export default class RepositoryService extends BaseService { defaultEnvId, })); } catch (error) { - getLogger({ githubRepositoryId, error }).error('Failed to find or create repository'); + getLogger({ githubRepositoryId, error }).error('Repository: find or create failed'); throw error; } @@ -82,7 +82,7 @@ export default class RepositoryService extends BaseService { ownerId, }); } catch (error) { - getLogger({ githubRepositoryId, error }).error('Failed to find repository'); + getLogger({ githubRepositoryId, error }).error('Repository: find failed'); throw error; } diff --git a/src/server/services/service.ts b/src/server/services/service.ts index 89ec13b..354a00f 100644 --- a/src/server/services/service.ts +++ b/src/server/services/service.ts @@ -78,7 +78,7 @@ export default class ServiceService extends BaseService { } } } catch (error) { - getLogger({ environment: environment.name, error }).error('Failed to find or create default service'); + getLogger({ environment: environment.name, error }).error('Service: find or create failed'); throw error; } diff --git a/src/server/services/ttlCleanup.ts b/src/server/services/ttlCleanup.ts index 2da9af8..96f0212 100644 --- a/src/server/services/ttlCleanup.ts +++ b/src/server/services/ttlCleanup.ts @@ -100,7 +100,7 @@ export default class TTLCleanupService extends Service { } } catch (error) { errorCount++; - getLogger().error({ error }, `Failed to 
cleanup environment: namespace=${env.namespace}`); + getLogger().error({ error }, `TTL: cleanup failed namespace=${env.namespace}`); } }); } @@ -109,7 +109,7 @@ export default class TTLCleanupService extends Service { `TTL: completed found=${staleEnvironments.length} success=${successCount} errors=${errorCount}` ); } catch (error) { - getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Error in TTL cleanup job'); + getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'TTL: cleanup job failed'); throw error; } }); @@ -189,7 +189,7 @@ export default class TTLCleanupService extends Service { const buildUUID = labels['lfc/uuid']; if (!buildUUID) { - getLogger().warn(`Namespace ${nsName} has no lfc/uuid label, skipping`); + getLogger().warn(`TTL: namespace missing uuid label namespace=${nsName}`); continue; } @@ -202,7 +202,7 @@ export default class TTLCleanupService extends Service { .withGraphFetched('[pullRequest.repository]'); if (!build) { - getLogger().warn(`No build found for namespace ${nsName}, skipping`); + getLogger().warn(`TTL: build not found namespace=${nsName}`); continue; } @@ -219,7 +219,7 @@ export default class TTLCleanupService extends Service { const pullRequest = build.pullRequest; if (!pullRequest) { - getLogger().warn(`No pull request found, skipping`); + getLogger().warn('TTL: pull request not found'); continue; } @@ -251,7 +251,7 @@ export default class TTLCleanupService extends Service { }); } } catch (error) { - getLogger().warn({ error }, `Failed to fetch labels from GitHub, falling back to DB`); + getLogger().warn({ error }, 'TTL: GitHub labels fetch failed, using DB'); currentLabels = this.parseLabels(pullRequest.labels); } diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts index 42a0bce..38e5fce 100644 --- a/src/server/services/webhook.ts +++ b/src/server/services/webhook.ts @@ -276,7 +276,7 @@ export default class WebhookService extends BaseService { try { await this.db.services.Webhook.runWebhooksForBuild(build); } catch (e) { - getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error({ error: e }, 'Failed to invoke the webhook'); + getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error({ error: e }, 'Webhook: invocation failed'); } }); }; diff --git a/src/shared/utils.ts b/src/shared/utils.ts index 9decd06..7ed9e01 100644 --- a/src/shared/utils.ts +++ b/src/shared/utils.ts @@ -21,13 +21,9 @@ import { Deploy } from 'server/models'; import Fastly from 'server/lib/fastly'; import { Link, FeatureFlags } from 'shared/types'; import { DD_URL, DD_LOG_URL } from 'shared/constants'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger/index'; import Model from 'server/models/_Model'; -const logger = rootLogger.child({ - filename: 'src/shared/utils.ts', -}); - /** * determineIfFastlyIsUsed * @description determines if fastly is used in a given deploy @@ -152,7 +148,7 @@ export const constructFastlyBuildLink = async ( const { href: url = '' } = (await fastlyFn(fastlyBuildId, fastlyServiceType)) || {}; return url ? 
{ name: 'Fastly Dashboard', url } : {}; } catch (err) { - logger.error(`constructFastlyBuildLink: there was an error constructing the fastly build link: ${err}`); + getLogger().error(`Fastly: error constructing build link error=${err}`); return {}; } }; From 8391e0df4ac240e33753656b38b8e3b37f459437 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Sun, 11 Jan 2026 23:19:51 -0800 Subject: [PATCH 14/23] add missing buildUuid to log context --- src/server/services/activityStream.ts | 124 +++++++++++++------------- src/server/services/build.ts | 78 ++++++++-------- src/server/services/github.ts | 8 +- 3 files changed, 110 insertions(+), 100 deletions(-) diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts index d24f7c3..c7435f6 100644 --- a/src/server/services/activityStream.ts +++ b/src/server/services/activityStream.ts @@ -126,56 +126,58 @@ export default class ActivityStream extends BaseService { * @param body */ async updateBuildsAndDeploysFromCommentEdit(pullRequest: PullRequest, commentBody: string) { - let shouldUpdateStatus = true; - await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]'); const { build, repository } = pullRequest; const { deploys, id: buildId } = build; const buildUuid = build?.uuid; - const runUuid = nanoid(); - const REDEPLOY_FLAG = '#REDEPLOY'; - const REDEPLOY_CHECKBOX = '[x] Redeploy Environment'; - const PURGE_FASTLY_CHECKBOX = '[x] Purge Fastly Service Cache'; + return withLogContext({ buildUuid }, async () => { + let shouldUpdateStatus = true; + const runUuid = nanoid(); - const isRedeployRequested = [REDEPLOY_FLAG, REDEPLOY_CHECKBOX].some((flag) => commentBody.includes(flag)); - const isFastlyPurgeRequested = commentBody.includes(PURGE_FASTLY_CHECKBOX); + const REDEPLOY_FLAG = '#REDEPLOY'; + const REDEPLOY_CHECKBOX = '[x] Redeploy Environment'; + const PURGE_FASTLY_CHECKBOX = '[x] Purge Fastly Service Cache'; - try { - if (isRedeployRequested) { - getLogger().info('Deploy: redeploy reason=commentEdit'); - await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { - buildId, - runUUID: runUuid, - ...extractContextForQueue(), - }); - return; - } + const isRedeployRequested = [REDEPLOY_FLAG, REDEPLOY_CHECKBOX].some((flag) => commentBody.includes(flag)); + const isFastlyPurgeRequested = commentBody.includes(PURGE_FASTLY_CHECKBOX); - if (isFastlyPurgeRequested) { - // if fastly purge is requested from comment, we do not have to update the status - await this.purgeFastlyServiceCache(buildUuid); - shouldUpdateStatus = false; - return; - } + try { + if (isRedeployRequested) { + getLogger().info('Deploy: redeploy reason=commentEdit'); + await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', { + buildId, + runUUID: runUuid, + ...extractContextForQueue(), + }); + return; + } - // handle all environment/service overrides - await this.applyCommentOverrides({ build, deploys, pullRequest, commentBody, runUuid }); - } finally { - // after everything update the pr comment - await this.updatePullRequestActivityStream( - build, - deploys, - pullRequest, - repository, - true, - shouldUpdateStatus, - null, - true - ).catch((error) => { - getLogger().warn({ error }, 'ActivityFeed: comment edit update failed'); - }); - } + if (isFastlyPurgeRequested) { + // if fastly purge is requested from comment, we do not have to update the status + await this.purgeFastlyServiceCache(buildUuid); + shouldUpdateStatus = false; + return; + } + + // handle all 
environment/service overrides + await this.applyCommentOverrides({ build, deploys, pullRequest, commentBody, runUuid }); + } finally { + // after everything update the pr comment + await this.updatePullRequestActivityStream( + build, + deploys, + pullRequest, + repository, + true, + shouldUpdateStatus, + null, + true + ).catch((error) => { + getLogger().warn({ error }, 'ActivityFeed: comment edit update failed'); + }); + } + }); } private async applyCommentOverrides({ @@ -1176,27 +1178,29 @@ export default class ActivityStream extends BaseService { } private async purgeFastlyServiceCache(uuid: string) { - try { - const computeShieldServiceId = await this.fastly.getFastlyServiceId(uuid, 'compute-shield'); - getLogger().debug(`Fastly computeShieldServiceId=${computeShieldServiceId}`); - if (computeShieldServiceId) { - await this.fastly.purgeAllServiceCache(computeShieldServiceId, uuid, 'fastly'); - } + return withLogContext({ buildUuid: uuid }, async () => { + try { + const computeShieldServiceId = await this.fastly.getFastlyServiceId(uuid, 'compute-shield'); + getLogger().debug(`Fastly computeShieldServiceId=${computeShieldServiceId}`); + if (computeShieldServiceId) { + await this.fastly.purgeAllServiceCache(computeShieldServiceId, uuid, 'fastly'); + } - const optimizelyServiceId = await this.fastly.getFastlyServiceId(uuid, 'optimizely'); - getLogger().debug(`Fastly optimizelyServiceId=${optimizelyServiceId}`); - if (optimizelyServiceId) { - await this.fastly.purgeAllServiceCache(optimizelyServiceId, uuid, 'optimizely'); - } + const optimizelyServiceId = await this.fastly.getFastlyServiceId(uuid, 'optimizely'); + getLogger().debug(`Fastly optimizelyServiceId=${optimizelyServiceId}`); + if (optimizelyServiceId) { + await this.fastly.purgeAllServiceCache(optimizelyServiceId, uuid, 'optimizely'); + } - const fastlyServiceId = await this.fastly.getFastlyServiceId(uuid, 'fastly'); - getLogger().debug(`Fastly fastlyServiceId=${fastlyServiceId}`); - if (fastlyServiceId) { - await this.fastly.purgeAllServiceCache(fastlyServiceId, uuid, 'fastly'); + const fastlyServiceId = await this.fastly.getFastlyServiceId(uuid, 'fastly'); + getLogger().debug(`Fastly fastlyServiceId=${fastlyServiceId}`); + if (fastlyServiceId) { + await this.fastly.purgeAllServiceCache(fastlyServiceId, uuid, 'fastly'); + } + getLogger().info(`Fastly: purged serviceId=${fastlyServiceId}`); + } catch (error) { + getLogger().error({ error }, 'Fastly: cache purge failed'); } - getLogger().info(`Fastly: purged serviceId=${fastlyServiceId}`); - } catch (error) { - getLogger().error({ error }, 'Fastly: cache purge failed'); - } + }); } } diff --git a/src/server/services/build.ts b/src/server/services/build.ts index fa5078f..4faf2c9 100644 --- a/src/server/services/build.ts +++ b/src/server/services/build.ts @@ -743,52 +743,54 @@ export default class BuildService extends BaseService { updateStatus: boolean, error: Error = null ) { - try { - await build.reload(); - await build?.$fetchGraph('[deploys.[service, deployable], pullRequest.[repository]]'); + return withLogContext({ buildUuid: build.uuid }, async () => { + try { + await build.reload(); + await build?.$fetchGraph('[deploys.[service, deployable], pullRequest.[repository]]'); - const { deploys, pullRequest } = build; - const { repository } = pullRequest; + const { deploys, pullRequest } = build; + const { repository } = pullRequest; - if (build.runUUID !== runUUID) { - return; - } else { - await build.$query().patch({ - status, - }); + if (build.runUUID !== runUUID) { + return; + } 
else { + await build.$query().patch({ + status, + }); - // add dashboard links to build database - let dashboardLinks = constructBuildLinks(build.uuid); - const hasFastly = determineIfFastlyIsUsed(deploys); - if (hasFastly) { - try { - const fastlyDashboardUrl = await this.fastly.getServiceDashboardUrl(build.uuid, 'fastly'); - if (fastlyDashboardUrl) { - dashboardLinks = insertBuildLink(dashboardLinks, 'Fastly Dashboard', fastlyDashboardUrl.href); + // add dashboard links to build database + let dashboardLinks = constructBuildLinks(build.uuid); + const hasFastly = determineIfFastlyIsUsed(deploys); + if (hasFastly) { + try { + const fastlyDashboardUrl = await this.fastly.getServiceDashboardUrl(build.uuid, 'fastly'); + if (fastlyDashboardUrl) { + dashboardLinks = insertBuildLink(dashboardLinks, 'Fastly Dashboard', fastlyDashboardUrl.href); + } + } catch (err) { + getLogger().error({ error: err }, 'Fastly: dashboard URL fetch failed'); } - } catch (err) { - getLogger().error({ error: err }, 'Fastly: dashboard URL fetch failed'); } + await build.$query().patch({ dashboardLinks }); + + await this.db.services.ActivityStream.updatePullRequestActivityStream( + build, + deploys, + pullRequest, + repository, + updateMissionControl, + updateStatus, + error + ).catch((e) => { + getLogger().error({ error: e }, 'ActivityStream: update failed'); + }); } - await build.$query().patch({ dashboardLinks }); + } finally { + getLogger().debug(`Build status changed: status=${build.status}`); - await this.db.services.ActivityStream.updatePullRequestActivityStream( - build, - deploys, - pullRequest, - repository, - updateMissionControl, - updateStatus, - error - ).catch((e) => { - getLogger().error({ error: e }, 'ActivityStream: update failed'); - }); + await this.db.services.Webhook.webhookQueue.add('webhook', { buildId: build.id, ...extractContextForQueue() }); } - } finally { - getLogger().debug(`Build status changed: status=${build.status}`); - - await this.db.services.Webhook.webhookQueue.add('webhook', { buildId: build.id, ...extractContextForQueue() }); - } + }); } async markConfigurationsAsBuilt(build: Build) { diff --git a/src/server/services/github.ts b/src/server/services/github.ts index 6879db2..7735963 100644 --- a/src/server/services/github.ts +++ b/src/server/services/github.ts @@ -181,8 +181,12 @@ export default class GithubService extends Service { if (!pullRequest || isBot) return; await pullRequest.$fetchGraph('[build, repository]'); - getLogger().info(`PR: edited by=${commentCreatorUsername}`); - await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body); + const buildUuid = pullRequest.build?.uuid; + + return withLogContext({ buildUuid }, async () => { + getLogger().info(`PR: edited by=${commentCreatorUsername}`); + await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body); + }); } catch (error) { getLogger().error({ error }, `GitHub: issue comment handling failed`); } From dc01e9083742d4ef03c7f2fbf948da283a281335 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 00:19:19 -0800 Subject: [PATCH 15/23] clean up logs --- src/pages/api/v1/builds/[uuid]/deploy.ts | 6 +++--- src/server/services/build.ts | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts index 7d5b08a..477205d 100644 --- a/src/pages/api/v1/builds/[uuid]/deploy.ts +++ b/src/pages/api/v1/builds/[uuid]/deploy.ts @@ -93,7 +93,7 @@ export default async 
(req: NextApiRequest, res: NextApiResponse) => { } try { - getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Build: redeploy requested uuid=${uuid}`); + getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: redeploy requested'); const buildService = new BuildService(); const build: Build = await buildService.db.models.Build.query() @@ -113,14 +113,14 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { correlationId, }); - getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Build: redeploy queued uuid=${uuid}`); + getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: redeploy queued'); return res.status(200).json({ status: 'success', message: `Redeploy for build ${uuid} has been queued`, }); } catch (error) { - getLogger({ stage: LogStage.BUILD_FAILED, error }).error(`Build: redeploy failed uuid=${uuid}`); + getLogger({ stage: LogStage.BUILD_FAILED }).error({ error }, 'Build: redeploy failed'); return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` }); } }); diff --git a/src/server/services/build.ts b/src/server/services/build.ts index 4faf2c9..6ee6318 100644 --- a/src/server/services/build.ts +++ b/src/server/services/build.ts @@ -1304,7 +1304,7 @@ export default class BuildService extends BaseService { updateLogContext({ buildUuid: build.uuid }); } - getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: queued'); + getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: processing'); if (!build.pullRequest.deployOnUpdate) { getLogger().info('Deploy: skipping reason=deployOnUpdateDisabled'); From 2ec0868886f81eb1ed8ab71edb12468ad5790fec Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 11:24:32 -0800 Subject: [PATCH 16/23] fix oversized api response --- src/pages/api/v1/deploys.ts | 12 ------------ src/pages/api/v1/schema/validate.ts | 5 ++++- src/server/lib/github/cacheRequest.ts | 9 +++------ src/server/lib/github/index.ts | 6 +++--- 4 files changed, 10 insertions(+), 22 deletions(-) diff --git a/src/pages/api/v1/deploys.ts b/src/pages/api/v1/deploys.ts index 4270cfd..a85d3e6 100644 --- a/src/pages/api/v1/deploys.ts +++ b/src/pages/api/v1/deploys.ts @@ -66,10 +66,6 @@ import BuildService from 'server/services/build'; * type: string * env: * type: object - * buildLogs: - * type: string - * containerLogs: - * type: string * serviceId: * type: integer * buildId: @@ -104,8 +100,6 @@ import BuildService from 'server/services/build'; * type: string * replicaCount: * type: integer - * yamlConfig: - * type: object * deployableId: * type: integer * isRunningLatest: @@ -114,8 +108,6 @@ import BuildService from 'server/services/build'; * type: string * deployPipelineId: * type: string - * buildOutput: - * type: string * buildJobName: * type: string * 400: @@ -194,8 +186,6 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { 'internalHostname', 'publicUrl', 'env', - 'buildLogs', - 'containerLogs', 'serviceId', 'buildId', 'createdAt', @@ -211,12 +201,10 @@ export default async (req: NextApiRequest, res: NextApiResponse) => { 'cname', 'runUUID', 'replicaCount', - 'yamlConfig', 'deployableId', 'isRunningLatest', 'runningImage', 'deployPipelineId', - 'buildOutput', 'buildJobName' ); diff --git a/src/pages/api/v1/schema/validate.ts b/src/pages/api/v1/schema/validate.ts index 2237508..9e922f7 100644 --- a/src/pages/api/v1/schema/validate.ts +++ b/src/pages/api/v1/schema/validate.ts @@ -102,7 +102,7 @@ type ErrorResponse = { type Response = ValidationResponse | ErrorResponse; import { NextApiRequest, 
NextApiResponse } from 'next/types'; -import { getYamlFileContentFromBranch } from 'server/lib/github'; +import { getYamlFileContentFromBranch, ConfigFileNotFound } from 'server/lib/github'; import { getLogger } from 'server/lib/logger/index'; import { YamlConfigParser, ParsingError } from 'server/lib/yamlConfigParser'; import { YamlConfigValidator, ValidationError } from 'server/lib/yamlConfigValidator'; @@ -130,6 +130,9 @@ const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse Date: Mon, 12 Jan 2026 13:16:56 -0800 Subject: [PATCH 17/23] fix test --- src/server/lib/github/__tests__/deployments.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/lib/github/__tests__/deployments.test.ts b/src/server/lib/github/__tests__/deployments.test.ts index 5db3a7d..4682ea5 100644 --- a/src/server/lib/github/__tests__/deployments.test.ts +++ b/src/server/lib/github/__tests__/deployments.test.ts @@ -139,7 +139,7 @@ describe('GitHub Deployment Functions', () => { const error = new Error('Network error'); mockOctokit.request.mockRejectedValue(error); - await expect(deleteGithubDeployment(mockDeploy)).rejects.toThrow('Network error'); + await expect(deleteGithubDeployment(mockDeploy)).rejects.toThrow('GitHub API request failed'); expect(mockOctokit.request).toHaveBeenCalledWith( `DELETE /repos/${mockDeploy.build.pullRequest.repository.fullName}/deployments/${mockDeploy.githubDeploymentId}` ); From d999537487e7c410d5695ff3916d617d0acac97d Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 15:50:02 -0800 Subject: [PATCH 18/23] cleanup logs --- .../services/[name]/build-jobs/[jobName]/route.ts | 10 +++------- .../builds/[uuid]/services/[name]/build-jobs/route.ts | 10 +++------- src/server/lib/kubernetes/getNativeBuildJobs.ts | 10 +++------- src/server/services/logStreaming.ts | 10 ++-------- 4 files changed, 11 insertions(+), 29 deletions(-) diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts index 4a1e888..7e57cbe 100644 --- a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts +++ b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts @@ -15,16 +15,12 @@ */ import { NextRequest } from 'next/server'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; import { LogStreamingService } from 'server/services/logStreaming'; import { HttpError } from '@kubernetes/client-node'; import { createApiHandler } from 'server/lib/createApiHandler'; import { errorResponse, successResponse } from 'server/lib/response'; -const logger = rootLogger.child({ - filename: __filename, -}); - interface RouteParams { uuid: string; name: string; @@ -96,7 +92,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams }) const { uuid, name: serviceName, jobName } = params; if (!uuid || !jobName || !serviceName) { - logger.warn({ uuid, serviceName, jobName }, 'Missing or invalid path parameters'); + getLogger().warn(`API: invalid params uuid=${uuid} serviceName=${serviceName} jobName=${jobName}`); return errorResponse('Missing or invalid parameters', { status: 400 }, req); } @@ -107,7 +103,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams }) return successResponse(response, { status: 200 }, req); } catch (error: any) { - logger.error({ err: error, uuid, serviceName, jobName }, 'Error getting log streaming info'); + 
getLogger().error({ error }, `API: log streaming info failed jobName=${jobName} service=${serviceName}`); if (error.message === 'Build not found') { return errorResponse('Build not found', { status: 404 }, req); diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts index 12c6762..58d174b 100644 --- a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts +++ b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts @@ -15,16 +15,12 @@ */ import { NextRequest } from 'next/server'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; import { HttpError } from '@kubernetes/client-node'; import { createApiHandler } from 'server/lib/createApiHandler'; import { errorResponse, successResponse } from 'server/lib/response'; import { getNativeBuildJobs } from 'server/lib/kubernetes/getNativeBuildJobs'; -const logger = rootLogger.child({ - filename: __filename, -}); - /** * @openapi * /api/v2/builds/{uuid}/services/{name}/builds: @@ -87,7 +83,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string const { uuid, name } = params; if (!uuid || !name) { - logger.warn({ uuid, name }, 'Missing or invalid path parameters'); + getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`); return errorResponse('Missing or invalid uuid or name parameters', { status: 400 }, req); } @@ -99,7 +95,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string return successResponse(response, { status: 200 }, req); } catch (error) { - logger.error({ err: error }, `Error getting build logs for service ${name} in environment ${uuid}.`); + getLogger().error({ error }, `API: build logs fetch failed service=${name}`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { diff --git a/src/server/lib/kubernetes/getNativeBuildJobs.ts b/src/server/lib/kubernetes/getNativeBuildJobs.ts index de53b64..5897b5f 100644 --- a/src/server/lib/kubernetes/getNativeBuildJobs.ts +++ b/src/server/lib/kubernetes/getNativeBuildJobs.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; import * as k8s from '@kubernetes/client-node'; export interface BuildJobInfo { @@ -30,10 +30,6 @@ export interface BuildJobInfo { podName?: string; } -const logger = rootLogger.child({ - filename: __filename, -}); - export async function getNativeBuildJobs(serviceName: string, namespace: string): Promise { const kc = new k8s.KubeConfig(); kc.loadFromDefault(); @@ -115,7 +111,7 @@ export async function getNativeBuildJobs(serviceName: string, namespace: string) } } } catch (podError) { - logger.warn(`Failed to get pods for job ${jobName}:`, podError); + getLogger().warn({ error: podError }, `K8s: failed to get pods jobName=${jobName}`); } } @@ -141,7 +137,7 @@ export async function getNativeBuildJobs(serviceName: string, namespace: string) return buildJobs; } catch (error) { - logger.error(`Error listing native build jobs for service ${serviceName}:`, error); + getLogger().error({ error }, `K8s: failed to list build jobs service=${serviceName}`); throw error; } } diff --git a/src/server/services/logStreaming.ts b/src/server/services/logStreaming.ts index c22e753..a1b384d 100644 --- a/src/server/services/logStreaming.ts +++ b/src/server/services/logStreaming.ts @@ -14,15 +14,11 @@ * limitations under the License. 
*/ -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; import { getK8sJobStatusAndPod } from 'server/lib/logStreamingHelper'; import BuildService from 'server/services/build'; import { LogStreamResponse, LogType } from './types/logStreaming'; -const logger = rootLogger.child({ - filename: __filename, -}); - export class LogStreamingService { private buildService: BuildService; @@ -46,9 +42,7 @@ export class LogStreamingService { const namespace = `env-${uuid}`; const logType: LogType = (explicitType as LogType) || this.detectLogType(jobName); - logger.info( - `uuid=${uuid} name=${serviceName} jobName=${jobName} logType=${logType} message="Processing log request"` - ); + getLogger().info(`LogStreaming: processing log request name=${serviceName} jobName=${jobName} logType=${logType}`); // 3. Fetch K8s Data const podInfo = await getK8sJobStatusAndPod(jobName, namespace); From 9fb09a814f5ca3ddcbf90698fcff43134cf6d76d Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 22:00:50 -0800 Subject: [PATCH 19/23] logger migration --- .../services/[name]/deploy-jobs/[jobName]/route.ts | 10 +++------- .../builds/[uuid]/services/[name]/deploy-jobs/route.ts | 10 +++------- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts index 2630159..e6cb2ad 100644 --- a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts +++ b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts @@ -15,16 +15,12 @@ */ import { NextRequest } from 'next/server'; -import rootLogger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; import { LogStreamingService } from 'server/services/logStreaming'; import { HttpError } from '@kubernetes/client-node'; import { createApiHandler } from 'server/lib/createApiHandler'; import { errorResponse, successResponse } from 'server/lib/response'; -const logger = rootLogger.child({ - filename: __filename, -}); - interface RouteParams { uuid: string; name: string; @@ -96,7 +92,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams }) const { uuid, name: serviceName, jobName } = params; if (!uuid || !jobName || !serviceName) { - logger.warn({ uuid, serviceName, jobName }, 'Missing or invalid path parameters'); + getLogger().warn(`API: invalid params uuid=${uuid} serviceName=${serviceName} jobName=${jobName}`); return errorResponse('Missing or invalid parameters', { status: 400 }, req); } @@ -107,7 +103,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams }) return successResponse(response, { status: 200 }, req); } catch (error: any) { - logger.error({ err: error, uuid, serviceName, jobName }, 'Error getting log streaming info'); + getLogger().error({ error }, `API: log streaming info failed jobName=${jobName} service=${serviceName}`); if (error.message === 'Deploy not found') { return errorResponse('Deploy not found', { status: 404 }, req); diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts index e5f58fb..cdc3c6e 100644 --- a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts +++ b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts @@ -15,16 +15,12 @@ */ import { NextRequest } from 'next/server'; -import rootLogger from 'server/lib/logger'; 
+import { getLogger } from 'server/lib/logger'; import { HttpError } from '@kubernetes/client-node'; import { createApiHandler } from 'server/lib/createApiHandler'; import { errorResponse, successResponse } from 'server/lib/response'; import { getDeploymentJobs } from 'server/lib/kubernetes/getDeploymentJobs'; -const logger = rootLogger.child({ - filename: __filename, -}); - /** * @openapi * /api/v2/builds/{uuid}/services/{name}/deploys: @@ -85,7 +81,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string const { uuid, name } = params; if (!uuid || !name) { - logger.warn({ uuid, name }, 'Missing or invalid path parameters'); + getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`); return errorResponse('Missing or invalid uuid or name parameters', { status: 400 }, req); } @@ -97,7 +93,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string return successResponse(response, { status: 200 }, req); } catch (error) { - logger.error({ err: error }, `Error getting deploy logs for service ${name} in environment ${uuid}.`); + getLogger().error({ error }, `API: deploy logs fetch failed service=${name}`); if (error instanceof HttpError) { if (error.response?.statusCode === 404) { From 155c9b6fe998f2e789ce8c7c2a240ee66a9a6e1b Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 22:36:38 -0800 Subject: [PATCH 20/23] rename logger.ts to rootLogger.ts to fix import ambiguity --- src/pages/api/health.ts | 2 +- src/pages/api/v1/admin/ttl/cleanup.ts | 2 +- src/pages/api/v1/ai/chat.ts | 2 +- src/pages/api/v1/ai/models.ts | 2 +- src/pages/api/v1/builds/[uuid]/deploy.ts | 2 +- src/pages/api/v1/builds/[uuid]/graph.ts | 2 +- src/pages/api/v1/builds/[uuid]/index.ts | 2 +- .../api/v1/builds/[uuid]/jobs/[jobName]/events.ts | 2 +- .../api/v1/builds/[uuid]/jobs/[jobName]/logs.ts | 2 +- .../api/v1/builds/[uuid]/services/[name]/build.ts | 2 +- .../v1/builds/[uuid]/services/[name]/buildLogs.ts | 2 +- .../[uuid]/services/[name]/buildLogs/[jobName].ts | 2 +- .../builds/[uuid]/services/[name]/deployLogs.ts | 2 +- .../services/[name]/deployLogs/[jobName].ts | 2 +- .../builds/[uuid]/services/[name]/deployment.ts | 2 +- .../[uuid]/services/[name]/logs/[jobName].ts | 2 +- src/pages/api/v1/builds/[uuid]/torndown.ts | 2 +- src/pages/api/v1/builds/[uuid]/webhooks.ts | 2 +- src/pages/api/v1/builds/index.ts | 2 +- src/pages/api/v1/config/cache.ts | 2 +- src/pages/api/v1/deploy-summary.ts | 2 +- src/pages/api/v1/deployables.ts | 2 +- src/pages/api/v1/deploys.ts | 2 +- src/pages/api/v1/pull-requests/[id]/builds.ts | 2 +- src/pages/api/v1/pull-requests/[id]/index.ts | 2 +- src/pages/api/v1/pull-requests/index.ts | 2 +- src/pages/api/v1/repos/index.ts | 2 +- src/pages/api/v1/schema/validate.ts | 2 +- src/pages/api/v1/setup/callback.ts | 2 +- src/pages/api/v1/setup/index.ts | 2 +- src/pages/api/v1/setup/installed.ts | 2 +- src/pages/api/v1/users/index.ts | 2 +- src/pages/api/webhooks/github.ts | 2 +- src/server/database.ts | 2 +- src/server/jobs/index.ts | 2 +- src/server/lib/__tests__/kubernetes.test.ts | 15 ++++++--------- src/server/lib/__tests__/utils.test.ts | 4 ++-- src/server/lib/buildEnvVariables.ts | 2 +- src/server/lib/cli.ts | 2 +- src/server/lib/codefresh/index.ts | 2 +- src/server/lib/codefresh/utils/index.ts | 2 +- src/server/lib/comment.ts | 2 +- src/server/lib/configFileWebhookEnvVariables.ts | 2 +- .../lib/deploymentManager/deploymentManager.ts | 2 +- src/server/lib/envVariables.ts | 2 +- src/server/lib/fastly.ts | 2 +- 
src/server/lib/github/__tests__/index.test.ts | 6 +++--- src/server/lib/github/__tests__/utils.test.ts | 4 ++-- src/server/lib/github/cacheRequest.ts | 2 +- src/server/lib/github/deployments.ts | 2 +- src/server/lib/github/index.ts | 2 +- src/server/lib/github/utils/index.ts | 2 +- src/server/lib/helm/helm.ts | 2 +- src/server/lib/k8sStreamer.ts | 2 +- src/server/lib/kubernetes.ts | 2 +- src/server/lib/kubernetes/JobMonitor.ts | 2 +- .../lib/kubernetes/common/serviceAccount.ts | 2 +- src/server/lib/kubernetes/rbac.ts | 2 +- src/server/lib/kubernetesApply/applyManifest.ts | 2 +- src/server/lib/kubernetesApply/logs.ts | 2 +- src/server/lib/logStreamingHelper.ts | 2 +- .../lib/logger/__tests__/contextLogger.test.ts | 2 +- src/server/lib/logger/contextLogger.ts | 2 +- src/server/lib/logger/index.ts | 2 +- .../lib/nativeBuild/__tests__/buildkit.test.ts | 2 -- src/server/lib/nativeBuild/engines.ts | 2 +- src/server/lib/nativeBuild/index.ts | 2 +- src/server/lib/nativeBuild/utils.ts | 2 +- src/server/lib/nativeHelm/helm.ts | 2 +- src/server/lib/nativeHelm/utils.ts | 2 +- src/server/lib/queueManager.ts | 2 +- src/server/lib/redisClient.ts | 2 +- src/server/lib/response.ts | 2 +- src/server/lib/{logger.ts => rootLogger.ts} | 0 src/server/lib/shell.ts | 2 +- src/server/lib/tracer/index.ts | 2 +- src/server/lib/utils.ts | 2 +- src/server/lib/webhook/index.ts | 2 +- src/server/lib/yamlConfigValidator.ts | 2 +- src/server/models/config/index.ts | 2 +- src/server/models/config/utils.ts | 2 +- src/server/models/yaml/Config.ts | 2 +- src/server/models/yaml/YamlService.ts | 2 +- src/server/services/__tests__/github.test.ts | 15 ++++++++++----- src/server/services/activityStream.ts | 2 +- src/server/services/ai/conversation/manager.ts | 2 +- .../services/ai/orchestration/orchestrator.ts | 2 +- src/server/services/ai/orchestration/safety.ts | 2 +- src/server/services/ai/providers/gemini.ts | 2 +- src/server/services/ai/service.ts | 2 +- src/server/services/ai/streaming/jsonBuffer.ts | 2 +- .../services/ai/streaming/responseHandler.ts | 2 +- src/server/services/aiAgent.ts | 2 +- src/server/services/build.ts | 2 +- src/server/services/codefresh.ts | 2 +- src/server/services/deploy.ts | 2 +- src/server/services/deployable.ts | 2 +- src/server/services/environment.ts | 2 +- src/server/services/github.ts | 2 +- src/server/services/globalConfig.ts | 2 +- src/server/services/ingress.ts | 2 +- src/server/services/label.ts | 2 +- src/server/services/override.ts | 2 +- src/server/services/pullRequest.ts | 2 +- src/server/services/repository.ts | 2 +- src/server/services/service.ts | 2 +- src/server/services/ttlCleanup.ts | 2 +- src/server/services/webhook.ts | 2 +- src/shared/utils.ts | 2 +- 109 files changed, 125 insertions(+), 125 deletions(-) rename src/server/lib/{logger.ts => rootLogger.ts} (100%) diff --git a/src/pages/api/health.ts b/src/pages/api/health.ts index d25aaaf..0bb1c09 100644 --- a/src/pages/api/health.ts +++ b/src/pages/api/health.ts @@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next'; import { defaultDb } from 'server/lib/dependencies'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import RedisClient from 'server/lib/redisClient'; export default async function healthHandler(req: NextApiRequest, res: NextApiResponse) { diff --git a/src/pages/api/v1/admin/ttl/cleanup.ts b/src/pages/api/v1/admin/ttl/cleanup.ts index b9eaad5..39452bb 100644 --- a/src/pages/api/v1/admin/ttl/cleanup.ts +++ b/src/pages/api/v1/admin/ttl/cleanup.ts 
@@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next'; import { nanoid } from 'nanoid'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; import TTLCleanupService from 'server/services/ttlCleanup'; diff --git a/src/pages/api/v1/ai/chat.ts b/src/pages/api/v1/ai/chat.ts index 1bef50e..fa3f3ae 100644 --- a/src/pages/api/v1/ai/chat.ts +++ b/src/pages/api/v1/ai/chat.ts @@ -20,7 +20,7 @@ import AIAgentContextService from 'server/services/ai/context/gatherer'; import AIAgentConversationService from 'server/services/ai/conversation/storage'; import AIAgentService from 'server/services/aiAgent'; import GlobalConfigService from 'server/services/globalConfig'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; export default async function handler(req: NextApiRequest, res: NextApiResponse) { if (req.method !== 'POST') { diff --git a/src/pages/api/v1/ai/models.ts b/src/pages/api/v1/ai/models.ts index a1e9249..7d9ce30 100644 --- a/src/pages/api/v1/ai/models.ts +++ b/src/pages/api/v1/ai/models.ts @@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next'; import GlobalConfigService from 'server/services/globalConfig'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export default async function handler(req: NextApiRequest, res: NextApiResponse) { if (req.method !== 'GET') { diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts index 477205d..e09c7b8 100644 --- a/src/pages/api/v1/builds/[uuid]/deploy.ts +++ b/src/pages/api/v1/builds/[uuid]/deploy.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import { Build } from 'server/models'; import { nanoid } from 'nanoid'; import BuildService from 'server/services/build'; diff --git a/src/pages/api/v1/builds/[uuid]/graph.ts b/src/pages/api/v1/builds/[uuid]/graph.ts index 4a1ae19..7493abc 100644 --- a/src/pages/api/v1/builds/[uuid]/graph.ts +++ b/src/pages/api/v1/builds/[uuid]/graph.ts @@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next/types'; import { generateGraph } from 'server/lib/dependencyGraph'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import { Build } from 'server/models'; import BuildService from 'server/services/build'; diff --git a/src/pages/api/v1/builds/[uuid]/index.ts b/src/pages/api/v1/builds/[uuid]/index.ts index c945250..29e2034 100644 --- a/src/pages/api/v1/builds/[uuid]/index.ts +++ b/src/pages/api/v1/builds/[uuid]/index.ts @@ -16,7 +16,7 @@ import { nanoid } from 'nanoid'; import { NextApiRequest, NextApiResponse } from 'next/types'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import { Build } from 'server/models'; import BuildService from 'server/services/build'; import OverrideService from 'server/services/override'; diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts index 
2a06087..2a1e8cf 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts @@ -142,7 +142,7 @@ * example: Failed to communicate with Kubernetes. */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts index f6e9bd9..99d08ed 100644 --- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts +++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import unifiedLogStreamHandler from '../../services/[name]/logs/[jobName]'; /** diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts index 9cf6d46..5fa019e 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger'; import GithubService from 'server/services/github'; import { Build } from 'server/models'; import DeployService from 'server/services/deploy'; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts index 236b67b..98cf834 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import { HttpError } from '@kubernetes/client-node'; import { BuildJobInfo, getNativeBuildJobs } from 'server/lib/kubernetes/getNativeBuildJobs'; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts index 0fc84f4..c457701 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import unifiedLogStreamHandler from '../logs/[jobName]'; /** diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts index 483f67c..cc02f2c 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, 
withLogContext } from 'server/lib/logger'; import { HttpError } from '@kubernetes/client-node'; import { DeploymentJobInfo, getDeploymentJobs } from 'server/lib/kubernetes/getDeploymentJobs'; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts index 04964df..02ef1bf 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts @@ -131,7 +131,7 @@ * example: Failed to communicate with Kubernetes. */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import unifiedLogStreamHandler from '../logs/[jobName]'; const deployLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => { diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts index 06a388f..1ab28a7 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts @@ -15,7 +15,7 @@ */ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; import { Deploy } from 'server/models'; diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts index 6b07cad..6d3a6da 100644 --- a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts +++ b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts @@ -159,7 +159,7 @@ * example: Failed to communicate with Kubernetes. 
*/ import type { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import { LogStreamingService } from 'server/services/logStreaming'; import { HttpError } from '@kubernetes/client-node'; diff --git a/src/pages/api/v1/builds/[uuid]/torndown.ts b/src/pages/api/v1/builds/[uuid]/torndown.ts index 07d93bf..4450dde 100644 --- a/src/pages/api/v1/builds/[uuid]/torndown.ts +++ b/src/pages/api/v1/builds/[uuid]/torndown.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import { Build } from 'server/models'; import { BuildStatus, DeployStatus } from 'shared/constants'; diff --git a/src/pages/api/v1/builds/[uuid]/webhooks.ts b/src/pages/api/v1/builds/[uuid]/webhooks.ts index d1d1c0f..ea068e1 100644 --- a/src/pages/api/v1/builds/[uuid]/webhooks.ts +++ b/src/pages/api/v1/builds/[uuid]/webhooks.ts @@ -16,7 +16,7 @@ import { NextApiRequest, NextApiResponse } from 'next/types'; import { nanoid } from 'nanoid'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import GithubService from 'server/services/github'; import { Build } from 'server/models'; import WebhookService from 'server/services/webhook'; diff --git a/src/pages/api/v1/builds/index.ts b/src/pages/api/v1/builds/index.ts index 179a8e4..3405df3 100644 --- a/src/pages/api/v1/builds/index.ts +++ b/src/pages/api/v1/builds/index.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import BuildService from 'server/services/build'; /** diff --git a/src/pages/api/v1/config/cache.ts b/src/pages/api/v1/config/cache.ts index f1ea090..7dc5f53 100644 --- a/src/pages/api/v1/config/cache.ts +++ b/src/pages/api/v1/config/cache.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; /** diff --git a/src/pages/api/v1/deploy-summary.ts b/src/pages/api/v1/deploy-summary.ts index 2b5891b..a0e47fc 100644 --- a/src/pages/api/v1/deploy-summary.ts +++ b/src/pages/api/v1/deploy-summary.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import BuildService from 'server/services/build'; /** diff --git a/src/pages/api/v1/deployables.ts b/src/pages/api/v1/deployables.ts index f8e7d4a..afb0ce3 100644 --- a/src/pages/api/v1/deployables.ts +++ b/src/pages/api/v1/deployables.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import BuildService from 'server/services/build'; /** diff --git a/src/pages/api/v1/deploys.ts b/src/pages/api/v1/deploys.ts index a85d3e6..a3e6e34 100644 --- a/src/pages/api/v1/deploys.ts +++ b/src/pages/api/v1/deploys.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { 
getLogger } from 'server/lib/logger'; import BuildService from 'server/services/build'; /** diff --git a/src/pages/api/v1/pull-requests/[id]/builds.ts b/src/pages/api/v1/pull-requests/[id]/builds.ts index 161050f..b1827bc 100644 --- a/src/pages/api/v1/pull-requests/[id]/builds.ts +++ b/src/pages/api/v1/pull-requests/[id]/builds.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import BuildService from 'server/services/build'; import PullRequestService from 'server/services/pullRequest'; diff --git a/src/pages/api/v1/pull-requests/[id]/index.ts b/src/pages/api/v1/pull-requests/[id]/index.ts index f5d0ead..caaf7a1 100644 --- a/src/pages/api/v1/pull-requests/[id]/index.ts +++ b/src/pages/api/v1/pull-requests/[id]/index.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import PullRequestService from 'server/services/pullRequest'; /** diff --git a/src/pages/api/v1/pull-requests/index.ts b/src/pages/api/v1/pull-requests/index.ts index 0002410..afb6f77 100644 --- a/src/pages/api/v1/pull-requests/index.ts +++ b/src/pages/api/v1/pull-requests/index.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import PullRequestService from 'server/services/pullRequest'; /** diff --git a/src/pages/api/v1/repos/index.ts b/src/pages/api/v1/repos/index.ts index 6f45b27..79de907 100644 --- a/src/pages/api/v1/repos/index.ts +++ b/src/pages/api/v1/repos/index.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import PullRequestService from 'server/services/pullRequest'; /** diff --git a/src/pages/api/v1/schema/validate.ts b/src/pages/api/v1/schema/validate.ts index 9e922f7..27b4ed3 100644 --- a/src/pages/api/v1/schema/validate.ts +++ b/src/pages/api/v1/schema/validate.ts @@ -103,7 +103,7 @@ type Response = ValidationResponse | ErrorResponse; import { NextApiRequest, NextApiResponse } from 'next/types'; import { getYamlFileContentFromBranch, ConfigFileNotFound } from 'server/lib/github'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { YamlConfigParser, ParsingError } from 'server/lib/yamlConfigParser'; import { YamlConfigValidator, ValidationError } from 'server/lib/yamlConfigValidator'; diff --git a/src/pages/api/v1/setup/callback.ts b/src/pages/api/v1/setup/callback.ts index 053e680..0be7168 100644 --- a/src/pages/api/v1/setup/callback.ts +++ b/src/pages/api/v1/setup/callback.ts @@ -16,7 +16,7 @@ import type { NextApiRequest, NextApiResponse } from 'next'; import { updateSecret, getCurrentNamespaceFromFile } from 'server/lib/kubernetes'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; import { SECRET_BOOTSTRAP_NAME } from 'shared/config'; diff --git a/src/pages/api/v1/setup/index.ts b/src/pages/api/v1/setup/index.ts index 2bd8fc8..7420819 100644 --- a/src/pages/api/v1/setup/index.ts +++ b/src/pages/api/v1/setup/index.ts @@ -16,7 +16,7 @@ import { randomBytes } from 'crypto'; import type { 
NextApiRequest, NextApiResponse } from 'next'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; import { APP_HOST } from 'shared/config'; diff --git a/src/pages/api/v1/setup/installed.ts b/src/pages/api/v1/setup/installed.ts index f21aa51..dccde88 100644 --- a/src/pages/api/v1/setup/installed.ts +++ b/src/pages/api/v1/setup/installed.ts @@ -16,7 +16,7 @@ import type { NextApiRequest, NextApiResponse } from 'next'; import { updateSecret, getCurrentNamespaceFromFile } from 'server/lib/kubernetes'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; import { SECRET_BOOTSTRAP_NAME } from 'shared/config'; diff --git a/src/pages/api/v1/users/index.ts b/src/pages/api/v1/users/index.ts index 017b5dd..1a3f793 100644 --- a/src/pages/api/v1/users/index.ts +++ b/src/pages/api/v1/users/index.ts @@ -15,7 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import PullRequestService from 'server/services/pullRequest'; /** diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts index 9f3c699..2d43293 100644 --- a/src/pages/api/webhooks/github.ts +++ b/src/pages/api/webhooks/github.ts @@ -21,7 +21,7 @@ import { LIFECYCLE_MODE } from 'shared/index'; import { stringify } from 'flatted'; import BootstrapJobs from 'server/jobs/index'; import createAndBindServices from 'server/services'; -import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger'; const services = createAndBindServices(); diff --git a/src/server/database.ts b/src/server/database.ts index cdbd9fd..064ed50 100644 --- a/src/server/database.ts +++ b/src/server/database.ts @@ -21,7 +21,7 @@ import { IServices } from 'server/services/types'; import Model from 'server/models/_Model'; import knexfile from '../../knexfile'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export default class Database { models: models.IModels; diff --git a/src/server/jobs/index.ts b/src/server/jobs/index.ts index 025d926..a4c9dfa 100644 --- a/src/server/jobs/index.ts +++ b/src/server/jobs/index.ts @@ -15,7 +15,7 @@ */ import { IServices } from 'server/services/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { defaultDb, redisClient } from 'server/lib/dependencies'; import RedisClient from 'server/lib/redisClient'; import QueueManager from 'server/lib/queueManager'; diff --git a/src/server/lib/__tests__/kubernetes.test.ts b/src/server/lib/__tests__/kubernetes.test.ts index 47d42c4..e283f86 100644 --- a/src/server/lib/__tests__/kubernetes.test.ts +++ b/src/server/lib/__tests__/kubernetes.test.ts @@ -19,15 +19,12 @@ import * as k8s from '../kubernetes'; // Mock the logger to avoid console output during tests jest.mock('../logger', () => ({ - __esModule: true, - default: { - child: () => ({ - info: jest.fn(), - debug: jest.fn(), - warn: jest.fn(), - error: jest.fn(), - }), - }, + getLogger: jest.fn(() => ({ + info: jest.fn(), + debug: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + })), })); describe('Kubernetes Node Placement', () => { 
diff --git a/src/server/lib/__tests__/utils.test.ts b/src/server/lib/__tests__/utils.test.ts index bdefab0..d94a26c 100644 --- a/src/server/lib/__tests__/utils.test.ts +++ b/src/server/lib/__tests__/utils.test.ts @@ -54,7 +54,7 @@ jest.mock('server/services/globalConfig', () => { }; }); -jest.mock('server/lib/logger/index', () => ({ +jest.mock('server/lib/logger', () => ({ getLogger: jest.fn().mockReturnValue({ info: jest.fn(), warn: jest.fn(), @@ -62,7 +62,7 @@ jest.mock('server/lib/logger/index', () => ({ debug: jest.fn(), }), })); -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; describe('exec', () => { test('exec success', async () => { diff --git a/src/server/lib/buildEnvVariables.ts b/src/server/lib/buildEnvVariables.ts index e0bc7db..973a852 100644 --- a/src/server/lib/buildEnvVariables.ts +++ b/src/server/lib/buildEnvVariables.ts @@ -17,7 +17,7 @@ import { EnvironmentVariables } from 'server/lib/envVariables'; import { Build, Deploy } from 'server/models'; import { DeployTypes, FeatureFlags } from 'shared/constants'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { ValidationError } from './yamlConfigValidator'; import * as YamlService from 'server/models/yaml'; diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts index 1aa5218..6ea2e4e 100644 --- a/src/server/lib/cli.ts +++ b/src/server/lib/cli.ts @@ -18,7 +18,7 @@ import { merge } from 'lodash'; import { Build, Deploy, Service, Deployable } from 'server/models'; import { CLIDeployTypes, DeployTypes } from 'shared/constants'; import { shellPromise } from './shell'; -import { getLogger, withLogContext, updateLogContext } from './logger/index'; +import { getLogger, withLogContext, updateLogContext } from './logger'; import GlobalConfigService from 'server/services/globalConfig'; import { DatabaseSettings } from 'server/services/types/globalConfig'; diff --git a/src/server/lib/codefresh/index.ts b/src/server/lib/codefresh/index.ts index ed0ce0d..64d4aca 100644 --- a/src/server/lib/codefresh/index.ts +++ b/src/server/lib/codefresh/index.ts @@ -15,7 +15,7 @@ */ import { shellPromise } from 'server/lib/shell'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { generateCodefreshCmd, constructEcrTag, getCodefreshPipelineIdFromOutput } from 'server/lib/codefresh/utils'; import { waitUntil } from 'server/lib/utils'; import { ContainerBuildOptions } from 'server/lib/codefresh/types'; diff --git a/src/server/lib/codefresh/utils/index.ts b/src/server/lib/codefresh/utils/index.ts index 71aa572..5d771b3 100644 --- a/src/server/lib/codefresh/utils/index.ts +++ b/src/server/lib/codefresh/utils/index.ts @@ -17,7 +17,7 @@ import { generateYaml } from 'server/lib/codefresh/utils/generateYaml'; import { generateCodefreshCmd } from 'server/lib/codefresh/utils/generateCodefreshCmd'; import { CF, CF_CHECKOUT_STEP, CF_BUILD_STEP, CF_AFTER_BUILD_STEP } from 'server/lib/codefresh/constants'; -import { updateLogContext } from 'server/lib/logger/index'; +import { updateLogContext } from 'server/lib/logger'; export const constructBuildArgs = (envVars = {}) => { const envVarsItems = Object.keys(envVars); diff --git a/src/server/lib/comment.ts b/src/server/lib/comment.ts index 9b3c868..24f15c1 100644 --- a/src/server/lib/comment.ts +++ b/src/server/lib/comment.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { getLogger } from './logger/index'; +import { getLogger } from './logger'; import { CommentParser } from 'shared/constants'; import { compact, flatten, set } from 'lodash'; diff --git a/src/server/lib/configFileWebhookEnvVariables.ts b/src/server/lib/configFileWebhookEnvVariables.ts index 6cabc08..2afea52 100644 --- a/src/server/lib/configFileWebhookEnvVariables.ts +++ b/src/server/lib/configFileWebhookEnvVariables.ts @@ -16,7 +16,7 @@ import { EnvironmentVariables } from 'server/lib/envVariables'; import { Build } from 'server/models'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { Webhook } from 'server/models/yaml'; import { FeatureFlags } from 'shared/constants'; diff --git a/src/server/lib/deploymentManager/deploymentManager.ts b/src/server/lib/deploymentManager/deploymentManager.ts index ee3b88b..0320930 100644 --- a/src/server/lib/deploymentManager/deploymentManager.ts +++ b/src/server/lib/deploymentManager/deploymentManager.ts @@ -20,7 +20,7 @@ import { DeployStatus, DeployTypes, CLIDeployTypes } from 'shared/constants'; import { createKubernetesApplyJob, monitorKubernetesJob } from '../kubernetesApply/applyManifest'; import { nanoid, customAlphabet } from 'nanoid'; import DeployService from 'server/services/deploy'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount'; import { waitForDeployPodReady } from '../kubernetes'; diff --git a/src/server/lib/envVariables.ts b/src/server/lib/envVariables.ts index 55cd144..a10008e 100644 --- a/src/server/lib/envVariables.ts +++ b/src/server/lib/envVariables.ts @@ -27,7 +27,7 @@ import { NO_DEFAULT_ENV_UUID, } from 'shared/constants'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { LifecycleError } from './errors'; import GlobalConfigService from 'server/services/globalConfig'; diff --git a/src/server/lib/fastly.ts b/src/server/lib/fastly.ts index fb39818..3ae61a8 100644 --- a/src/server/lib/fastly.ts +++ b/src/server/lib/fastly.ts @@ -15,7 +15,7 @@ */ import * as FastlyInstance from 'fastly/dist/index.js'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { Redis } from 'ioredis'; import { FASTLY_TOKEN } from 'shared/config'; import GlobalConfigService from 'server/services/globalConfig'; diff --git a/src/server/lib/github/__tests__/index.test.ts b/src/server/lib/github/__tests__/index.test.ts index a2ab110..f5a6ceb 100644 --- a/src/server/lib/github/__tests__/index.test.ts +++ b/src/server/lib/github/__tests__/index.test.ts @@ -45,7 +45,7 @@ jest.mock('server/services/globalConfig', () => { jest.mock('axios'); jest.mock('server/lib/github/client'); jest.mock('server/lib/github/utils'); -jest.mock('server/lib/logger/index', () => ({ +jest.mock('server/lib/logger', () => ({ getLogger: jest.fn().mockReturnValue({ info: jest.fn(), debug: jest.fn(), @@ -53,8 +53,8 @@ jest.mock('server/lib/logger/index', () => ({ warn: jest.fn(), }), })); -import { getLogger } from 'server/lib/logger/index'; -import logger from 'server/lib/logger'; +import { getLogger } from 'server/lib/logger'; +import logger from 'server/lib/rootLogger'; test('createOrUpdatePullRequestComment success', async () => { jest.spyOn(client, 'createOctokitClient').mockResolvedValue({ diff --git 
a/src/server/lib/github/__tests__/utils.test.ts b/src/server/lib/github/__tests__/utils.test.ts index 121aaf3..5a6e208 100644 --- a/src/server/lib/github/__tests__/utils.test.ts +++ b/src/server/lib/github/__tests__/utils.test.ts @@ -35,9 +35,9 @@ jest.mock('server/services/globalConfig', () => { jest.mock('server/lib/github/client'); -jest.mock('server/lib/logger'); +jest.mock('server/lib/rootLogger'); -import logger from 'server/lib/logger'; +import logger from 'server/lib/rootLogger'; test('getAppToken success', async () => { const app = jest.fn().mockResolvedValue({ token: '123' }); diff --git a/src/server/lib/github/cacheRequest.ts b/src/server/lib/github/cacheRequest.ts index 2e5b69f..04a511b 100644 --- a/src/server/lib/github/cacheRequest.ts +++ b/src/server/lib/github/cacheRequest.ts @@ -15,7 +15,7 @@ */ import { cloneDeep, merge } from 'lodash'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { GITHUB_API_CACHE_EXPIRATION_SECONDS } from 'shared/constants'; import { createOctokitClient } from 'server/lib/github/client'; import { CacheRequestData } from 'server/lib/github/types'; diff --git a/src/server/lib/github/deployments.ts b/src/server/lib/github/deployments.ts index 1951de1..b03efe0 100644 --- a/src/server/lib/github/deployments.ts +++ b/src/server/lib/github/deployments.ts @@ -18,7 +18,7 @@ import { Deploy } from 'server/models'; import { cacheRequest } from 'server/lib/github/cacheRequest'; import { getPullRequest } from 'server/lib/github/index'; import { DeployStatus } from 'shared/constants'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; const githubDeploymentStatuses = { deployed: 'success', diff --git a/src/server/lib/github/index.ts b/src/server/lib/github/index.ts index 3cda73b..8a19ee2 100644 --- a/src/server/lib/github/index.ts +++ b/src/server/lib/github/index.ts @@ -19,7 +19,7 @@ import crypto from 'crypto'; import { NextApiRequest } from 'next'; import { GITHUB_WEBHOOK_SECRET } from 'shared/config'; import { LifecycleError } from 'server/lib/errors'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { createOctokitClient } from 'server/lib/github/client'; import { cacheRequest } from 'server/lib/github/cacheRequest'; import { LIFECYCLE_FILE_NAME_REGEX } from 'server/lib/github/constants'; diff --git a/src/server/lib/github/utils/index.ts b/src/server/lib/github/utils/index.ts index 025f679..4fc6fe4 100644 --- a/src/server/lib/github/utils/index.ts +++ b/src/server/lib/github/utils/index.ts @@ -15,7 +15,7 @@ */ import { Octokit } from '@octokit/core'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { cacheRequest } from 'server/lib/github/cacheRequest'; import { ConstructOctokitClientOptions, GetAppTokenOptions } from 'server/lib/github/types'; diff --git a/src/server/lib/helm/helm.ts b/src/server/lib/helm/helm.ts index 3ae50d4..4fb02d0 100644 --- a/src/server/lib/helm/helm.ts +++ b/src/server/lib/helm/helm.ts @@ -20,7 +20,7 @@ import Deploy from 'server/models/Deploy'; import GlobalConfigService from 'server/services/globalConfig'; import { TMP_PATH } from 'shared/config'; import { DeployStatus } from 'shared/constants'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { shellPromise } from 'server/lib/shell'; import { kubeContextStep } from 'server/lib/codefresh'; 
import Build from 'server/models/Build'; diff --git a/src/server/lib/k8sStreamer.ts b/src/server/lib/k8sStreamer.ts index 4329ab4..2e75028 100644 --- a/src/server/lib/k8sStreamer.ts +++ b/src/server/lib/k8sStreamer.ts @@ -15,7 +15,7 @@ */ import { KubeConfig } from '@kubernetes/client-node'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import * as k8s from '@kubernetes/client-node'; import { PassThrough, Writable } from 'stream'; diff --git a/src/server/lib/kubernetes.ts b/src/server/lib/kubernetes.ts index 730cc9a..c062c2b 100644 --- a/src/server/lib/kubernetes.ts +++ b/src/server/lib/kubernetes.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { getLogger } from './logger/index'; +import { getLogger } from './logger'; import yaml from 'js-yaml'; import _ from 'lodash'; import { Build, Deploy, Deployable, Service } from 'server/models'; diff --git a/src/server/lib/kubernetes/JobMonitor.ts b/src/server/lib/kubernetes/JobMonitor.ts index 420dbb8..1e841e2 100644 --- a/src/server/lib/kubernetes/JobMonitor.ts +++ b/src/server/lib/kubernetes/JobMonitor.ts @@ -15,7 +15,7 @@ */ import { shellPromise } from '../shell'; -import { getLogger } from '../logger/index'; +import { getLogger } from '../logger'; export interface JobStatus { logs: string; diff --git a/src/server/lib/kubernetes/common/serviceAccount.ts b/src/server/lib/kubernetes/common/serviceAccount.ts index 975d14a..032099f 100644 --- a/src/server/lib/kubernetes/common/serviceAccount.ts +++ b/src/server/lib/kubernetes/common/serviceAccount.ts @@ -15,7 +15,7 @@ */ import GlobalConfigService from 'server/services/globalConfig'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { setupServiceAccountInNamespace } from '../../nativeHelm/utils'; export async function ensureServiceAccountForJob( diff --git a/src/server/lib/kubernetes/rbac.ts b/src/server/lib/kubernetes/rbac.ts index f9e86ec..f79d11c 100644 --- a/src/server/lib/kubernetes/rbac.ts +++ b/src/server/lib/kubernetes/rbac.ts @@ -16,7 +16,7 @@ import { V1ServiceAccount, V1Role, V1RoleBinding } from '@kubernetes/client-node'; import * as k8s from '@kubernetes/client-node'; -import { getLogger } from '../logger/index'; +import { getLogger } from '../logger'; export interface RBACConfig { namespace: string; diff --git a/src/server/lib/kubernetesApply/applyManifest.ts b/src/server/lib/kubernetesApply/applyManifest.ts index 940c5b9..6b49405 100644 --- a/src/server/lib/kubernetesApply/applyManifest.ts +++ b/src/server/lib/kubernetesApply/applyManifest.ts @@ -17,7 +17,7 @@ import * as k8s from '@kubernetes/client-node'; import { HttpError } from '@kubernetes/client-node'; import { Deploy } from 'server/models'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; export interface KubernetesApplyJobConfig { diff --git a/src/server/lib/kubernetesApply/logs.ts b/src/server/lib/kubernetesApply/logs.ts index 857fc6f..10b9001 100644 --- a/src/server/lib/kubernetesApply/logs.ts +++ b/src/server/lib/kubernetesApply/logs.ts @@ -16,7 +16,7 @@ import * as k8s from '@kubernetes/client-node'; import { Deploy } from 'server/models'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; /** * Fetches logs from a Kubernetes apply job for a deploy diff --git a/src/server/lib/logStreamingHelper.ts 
b/src/server/lib/logStreamingHelper.ts index 6ab6e1a..cfcd8b6 100644 --- a/src/server/lib/logStreamingHelper.ts +++ b/src/server/lib/logStreamingHelper.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import * as k8s from '@kubernetes/client-node'; import { StreamingInfo, LogSourceStatus, K8sPodInfo, K8sContainerInfo } from 'shared/types'; import { HttpError, V1ContainerStatus } from '@kubernetes/client-node'; diff --git a/src/server/lib/logger/__tests__/contextLogger.test.ts b/src/server/lib/logger/__tests__/contextLogger.test.ts index a6cd452..9800706 100644 --- a/src/server/lib/logger/__tests__/contextLogger.test.ts +++ b/src/server/lib/logger/__tests__/contextLogger.test.ts @@ -23,7 +23,7 @@ const mockChild = jest.fn().mockReturnValue({ debug: jest.fn(), }); -jest.mock('../../logger', () => ({ +jest.mock('../../rootLogger', () => ({ __esModule: true, default: { child: (...args: unknown[]) => mockChild(...args), diff --git a/src/server/lib/logger/contextLogger.ts b/src/server/lib/logger/contextLogger.ts index 8ec1737..0ddee31 100644 --- a/src/server/lib/logger/contextLogger.ts +++ b/src/server/lib/logger/contextLogger.ts @@ -15,7 +15,7 @@ */ import tracer from 'dd-trace'; -import rootLogger from '../logger'; +import rootLogger from '../rootLogger'; import { getLogContext } from './context'; import type { LogContext } from './types'; diff --git a/src/server/lib/logger/index.ts b/src/server/lib/logger/index.ts index d4ea7be..a7111b2 100644 --- a/src/server/lib/logger/index.ts +++ b/src/server/lib/logger/index.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -export { default as rootLogger } from '../logger'; +export { default as rootLogger } from '../rootLogger'; export { getLogContext, withLogContext, updateLogContext, extractContextForQueue } from './context'; export { getLogger } from './contextLogger'; export { withSpan } from './spans'; diff --git a/src/server/lib/nativeBuild/__tests__/buildkit.test.ts b/src/server/lib/nativeBuild/__tests__/buildkit.test.ts index 9c5c745..84afd93 100644 --- a/src/server/lib/nativeBuild/__tests__/buildkit.test.ts +++ b/src/server/lib/nativeBuild/__tests__/buildkit.test.ts @@ -57,8 +57,6 @@ jest.mock('../../logger', () => { })), }; return { - __esModule: true, - default: mockLogger, getLogger: jest.fn(() => mockLogger), }; }); diff --git a/src/server/lib/nativeBuild/engines.ts b/src/server/lib/nativeBuild/engines.ts index 38e245f..5816776 100644 --- a/src/server/lib/nativeBuild/engines.ts +++ b/src/server/lib/nativeBuild/engines.ts @@ -16,7 +16,7 @@ import { Deploy } from '../../models'; import { shellPromise } from '../shell'; -import { getLogger } from '../logger/index'; +import { getLogger } from '../logger'; import GlobalConfigService from '../../services/globalConfig'; import { waitForJobAndGetLogs, diff --git a/src/server/lib/nativeBuild/index.ts b/src/server/lib/nativeBuild/index.ts index 0581861..2766431 100644 --- a/src/server/lib/nativeBuild/index.ts +++ b/src/server/lib/nativeBuild/index.ts @@ -15,7 +15,7 @@ */ import { Deploy } from '../../models'; -import { getLogger, withSpan, withLogContext } from '../logger/index'; +import { getLogger, withSpan, withLogContext } from '../logger'; import { ensureNamespaceExists } from './utils'; import { buildWithEngine, NativeBuildOptions } from './engines'; import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount'; diff --git a/src/server/lib/nativeBuild/utils.ts 
b/src/server/lib/nativeBuild/utils.ts index 7af4ff5..8b104a7 100644 --- a/src/server/lib/nativeBuild/utils.ts +++ b/src/server/lib/nativeBuild/utils.ts @@ -16,7 +16,7 @@ import { V1Job } from '@kubernetes/client-node'; import { shellPromise } from '../shell'; -import { getLogger } from '../logger/index'; +import { getLogger } from '../logger'; import * as k8s from '@kubernetes/client-node'; import GlobalConfigService from '../../services/globalConfig'; import { createBuildJob } from '../kubernetes/jobFactory'; diff --git a/src/server/lib/nativeHelm/helm.ts b/src/server/lib/nativeHelm/helm.ts index 1785e06..0aaddc2 100644 --- a/src/server/lib/nativeHelm/helm.ts +++ b/src/server/lib/nativeHelm/helm.ts @@ -18,7 +18,7 @@ import yaml from 'js-yaml'; import fs from 'fs'; import Deploy from 'server/models/Deploy'; import GlobalConfigService from 'server/services/globalConfig'; -import { getLogger, withSpan, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withSpan, withLogContext } from 'server/lib/logger'; import { shellPromise } from 'server/lib/shell'; import { randomAlphanumeric } from 'server/lib/random'; import { nanoid } from 'nanoid'; diff --git a/src/server/lib/nativeHelm/utils.ts b/src/server/lib/nativeHelm/utils.ts index 59ab824..7ef29f1 100644 --- a/src/server/lib/nativeHelm/utils.ts +++ b/src/server/lib/nativeHelm/utils.ts @@ -26,7 +26,7 @@ import { setupDeployServiceAccountInNamespace, } from 'server/lib/kubernetes/rbac'; import { HelmConfigBuilder } from 'server/lib/config/ConfigBuilder'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { shellPromise } from 'server/lib/shell'; import { normalizeKubernetesLabelValue } from 'server/lib/kubernetes/utils'; diff --git a/src/server/lib/queueManager.ts b/src/server/lib/queueManager.ts index ead747a..6be81f3 100644 --- a/src/server/lib/queueManager.ts +++ b/src/server/lib/queueManager.ts @@ -16,7 +16,7 @@ import { Queue, Worker, QueueOptions, WorkerOptions, Processor } from 'bullmq'; import { Redis } from 'ioredis'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; interface RegisteredQueue { queue: Queue; diff --git a/src/server/lib/redisClient.ts b/src/server/lib/redisClient.ts index 2b1520f..13448b8 100644 --- a/src/server/lib/redisClient.ts +++ b/src/server/lib/redisClient.ts @@ -17,7 +17,7 @@ import Redis from 'ioredis'; import Redlock from 'redlock'; import { REDIS_URL, APP_REDIS_HOST, APP_REDIS_PORT, APP_REDIS_PASSWORD, APP_REDIS_TLS } from 'shared/config'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export class RedisClient { private static instance: RedisClient; diff --git a/src/server/lib/response.ts b/src/server/lib/response.ts index c4e6f64..30b3c6d 100644 --- a/src/server/lib/response.ts +++ b/src/server/lib/response.ts @@ -16,7 +16,7 @@ import { NextRequest, NextResponse } from 'next/server'; import { PaginationMetadata } from './paginate'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; interface Metadata { pagination?: PaginationMetadata; diff --git a/src/server/lib/logger.ts b/src/server/lib/rootLogger.ts similarity index 100% rename from src/server/lib/logger.ts rename to src/server/lib/rootLogger.ts diff --git a/src/server/lib/shell.ts b/src/server/lib/shell.ts index a7b61ef..ec2386f 100644 --- a/src/server/lib/shell.ts +++ b/src/server/lib/shell.ts @@ -14,7 +14,7 @@ * 
limitations under the License. */ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import shell, { ExecOptions } from 'shelljs'; interface Options extends ExecOptions { diff --git a/src/server/lib/tracer/index.ts b/src/server/lib/tracer/index.ts index 1f08bd3..6adc8ac 100644 --- a/src/server/lib/tracer/index.ts +++ b/src/server/lib/tracer/index.ts @@ -15,7 +15,7 @@ */ import { Span, tracer, TracerOptions } from 'dd-trace'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; // Refer to the readme for insights diff --git a/src/server/lib/utils.ts b/src/server/lib/utils.ts index 0b81f71..1cecde2 100644 --- a/src/server/lib/utils.ts +++ b/src/server/lib/utils.ts @@ -20,7 +20,7 @@ import { GithubPullRequestActions, PullRequestStatus, FallbackLabels } from 'sha import GlobalConfigService from 'server/services/globalConfig'; import { GenerateDeployTagOptions, WaitUntilOptions, EnableKillswitchOptions } from 'server/lib/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { ENVIRONMENT } from 'shared/config'; const execFilePromise = promisify(execFile); diff --git a/src/server/lib/webhook/index.ts b/src/server/lib/webhook/index.ts index 6b632bd..e84444a 100644 --- a/src/server/lib/webhook/index.ts +++ b/src/server/lib/webhook/index.ts @@ -22,7 +22,7 @@ import { createWebhookJob, WebhookJobConfig } from 'server/lib/kubernetes/webhoo import { shellPromise } from 'server/lib/shell'; import { waitForJobAndGetLogs } from 'server/lib/nativeBuild/utils'; import { ensureServiceAccountForJob } from 'server/lib/kubernetes/common/serviceAccount'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { nanoid } from 'nanoid'; const MANIFEST_PATH = process.env.MANIFEST_PATH || '/tmp/lifecycle/manifests'; diff --git a/src/server/lib/yamlConfigValidator.ts b/src/server/lib/yamlConfigValidator.ts index e196afb..46b2350 100644 --- a/src/server/lib/yamlConfigValidator.ts +++ b/src/server/lib/yamlConfigValidator.ts @@ -19,7 +19,7 @@ import { LifecycleError } from './errors'; import JsonSchema from 'jsonschema'; import { BuildStatus, CAPACITY_TYPE, DiskAccessMode } from 'shared/constants'; import { schema_1_0_0 } from './yamlSchemas'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export class ValidationError extends LifecycleError { constructor(msg: string, uuid: string = null, service: string = null) { diff --git a/src/server/models/config/index.ts b/src/server/models/config/index.ts index 43947d2..c562e24 100644 --- a/src/server/models/config/index.ts +++ b/src/server/models/config/index.ts @@ -24,7 +24,7 @@ import { } from 'server/models/config/utils'; import { LifecycleConfig, Service } from 'server/models/config/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export const isGithubServiceDockerConfig = (obj) => isInObj(obj, 'dockerfilePath'); export const isDockerServiceConfig = (obj) => isInObj(obj, 'dockerImage'); diff --git a/src/server/models/config/utils.ts b/src/server/models/config/utils.ts index 769a026..bdebce9 100644 --- a/src/server/models/config/utils.ts +++ b/src/server/models/config/utils.ts @@ -21,7 +21,7 @@ import { YamlConfigParser } from 'server/lib/yamlConfigParser'; import Repository from 'server/models/Repository'; import { Service } from 
'server/models/yaml/types'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export const isInObj = (obj, key) => (!obj ? false : key in obj); diff --git a/src/server/models/yaml/Config.ts b/src/server/models/yaml/Config.ts index f431d2f..bb9b6b3 100644 --- a/src/server/models/yaml/Config.ts +++ b/src/server/models/yaml/Config.ts @@ -21,7 +21,7 @@ import { ValidationError, YamlConfigValidator } from 'server/lib/yamlConfigValid import Repository from '../Repository'; import { Environment } from './YamlEnvironment'; import { Service, Service001 } from './YamlService'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export interface LifecycleConfig { readonly version: string; diff --git a/src/server/models/yaml/YamlService.ts b/src/server/models/yaml/YamlService.ts index c3b463a..80a1902 100644 --- a/src/server/models/yaml/YamlService.ts +++ b/src/server/models/yaml/YamlService.ts @@ -16,7 +16,7 @@ /* eslint-disable no-case-declarations */ import _ from 'lodash'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; import { DeployTypes, FeatureFlags, NO_DEFAULT_ENV_UUID } from 'shared/constants'; import Build from '../Build'; diff --git a/src/server/services/__tests__/github.test.ts b/src/server/services/__tests__/github.test.ts index 56b0ce4..f73cc39 100644 --- a/src/server/services/__tests__/github.test.ts +++ b/src/server/services/__tests__/github.test.ts @@ -22,11 +22,16 @@ import { PushEvent } from '@octokit/webhooks-types'; mockRedisClient(); jest.mock('server/lib/logger', () => ({ - error: jest.fn(), - info: jest.fn(), - warn: jest.fn(), - debug: jest.fn(), - child: jest.fn().mockReturnThis(), + getLogger: jest.fn(() => ({ + error: jest.fn(), + info: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + child: jest.fn().mockReturnThis(), + })), + withLogContext: jest.fn((ctx, fn) => fn()), + extractContextForQueue: jest.fn(() => ({})), + LogStage: {}, })); describe('Github Service - handlePushWebhook', () => { diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts index c7435f6..5e4a1ab 100644 --- a/src/server/services/activityStream.ts +++ b/src/server/services/activityStream.ts @@ -15,7 +15,7 @@ */ import BaseService from './_service'; -import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger'; import { Build, PullRequest, Deploy, Repository } from 'server/models'; import * as github from 'server/lib/github'; import { APP_HOST, QUEUE_NAMES } from 'shared/config'; diff --git a/src/server/services/ai/conversation/manager.ts b/src/server/services/ai/conversation/manager.ts index 0b5ee14..00a6be8 100644 --- a/src/server/services/ai/conversation/manager.ts +++ b/src/server/services/ai/conversation/manager.ts @@ -15,7 +15,7 @@ */ import { LLMProvider, Message, StreamChunk } from '../types/provider'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export interface ConversationState { summary: string; diff --git a/src/server/services/ai/orchestration/orchestrator.ts b/src/server/services/ai/orchestration/orchestrator.ts index 53f62a2..97822d9 100644 --- a/src/server/services/ai/orchestration/orchestrator.ts +++ 
b/src/server/services/ai/orchestration/orchestrator.ts @@ -20,7 +20,7 @@ import { StreamCallbacks } from '../types/stream'; import { ToolRegistry } from '../tools/registry'; import { ToolSafetyManager } from './safety'; import { LoopDetector } from './loopProtection'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export interface OrchestrationResult { success: boolean; diff --git a/src/server/services/ai/orchestration/safety.ts b/src/server/services/ai/orchestration/safety.ts index 07bc09d..ab9efd0 100644 --- a/src/server/services/ai/orchestration/safety.ts +++ b/src/server/services/ai/orchestration/safety.ts @@ -17,7 +17,7 @@ import JsonSchema from 'jsonschema'; import { Tool, ToolResult, ToolSafetyLevel } from '../types/tool'; import { StreamCallbacks } from '../types/stream'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export class ToolSafetyManager { private requireConfirmation: boolean; diff --git a/src/server/services/ai/providers/gemini.ts b/src/server/services/ai/providers/gemini.ts index ff19a73..ac86844 100644 --- a/src/server/services/ai/providers/gemini.ts +++ b/src/server/services/ai/providers/gemini.ts @@ -18,7 +18,7 @@ import { GoogleGenerativeAI, SchemaType } from '@google/generative-ai'; import { BaseLLMProvider } from './base'; import { ModelInfo, CompletionOptions, StreamChunk, Message } from '../types/provider'; import { Tool, ToolCall } from '../types/tool'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export class GeminiProvider extends BaseLLMProvider { name = 'gemini'; diff --git a/src/server/services/ai/service.ts b/src/server/services/ai/service.ts index 6537820..afd6cbc 100644 --- a/src/server/services/ai/service.ts +++ b/src/server/services/ai/service.ts @@ -39,7 +39,7 @@ import { GitHubClient, } from './tools'; import { DebugContext, DebugMessage } from '../types/aiAgent'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export interface AIAgentConfig { provider: ProviderType; diff --git a/src/server/services/ai/streaming/jsonBuffer.ts b/src/server/services/ai/streaming/jsonBuffer.ts index c6d67ea..98e12f1 100644 --- a/src/server/services/ai/streaming/jsonBuffer.ts +++ b/src/server/services/ai/streaming/jsonBuffer.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export class JSONBuffer { private buffer: string = ''; diff --git a/src/server/services/ai/streaming/responseHandler.ts b/src/server/services/ai/streaming/responseHandler.ts index 9ba8d6f..3499dbe 100644 --- a/src/server/services/ai/streaming/responseHandler.ts +++ b/src/server/services/ai/streaming/responseHandler.ts @@ -16,7 +16,7 @@ import { StreamCallbacks } from '../types/stream'; import { JSONBuffer } from './jsonBuffer'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export class ResponseHandler { private jsonBuffer: JSONBuffer; diff --git a/src/server/services/aiAgent.ts b/src/server/services/aiAgent.ts index 3860623..4cebf4b 100644 --- a/src/server/services/aiAgent.ts +++ b/src/server/services/aiAgent.ts @@ -31,7 +31,7 @@ import { PatchK8sResourceTool, GetIssueCommentTool, } from './ai/tools'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; export default class AIAgentService extends BaseService { private service: AIAgentCore | null = null; diff --git a/src/server/services/build.ts b/src/server/services/build.ts index 6ee6318..50c5d65 100644 --- a/src/server/services/build.ts +++ b/src/server/services/build.ts @@ -30,7 +30,7 @@ import BaseService from './_service'; import _ from 'lodash'; import { QUEUE_NAMES } from 'shared/config'; import { LifecycleError } from 'server/lib/errors'; -import { withLogContext, getLogger, extractContextForQueue, LogStage, updateLogContext } from 'server/lib/logger/index'; +import { withLogContext, getLogger, extractContextForQueue, LogStage, updateLogContext } from 'server/lib/logger'; import { ParsingError, YamlConfigParser } from 'server/lib/yamlConfigParser'; import { ValidationError, YamlConfigValidator } from 'server/lib/yamlConfigValidator'; diff --git a/src/server/services/codefresh.ts b/src/server/services/codefresh.ts index 1837874..52d5533 100644 --- a/src/server/services/codefresh.ts +++ b/src/server/services/codefresh.ts @@ -17,7 +17,7 @@ import BaseService from './_service'; import * as YamlService from 'server/models/yaml'; import { triggerPipeline } from 'server/lib/codefresh'; -import { getLogger, updateLogContext } from 'server/lib/logger/index'; +import { getLogger, updateLogContext } from 'server/lib/logger'; export default class CodefreshService extends BaseService { async triggerYamlConfigWebhookPipeline(webhook: YamlService.Webhook, data: Record): Promise { diff --git a/src/server/services/deploy.ts b/src/server/services/deploy.ts index afc84c0..419b4ff 100644 --- a/src/server/services/deploy.ts +++ b/src/server/services/deploy.ts @@ -17,7 +17,7 @@ import BaseService from './_service'; import { Environment, Build, Service, Deploy, Deployable } from 'server/models'; import * as codefresh from 'server/lib/codefresh'; -import { getLogger, withLogContext } from 'server/lib/logger/index'; +import { getLogger, withLogContext } from 'server/lib/logger'; import hash from 'object-hash'; import { DeployStatus, DeployTypes } from 'shared/constants'; import * as cli from 'server/lib/cli'; diff --git a/src/server/services/deployable.ts b/src/server/services/deployable.ts index b34482c..8248e9e 100644 --- a/src/server/services/deployable.ts +++ b/src/server/services/deployable.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import BaseService from './_service'; import { Environment, Repository, Service, PullRequest, Build, Deploy } from 'server/models'; import Deployable from 'server/models/Deployable'; diff --git a/src/server/services/environment.ts b/src/server/services/environment.ts index 167d569..98c9b75 100644 --- a/src/server/services/environment.ts +++ b/src/server/services/environment.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import Environment from 'server/models/Environment'; import Service from './_service'; diff --git a/src/server/services/github.ts b/src/server/services/github.ts index 7735963..ae275fb 100644 --- a/src/server/services/github.ts +++ b/src/server/services/github.ts @@ -17,7 +17,7 @@ import { parse as fParse } from 'flatted'; import _ from 'lodash'; import Service from './_service'; -import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger'; import { IssueCommentEvent, PullRequestEvent, PushEvent } from '@octokit/webhooks-types'; import { GithubPullRequestActions, diff --git a/src/server/services/globalConfig.ts b/src/server/services/globalConfig.ts index 5cb49dd..18c780c 100644 --- a/src/server/services/globalConfig.ts +++ b/src/server/services/globalConfig.ts @@ -15,7 +15,7 @@ */ import { createAppAuth } from '@octokit/auth-app'; -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import BaseService from './_service'; import { GlobalConfig, LabelsConfig } from './types/globalConfig'; import { GITHUB_APP_INSTALLATION_ID, APP_AUTH, APP_ENV, QUEUE_NAMES } from 'shared/config'; diff --git a/src/server/services/ingress.ts b/src/server/services/ingress.ts index 963d151..de74e5a 100644 --- a/src/server/services/ingress.ts +++ b/src/server/services/ingress.ts @@ -15,7 +15,7 @@ */ /* eslint-disable no-unused-vars */ -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import BaseService from './_service'; import fs from 'fs'; import { TMP_PATH, QUEUE_NAMES } from 'shared/config'; diff --git a/src/server/services/label.ts b/src/server/services/label.ts index 39a6bf6..212b45c 100644 --- a/src/server/services/label.ts +++ b/src/server/services/label.ts @@ -18,7 +18,7 @@ import Service from './_service'; import { Queue, Job } from 'bullmq'; import { QUEUE_NAMES } from 'shared/config'; import { redisClient } from 'server/lib/dependencies'; -import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger'; import { waitForColumnValue } from 'shared/utils'; import { updatePullRequestLabels } from 'server/lib/github'; import { getDeployLabel } from 'server/lib/utils'; diff --git a/src/server/services/override.ts b/src/server/services/override.ts index 1ae6842..96f5860 100644 --- a/src/server/services/override.ts +++ b/src/server/services/override.ts @@ -15,7 +15,7 @@ */ import BaseService from './_service'; -import { getLogger, updateLogContext } from 'server/lib/logger/index'; +import { getLogger, updateLogContext } 
from 'server/lib/logger'; import { Build } from 'server/models'; import * as k8s from 'server/lib/kubernetes'; import DeployService from './deploy'; diff --git a/src/server/services/pullRequest.ts b/src/server/services/pullRequest.ts index d6933a0..1037e87 100644 --- a/src/server/services/pullRequest.ts +++ b/src/server/services/pullRequest.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { withLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage } from 'server/lib/logger'; import { PullRequest, Repository } from 'server/models'; import BaseService from './_service'; import { UniqueViolationError } from 'objection'; diff --git a/src/server/services/repository.ts b/src/server/services/repository.ts index a7ba36e..cdd65c4 100644 --- a/src/server/services/repository.ts +++ b/src/server/services/repository.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { Repository } from 'server/models'; import BaseService from './_service'; diff --git a/src/server/services/service.ts b/src/server/services/service.ts index 354a00f..081d6dd 100644 --- a/src/server/services/service.ts +++ b/src/server/services/service.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import { Environment, Repository } from 'server/models'; import ServiceModel from 'server/models/Service'; import { CAPACITY_TYPE, DeployTypes } from 'shared/constants'; diff --git a/src/server/services/ttlCleanup.ts b/src/server/services/ttlCleanup.ts index 96f0212..3e0f0ae 100644 --- a/src/server/services/ttlCleanup.ts +++ b/src/server/services/ttlCleanup.ts @@ -18,7 +18,7 @@ import Service from './_service'; import { Queue, Job } from 'bullmq'; import { QUEUE_NAMES } from 'shared/config'; import { redisClient } from 'server/lib/dependencies'; -import { withLogContext, updateLogContext, getLogger, LogStage } from 'server/lib/logger/index'; +import { withLogContext, updateLogContext, getLogger, LogStage } from 'server/lib/logger'; import * as k8s from '@kubernetes/client-node'; import { updatePullRequestLabels, createOrUpdatePullRequestComment, getPullRequestLabels } from 'server/lib/github'; import { getKeepLabel, getDisabledLabel, getDeployLabel } from 'server/lib/utils'; diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts index 38e5fce..80ab2d6 100644 --- a/src/server/services/webhook.ts +++ b/src/server/services/webhook.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger/index'; +import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger'; import BaseService from './_service'; import { Build, PullRequest } from 'server/models'; import * as YamlService from 'server/models/yaml'; diff --git a/src/shared/utils.ts b/src/shared/utils.ts index 7ed9e01..58cbb62 100644 --- a/src/shared/utils.ts +++ b/src/shared/utils.ts @@ -21,7 +21,7 @@ import { Deploy } from 'server/models'; import Fastly from 'server/lib/fastly'; import { Link, FeatureFlags } from 'shared/types'; import { DD_URL, DD_LOG_URL } from 'shared/constants'; -import { getLogger } from 'server/lib/logger/index'; +import { getLogger } from 'server/lib/logger'; import Model from 'server/models/_Model'; /** From e79f60bf86176ddd12971e6db7ac198d68e3d3bc Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 22:52:18 -0800 Subject: [PATCH 21/23] fix build --- ws-server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ws-server.ts b/ws-server.ts index e269767..ff64f01 100644 --- a/ws-server.ts +++ b/ws-server.ts @@ -31,7 +31,7 @@ import { createServer, IncomingMessage, ServerResponse } from 'http'; import { parse } from 'url'; import next from 'next'; import { WebSocketServer, WebSocket } from 'ws'; -import rootLogger from './src/server/lib/logger'; +import { rootLogger } from './src/server/lib/logger'; import { streamK8sLogs, AbortHandle } from './src/server/lib/k8sStreamer'; const dev = process.env.NODE_ENV !== 'production'; From de193b260a657fad26540f68cc01d79100214b3b Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Mon, 12 Jan 2026 23:43:06 -0800 Subject: [PATCH 22/23] fix swallowed errors with proper logging --- src/pages/api/v1/ai/config.ts | 2 ++ src/pages/api/v1/setup/configure.ts | 2 ++ src/server/lib/__tests__/utils.test.ts | 2 +- src/server/lib/auth.ts | 4 ++-- src/server/lib/codefresh/index.ts | 4 +++- src/server/lib/logStreamingHelper.ts | 2 +- src/server/lib/nativeBuild/index.ts | 2 +- src/server/lib/tracer/index.ts | 4 ++-- src/server/lib/utils.ts | 2 +- src/server/services/ai/orchestration/orchestrator.ts | 2 +- src/server/services/ai/streaming/jsonBuffer.ts | 2 +- src/server/services/aiAgent.ts | 2 +- src/shared/utils.ts | 2 +- 13 files changed, 19 insertions(+), 13 deletions(-) diff --git a/src/pages/api/v1/ai/config.ts b/src/pages/api/v1/ai/config.ts index 84706bb..bba57b8 100644 --- a/src/pages/api/v1/ai/config.ts +++ b/src/pages/api/v1/ai/config.ts @@ -15,6 +15,7 @@ */ import { NextApiRequest, NextApiResponse } from 'next'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 'server/services/globalConfig'; export default async function handler(req: NextApiRequest, res: NextApiResponse) { @@ -39,6 +40,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) configured: apiKeySet, }); } catch (error) { + getLogger().error({ error }, 'AI: config fetch failed'); return res.status(500).json({ enabled: false }); } } diff --git a/src/pages/api/v1/setup/configure.ts b/src/pages/api/v1/setup/configure.ts index 9a19eec..d44d200 100644 --- a/src/pages/api/v1/setup/configure.ts +++ b/src/pages/api/v1/setup/configure.ts @@ -17,6 +17,7 @@ import { NextApiRequest, NextApiResponse } from 'next'; import { getCurrentNamespaceFromFile } from 'server/lib/kubernetes'; import { shellPromise } from 'server/lib/shell'; +import { getLogger } from 'server/lib/logger'; import GlobalConfigService from 
'server/services/globalConfig'; import { APP_HOST } from 'shared/config'; @@ -63,6 +64,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse) await globalConfigService.setConfig('app_setup', updated_app_setup); res.status(200).json({ message: 'Deployment restarted' }); } catch (error) { + getLogger().error({ error }, 'Setup: deployment restart failed'); return res.status(500).json({ error: 'Restarting deployment failed' }); } } diff --git a/src/server/lib/__tests__/utils.test.ts b/src/server/lib/__tests__/utils.test.ts index d94a26c..bcc1329 100644 --- a/src/server/lib/__tests__/utils.test.ts +++ b/src/server/lib/__tests__/utils.test.ts @@ -76,7 +76,7 @@ describe('exec', () => { const execCmd = jest.fn().mockRejectedValue(new Error('error')); await exec('cmd', ['arg1', 'arg2'], { execCmd }); - expect(getLogger().error).toHaveBeenCalledWith('Exec: error executing runner=cmd error={}'); + expect(getLogger().error).toHaveBeenCalledWith({ error: new Error('error') }, 'Exec: command failed runner=cmd'); }); test('exec no stdout', async () => { diff --git a/src/server/lib/auth.ts b/src/server/lib/auth.ts index 854d6ce..f8a98c5 100644 --- a/src/server/lib/auth.ts +++ b/src/server/lib/auth.ts @@ -52,7 +52,7 @@ export async function verifyAuth(request: NextRequest): Promise { const jwksUrl = process.env.KEYCLOAK_JWKS_URL; if (!issuer || !audience || !jwksUrl) { - console.error('Missing required Keycloak environment variables'); + console.error('Auth: missing Keycloak environment variables'); return { success: false, error: { message: 'Server configuration error', status: 500 }, @@ -73,7 +73,7 @@ export async function verifyAuth(request: NextRequest): Promise { return { success: true, payload }; } catch (error) { const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred'; - console.error('JWT Verification Error:', errorMessage); + console.error('Auth: JWT verification failed', error); // 6. If any part of the verification fails, return an error. 
return { diff --git a/src/server/lib/codefresh/index.ts b/src/server/lib/codefresh/index.ts index 64d4aca..ec0ff5e 100644 --- a/src/server/lib/codefresh/index.ts +++ b/src/server/lib/codefresh/index.ts @@ -91,6 +91,7 @@ export const waitForImage = async (id: string, { timeoutMs = 180000, intervalMs const checkStatus = checkPipelineStatus(id); return await waitUntil(checkStatus, { timeoutMs, intervalMs }); } catch (error) { + getLogger().error({ error }, `Codefresh: waitForImage failed pipelineId=${id}`); return false; } }; @@ -140,6 +141,7 @@ export const getLogs = async (id: string) => { const output = await shellPromise(command); return output; } catch (error) { - return error; + getLogger().error({ error }, `Codefresh: getLogs failed pipelineId=${id}`); + return ''; } }; diff --git a/src/server/lib/logStreamingHelper.ts b/src/server/lib/logStreamingHelper.ts index cfcd8b6..774a422 100644 --- a/src/server/lib/logStreamingHelper.ts +++ b/src/server/lib/logStreamingHelper.ts @@ -41,7 +41,7 @@ export async function getLogStreamingInfoForJob( try { podInfo = await getK8sJobStatusAndPod(jobName, namespace); } catch (k8sError: any) { - getLogger().error(`LogStreaming: error fetching job status jobName=${jobName} error=${k8sError.message}`); + getLogger().error({ error: k8sError }, `LogStreaming: job status fetch failed jobName=${jobName}`); const errorStatus: LogSourceStatus = { status: 'Unknown', streamingRequired: false, diff --git a/src/server/lib/nativeBuild/index.ts b/src/server/lib/nativeBuild/index.ts index 2766431..46027d1 100644 --- a/src/server/lib/nativeBuild/index.ts +++ b/src/server/lib/nativeBuild/index.ts @@ -64,7 +64,7 @@ export async function buildWithNative(deploy: Deploy, options: NativeBuildOption return result; } catch (error) { const duration = Date.now() - startTime; - getLogger().error(`Build: failed error=${error.message} duration=${duration}ms`); + getLogger().error({ error }, `Build: failed duration=${duration}ms`); return { success: false, diff --git a/src/server/lib/tracer/index.ts b/src/server/lib/tracer/index.ts index 6adc8ac..b309dba 100644 --- a/src/server/lib/tracer/index.ts +++ b/src/server/lib/tracer/index.ts @@ -61,7 +61,7 @@ export class Tracer { } return this; } catch (error) { - getLogger().error(`Tracer: initialization error error=${error}`); + getLogger().error({ error }, 'Tracer: initialization failed'); return this; } } @@ -104,7 +104,7 @@ export class Tracer { if (typeof tracer?.scope === 'function') { tracer.scope().active()?.setTag('error', true); } - getLogger().error(`Tracer: error decorating method=${propertyKey.toString()} error=${error}`); + getLogger().error({ error }, `Tracer: decorator failed method=${propertyKey.toString()}`); throw error; } }); diff --git a/src/server/lib/utils.ts b/src/server/lib/utils.ts index 1cecde2..a4a851c 100644 --- a/src/server/lib/utils.ts +++ b/src/server/lib/utils.ts @@ -30,7 +30,7 @@ export const exec = async (runner: string, cmd: string[], { execCmd = execFilePr const out = await execCmd(runner, cmd); return out?.stdout || ''; } catch (err) { - getLogger().error(`Exec: error executing runner=${runner} error=${JSON.stringify(err)}`); + getLogger().error({ error: err }, `Exec: command failed runner=${runner}`); return ''; } }; diff --git a/src/server/services/ai/orchestration/orchestrator.ts b/src/server/services/ai/orchestration/orchestrator.ts index 97822d9..26b0069 100644 --- a/src/server/services/ai/orchestration/orchestrator.ts +++ b/src/server/services/ai/orchestration/orchestrator.ts @@ -82,7 +82,7 @@ 
export class ToolOrchestrator { } } } catch (error: any) { - getLogger().error(`AI: stream error message=${error.message} buildUuid=${buildUuid || 'none'}`); + getLogger().error({ error }, `AI: stream error buildUuid=${buildUuid || 'none'}`); return { success: false, error: error.message || 'Provider error', diff --git a/src/server/services/ai/streaming/jsonBuffer.ts b/src/server/services/ai/streaming/jsonBuffer.ts index 98e12f1..71bfa93 100644 --- a/src/server/services/ai/streaming/jsonBuffer.ts +++ b/src/server/services/ai/streaming/jsonBuffer.ts @@ -45,7 +45,7 @@ export class JSONBuffer { try { return JSON.parse(this.buffer); } catch (error: any) { - getLogger().error(`JSONBuffer: parse failed bufferLength=${this.buffer.length} error=${error?.message}`); + getLogger().error({ error }, `JSONBuffer: parse failed bufferLength=${this.buffer.length}`); return null; } } diff --git a/src/server/services/aiAgent.ts b/src/server/services/aiAgent.ts index 4cebf4b..bbd5244 100644 --- a/src/server/services/aiAgent.ts +++ b/src/server/services/aiAgent.ts @@ -237,7 +237,7 @@ Respond with ONLY the word INVESTIGATE or FIX, nothing else.`; return 'investigate'; } } catch (error: any) { - getLogger().error(`AI: classifyUserIntent failed error=${error?.message}`); + getLogger().error({ error }, 'AI: classifyUserIntent failed'); return 'investigate'; } } diff --git a/src/shared/utils.ts b/src/shared/utils.ts index 58cbb62..4221162 100644 --- a/src/shared/utils.ts +++ b/src/shared/utils.ts @@ -148,7 +148,7 @@ export const constructFastlyBuildLink = async ( const { href: url = '' } = (await fastlyFn(fastlyBuildId, fastlyServiceType)) || {}; return url ? { name: 'Fastly Dashboard', url } : {}; } catch (err) { - getLogger().error(`Fastly: error constructing build link error=${err}`); + getLogger().error({ error: err }, 'Fastly: build link construction failed'); return {}; } }; From abc2f167561df11b9460e01c4c736de8c53c9ca2 Mon Sep 17 00:00:00 2001 From: vmelikyan Date: Tue, 13 Jan 2026 09:51:58 -0800 Subject: [PATCH 23/23] move rootLogger to logger module --- src/server/lib/github/__tests__/index.test.ts | 3 +-- src/server/lib/github/__tests__/utils.test.ts | 4 ++-- src/server/lib/kubernetes/JobMonitor.ts | 12 +++++++++++- .../lib/logger/__tests__/contextLogger.test.ts | 2 +- src/server/lib/logger/contextLogger.ts | 2 +- src/server/lib/logger/index.ts | 2 +- src/server/lib/{ => logger}/rootLogger.ts | 12 +++++++++--- 7 files changed, 26 insertions(+), 11 deletions(-) rename src/server/lib/{ => logger}/rootLogger.ts (78%) diff --git a/src/server/lib/github/__tests__/index.test.ts b/src/server/lib/github/__tests__/index.test.ts index f5a6ceb..9051e0f 100644 --- a/src/server/lib/github/__tests__/index.test.ts +++ b/src/server/lib/github/__tests__/index.test.ts @@ -53,8 +53,7 @@ jest.mock('server/lib/logger', () => ({ warn: jest.fn(), }), })); -import { getLogger } from 'server/lib/logger'; -import logger from 'server/lib/rootLogger'; +import { getLogger, rootLogger as logger } from 'server/lib/logger'; test('createOrUpdatePullRequestComment success', async () => { jest.spyOn(client, 'createOctokitClient').mockResolvedValue({ diff --git a/src/server/lib/github/__tests__/utils.test.ts b/src/server/lib/github/__tests__/utils.test.ts index 5a6e208..031536c 100644 --- a/src/server/lib/github/__tests__/utils.test.ts +++ b/src/server/lib/github/__tests__/utils.test.ts @@ -35,9 +35,9 @@ jest.mock('server/services/globalConfig', () => { jest.mock('server/lib/github/client'); -jest.mock('server/lib/rootLogger'); 
+jest.mock('server/lib/logger/rootLogger'); -import logger from 'server/lib/rootLogger'; +import { rootLogger as logger } from 'server/lib/logger'; test('getAppToken success', async () => { const app = jest.fn().mockResolvedValue({ token: '123' }); diff --git a/src/server/lib/kubernetes/JobMonitor.ts b/src/server/lib/kubernetes/JobMonitor.ts index 1e841e2..92975d5 100644 --- a/src/server/lib/kubernetes/JobMonitor.ts +++ b/src/server/lib/kubernetes/JobMonitor.ts @@ -280,7 +280,17 @@ export class JobMonitor { ); if (failedStatus.trim() === 'True') { - getLogger().error(`Job: failed name=${this.jobName}`); + const failedReason = await shellPromise( + `kubectl get job ${this.jobName} -n ${this.namespace} -o jsonpath='{.status.conditions[?(@.type=="Failed")].reason}'` + ); + const failedMessage = await shellPromise( + `kubectl get job ${this.jobName} -n ${this.namespace} -o jsonpath='{.status.conditions[?(@.type=="Failed")].message}'` + ); + getLogger().error( + `Job: failed name=${this.jobName} reason=${failedReason.trim() || 'Unknown'} message=${ + failedMessage.trim() || 'No message' + }` + ); // Check if job was superseded try { diff --git a/src/server/lib/logger/__tests__/contextLogger.test.ts b/src/server/lib/logger/__tests__/contextLogger.test.ts index 9800706..73d5f60 100644 --- a/src/server/lib/logger/__tests__/contextLogger.test.ts +++ b/src/server/lib/logger/__tests__/contextLogger.test.ts @@ -23,7 +23,7 @@ const mockChild = jest.fn().mockReturnValue({ debug: jest.fn(), }); -jest.mock('../../rootLogger', () => ({ +jest.mock('../rootLogger', () => ({ __esModule: true, default: { child: (...args: unknown[]) => mockChild(...args), diff --git a/src/server/lib/logger/contextLogger.ts b/src/server/lib/logger/contextLogger.ts index 0ddee31..065be4d 100644 --- a/src/server/lib/logger/contextLogger.ts +++ b/src/server/lib/logger/contextLogger.ts @@ -15,7 +15,7 @@ */ import tracer from 'dd-trace'; -import rootLogger from '../rootLogger'; +import rootLogger from './rootLogger'; import { getLogContext } from './context'; import type { LogContext } from './types'; diff --git a/src/server/lib/logger/index.ts b/src/server/lib/logger/index.ts index a7111b2..774291e 100644 --- a/src/server/lib/logger/index.ts +++ b/src/server/lib/logger/index.ts @@ -14,7 +14,7 @@ * limitations under the License. */ -export { default as rootLogger } from '../rootLogger'; +export { default as rootLogger } from './rootLogger'; export { getLogContext, withLogContext, updateLogContext, extractContextForQueue } from './context'; export { getLogger } from './contextLogger'; export { withSpan } from './spans'; diff --git a/src/server/lib/rootLogger.ts b/src/server/lib/logger/rootLogger.ts similarity index 78% rename from src/server/lib/rootLogger.ts rename to src/server/lib/logger/rootLogger.ts index 7216527..45b51da 100644 --- a/src/server/lib/rootLogger.ts +++ b/src/server/lib/logger/rootLogger.ts @@ -16,7 +16,7 @@ import pino from 'pino'; import pinoCaller from 'pino-caller'; -import { LOG_LEVEL } from '../../shared/config'; +import { LOG_LEVEL } from '../../../shared/config'; export const enabled = process.env.PINO_LOGGER === 'false' ? 
false : true; export const level = LOG_LEVEL || 'info'; @@ -32,9 +32,15 @@ const transport = { }; const serializers = { - error: (value: unknown): string => { + error: (value: unknown): Record<string, unknown> | string => { if (value instanceof Error) { - return { + type: value.name, + message: value.message, + stack: value.stack, + ...((value as any).code && { code: (value as any).code }), + ...((value as any).statusCode && { statusCode: (value as any).statusCode }), + }; } if (typeof value === 'object' && value !== null) { try {