diff --git a/dd-trace.js b/dd-trace.js
new file mode 100644
index 0000000..dca809e
--- /dev/null
+++ b/dd-trace.js
@@ -0,0 +1,43 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+'use strict';
+
+const tracer = require('dd-trace').init({
+ serviceMapping: {
+ redis: 'lifecycle-redis',
+ ioredis: 'lifecycle-redis',
+ pg: 'lifecycle-postgres',
+ },
+});
+
+const blocklist = [/^\/api\/health/, /^\/api\/jobs/, /^\/_next\/static/, /^\/_next\/webpack-hmr/];
+
+tracer.use('http', {
+ server: {
+ blocklist,
+ },
+ client: {
+ blocklist,
+ },
+});
+
+tracer.use('next', {
+ blocklist,
+});
+
+tracer.use('net', false);
+tracer.use('dns', false);
diff --git a/helm/environments/local/lifecycle.yaml b/helm/environments/local/lifecycle.yaml
index efcf00e..acc64bb 100644
--- a/helm/environments/local/lifecycle.yaml
+++ b/helm/environments/local/lifecycle.yaml
@@ -66,6 +66,8 @@ components:
value: web
- name: PORT
value: '80'
+ - name: DD_TRACE_ENABLED
+ value: 'false'
ports:
- name: http
containerPort: 80
@@ -122,6 +124,8 @@ components:
value: '10000'
- name: LIFECYCLE_UI_HOSTHAME_WITH_SCHEME
value: 'http://localhost:8000'
+ - name: DD_TRACE_ENABLED
+ value: 'false'
ports:
- name: http
containerPort: 80
diff --git a/next-env.d.ts b/next-env.d.ts
index 4f11a03..725dd6f 100644
--- a/next-env.d.ts
+++ b/next-env.d.ts
@@ -1,5 +1,6 @@
 /// <reference types="next" />
 /// <reference types="next/image-types/global" />
+/// <reference types="next/navigation-types/compat/navigation" />
 
 // NOTE: This file should not be edited
-// see https://nextjs.org/docs/basic-features/typescript for more information.
+// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information.
diff --git a/package.json b/package.json
index 8404b38..8c4a388 100644
--- a/package.json
+++ b/package.json
@@ -7,17 +7,17 @@
],
"scripts": {
"babel-node": "babel-node --extensions '.ts'",
- "dev": "LOG_LEVEL=debug ts-node -r tsconfig-paths/register --project tsconfig.server.json ws-server.ts | pino-pretty -c -t HH:MM -i pid,hostname,filename -o '{msg}'",
+ "dev": "LOG_LEVEL=debug ts-node -r ./dd-trace.js -r tsconfig-paths/register --project tsconfig.server.json ws-server.ts | pino-pretty -c -t HH:MM -i pid,hostname,filename -o '{msg}'",
"build": "next build && tsc --project tsconfig.server.json && tsc-alias -p tsconfig.server.json",
- "start": "NEXT_MANUAL_SIG_HANDLE=true NODE_ENV=production node .next/ws-server.js",
+ "start": "NEXT_MANUAL_SIG_HANDLE=true NODE_ENV=production node -r ./dd-trace.js .next/ws-server.js",
"run-prod": "port=5001 pnpm run start",
"knex": "pnpm run knex",
"test": "NODE_ENV=test jest --maxWorkers=75%",
"lint": "eslint --ext .ts src",
"lint:fix": "pnpm run lint --fix",
"ts-check": "tsc --project tsconfig.json",
- "db:migrate": "NODE_OPTIONS='--loader ts-node/esm' knex migrate:latest",
- "db:rollback": "NODE_OPTIONS='--loader ts-node/esm' knex migrate:rollback",
+ "db:migrate": "tsx node_modules/knex/bin/cli.js migrate:latest",
+ "db:rollback": "tsx node_modules/knex/bin/cli.js migrate:rollback",
"db:seed": "knex seed:run",
"prepare": "husky install",
"generate:jsonschemas": "tsx ./scripts/generateSchemas.ts generatejson",
diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts
index 4a1e888..7e57cbe 100644
--- a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts
+++ b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/[jobName]/route.ts
@@ -15,16 +15,12 @@
*/
import { NextRequest } from 'next/server';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { LogStreamingService } from 'server/services/logStreaming';
import { HttpError } from '@kubernetes/client-node';
import { createApiHandler } from 'server/lib/createApiHandler';
import { errorResponse, successResponse } from 'server/lib/response';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
interface RouteParams {
uuid: string;
name: string;
@@ -96,7 +92,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams })
const { uuid, name: serviceName, jobName } = params;
if (!uuid || !jobName || !serviceName) {
- logger.warn({ uuid, serviceName, jobName }, 'Missing or invalid path parameters');
+ getLogger().warn(`API: invalid params uuid=${uuid} serviceName=${serviceName} jobName=${jobName}`);
return errorResponse('Missing or invalid parameters', { status: 400 }, req);
}
@@ -107,7 +103,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams })
return successResponse(response, { status: 200 }, req);
} catch (error: any) {
- logger.error({ err: error, uuid, serviceName, jobName }, 'Error getting log streaming info');
+ getLogger().error({ error }, `API: log streaming info failed jobName=${jobName} service=${serviceName}`);
if (error.message === 'Build not found') {
return errorResponse('Build not found', { status: 404 }, req);
diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts
index 12c6762..58d174b 100644
--- a/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts
+++ b/src/app/api/v2/builds/[uuid]/services/[name]/build-jobs/route.ts
@@ -15,16 +15,12 @@
*/
import { NextRequest } from 'next/server';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { HttpError } from '@kubernetes/client-node';
import { createApiHandler } from 'server/lib/createApiHandler';
import { errorResponse, successResponse } from 'server/lib/response';
import { getNativeBuildJobs } from 'server/lib/kubernetes/getNativeBuildJobs';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
/**
* @openapi
* /api/v2/builds/{uuid}/services/{name}/builds:
@@ -87,7 +83,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string
const { uuid, name } = params;
if (!uuid || !name) {
- logger.warn({ uuid, name }, 'Missing or invalid path parameters');
+ getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`);
return errorResponse('Missing or invalid uuid or name parameters', { status: 400 }, req);
}
@@ -99,7 +95,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string
return successResponse(response, { status: 200 }, req);
} catch (error) {
- logger.error({ err: error }, `Error getting build logs for service ${name} in environment ${uuid}.`);
+ getLogger().error({ error }, `API: build logs fetch failed service=${name}`);
if (error instanceof HttpError) {
if (error.response?.statusCode === 404) {
diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts
index 2630159..e6cb2ad 100644
--- a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts
+++ b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/[jobName]/route.ts
@@ -15,16 +15,12 @@
*/
import { NextRequest } from 'next/server';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { LogStreamingService } from 'server/services/logStreaming';
import { HttpError } from '@kubernetes/client-node';
import { createApiHandler } from 'server/lib/createApiHandler';
import { errorResponse, successResponse } from 'server/lib/response';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
interface RouteParams {
uuid: string;
name: string;
@@ -96,7 +92,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams })
const { uuid, name: serviceName, jobName } = params;
if (!uuid || !jobName || !serviceName) {
- logger.warn({ uuid, serviceName, jobName }, 'Missing or invalid path parameters');
+ getLogger().warn(`API: invalid params uuid=${uuid} serviceName=${serviceName} jobName=${jobName}`);
return errorResponse('Missing or invalid parameters', { status: 400 }, req);
}
@@ -107,7 +103,7 @@ const getHandler = async (req: NextRequest, { params }: { params: RouteParams })
return successResponse(response, { status: 200 }, req);
} catch (error: any) {
- logger.error({ err: error, uuid, serviceName, jobName }, 'Error getting log streaming info');
+ getLogger().error({ error }, `API: log streaming info failed jobName=${jobName} service=${serviceName}`);
if (error.message === 'Deploy not found') {
return errorResponse('Deploy not found', { status: 404 }, req);
diff --git a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts
index e5f58fb..cdc3c6e 100644
--- a/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts
+++ b/src/app/api/v2/builds/[uuid]/services/[name]/deploy-jobs/route.ts
@@ -15,16 +15,12 @@
*/
import { NextRequest } from 'next/server';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { HttpError } from '@kubernetes/client-node';
import { createApiHandler } from 'server/lib/createApiHandler';
import { errorResponse, successResponse } from 'server/lib/response';
import { getDeploymentJobs } from 'server/lib/kubernetes/getDeploymentJobs';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
/**
* @openapi
* /api/v2/builds/{uuid}/services/{name}/deploys:
@@ -85,7 +81,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string
const { uuid, name } = params;
if (!uuid || !name) {
- logger.warn({ uuid, name }, 'Missing or invalid path parameters');
+ getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`);
return errorResponse('Missing or invalid uuid or name parameters', { status: 400 }, req);
}
@@ -97,7 +93,7 @@ const getHandler = async (req: NextRequest, { params }: { params: { uuid: string
return successResponse(response, { status: 200 }, req);
} catch (error) {
- logger.error({ err: error }, `Error getting deploy logs for service ${name} in environment ${uuid}.`);
+ getLogger().error({ error }, `API: deploy logs fetch failed service=${name}`);
if (error instanceof HttpError) {
if (error.response?.statusCode === 404) {
diff --git a/src/pages/api/health.ts b/src/pages/api/health.ts
index 3fc8a9e..0bb1c09 100644
--- a/src/pages/api/health.ts
+++ b/src/pages/api/health.ts
@@ -16,7 +16,7 @@
import { NextApiRequest, NextApiResponse } from 'next';
import { defaultDb } from 'server/lib/dependencies';
-import logger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import RedisClient from 'server/lib/redisClient';
export default async function healthHandler(req: NextApiRequest, res: NextApiResponse) {
@@ -30,7 +30,7 @@ export default async function healthHandler(req: NextApiRequest, res: NextApiRes
await defaultDb.knex.raw('SELECT 1');
res.status(200).json({ status: 'Healthy' });
} catch (error) {
- logger.error(`Health check failed. Error:\n ${error}`);
+ getLogger().error({ error }, 'Health: check failed');
return res.status(500).json({ status: 'Unhealthy', error: `An error occurred while performing health check.` });
}
}
diff --git a/src/pages/api/v1/admin/ttl/cleanup.ts b/src/pages/api/v1/admin/ttl/cleanup.ts
index d159f65..39452bb 100644
--- a/src/pages/api/v1/admin/ttl/cleanup.ts
+++ b/src/pages/api/v1/admin/ttl/cleanup.ts
@@ -15,14 +15,11 @@
*/
import { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { nanoid } from 'nanoid';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import GlobalConfigService from 'server/services/globalConfig';
import TTLCleanupService from 'server/services/ttlCleanup';
-const logger = rootLogger.child({
- filename: 'v1/admin/ttl/cleanup.ts',
-});
-
/**
* @openapi
* /api/v1/admin/ttl/cleanup:
@@ -160,7 +157,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(405).json({ error: `${req.method} is not allowed.` });
}
} catch (error) {
- logger.error(`Error occurred on TTL cleanup operation: \n ${error}`);
+ getLogger().error({ error }, 'TTL: cleanup operation failed');
res.status(500).json({ error: 'An unexpected error occurred.' });
}
};
@@ -172,39 +169,45 @@ async function getTTLConfig(res: NextApiResponse) {
const ttlConfig = globalConfig.ttl_cleanup;
if (!ttlConfig) {
- logger.warn('[API] TTL cleanup configuration not found in global config');
+ getLogger().warn('TTL: config not found');
return res.status(404).json({ error: 'TTL cleanup configuration not found' });
}
return res.status(200).json({ config: ttlConfig });
} catch (error) {
- logger.error(`[API] Error occurred retrieving TTL cleanup config: \n ${error}`);
+ getLogger().error({ error }, 'TTL: config retrieval failed');
return res.status(500).json({ error: 'Unable to retrieve TTL cleanup configuration' });
}
}
async function triggerTTLCleanup(req: NextApiRequest, res: NextApiResponse) {
- try {
- const { dryRun = false } = req.body || {};
+ const correlationId = `api-ttl-cleanup-${Date.now()}-${nanoid(8)}`;
- // Validate dryRun parameter type
- if (typeof dryRun !== 'boolean') {
- return res.status(400).json({ error: 'dryRun must be a boolean value' });
- }
+ return withLogContext({ correlationId }, async () => {
+ try {
+ const { dryRun = false } = req.body || {};
- // Create new service instance and add job to queue
- const ttlCleanupService = new TTLCleanupService();
- const job = await ttlCleanupService.ttlCleanupQueue.add('manual-ttl-cleanup', { dryRun });
+ // Validate dryRun parameter type
+ if (typeof dryRun !== 'boolean') {
+ return res.status(400).json({ error: 'dryRun must be a boolean value' });
+ }
- logger.info(`[API] TTL cleanup job triggered manually (job ID: ${job.id}, dryRun: ${dryRun})`);
+ // Create new service instance and add job to queue
+ const ttlCleanupService = new TTLCleanupService();
+ const job = await ttlCleanupService.ttlCleanupQueue.add('manual-ttl-cleanup', { dryRun, correlationId });
- return res.status(200).json({
- message: 'TTL cleanup job triggered successfully',
- jobId: job.id,
- dryRun,
- });
- } catch (error) {
- logger.error(`[API] Error occurred triggering TTL cleanup: \n ${error}`);
- return res.status(500).json({ error: 'Unable to trigger TTL cleanup job' });
- }
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).info(
+ `TTL: cleanup job triggered manually jobId=${job.id} dryRun=${dryRun}`
+ );
+
+ return res.status(200).json({
+ message: 'TTL cleanup job triggered successfully',
+ jobId: job.id,
+ dryRun,
+ });
+ } catch (error) {
+ getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'TTL: cleanup trigger failed');
+ return res.status(500).json({ error: 'Unable to trigger TTL cleanup job' });
+ }
+ });
}
diff --git a/src/pages/api/v1/ai/chat.ts b/src/pages/api/v1/ai/chat.ts
index 66ba02b..fa3f3ae 100644
--- a/src/pages/api/v1/ai/chat.ts
+++ b/src/pages/api/v1/ai/chat.ts
@@ -20,9 +20,7 @@ import AIAgentContextService from 'server/services/ai/context/gatherer';
import AIAgentConversationService from 'server/services/ai/conversation/storage';
import AIAgentService from 'server/services/aiAgent';
import GlobalConfigService from 'server/services/globalConfig';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ filename: 'api/v2/debug/chat' });
+import { getLogger, withLogContext } from 'server/lib/logger';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method !== 'POST') {
@@ -56,185 +54,173 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
}
- const logger = rootLogger.child({ filename: 'api/v2/debug/chat', buildUuid });
+ return withLogContext({ buildUuid }, async () => {
+ const aiAgentContextService = new AIAgentContextService(defaultDb, defaultRedis);
+ const conversationService = new AIAgentConversationService(defaultDb, defaultRedis);
+ const llmService = new AIAgentService(defaultDb, defaultRedis);
- const aiAgentContextService = new AIAgentContextService(defaultDb, defaultRedis);
- const conversationService = new AIAgentConversationService(defaultDb, defaultRedis);
- const llmService = new AIAgentService(defaultDb, defaultRedis);
+ try {
+ if (provider && modelId) {
+ await llmService.initializeWithMode('investigate', provider, modelId);
+ } else {
+ await llmService.initialize();
+ }
+ } catch (error) {
+ getLogger().error({ error }, 'AI: init failed');
+ res.write(
+ `data: ${JSON.stringify({
+ error: error.message,
+ code: 'LLM_INIT_ERROR',
+ })}\n\n`
+ );
+ return res.end();
+ }
- try {
- if (provider && modelId) {
- await llmService.initializeWithMode('investigate', provider, modelId);
- } else {
- await llmService.initialize();
+ if (clearHistory) {
+ await conversationService.clearConversation(buildUuid);
}
- } catch (error) {
- logger.error({ error }, 'Failed to initialize LLM service');
- res.write(
- `data: ${JSON.stringify({
- error: error.message,
- code: 'LLM_INIT_ERROR',
- })}\n\n`
- );
- return res.end();
- }
- if (clearHistory) {
- await conversationService.clearConversation(buildUuid);
- }
+ const conversation = await conversationService.getConversation(buildUuid);
+ const conversationHistory = conversation?.messages || [];
- const conversation = await conversationService.getConversation(buildUuid);
- const conversationHistory = conversation?.messages || [];
+ let context;
+ try {
+ context = await aiAgentContextService.gatherFullContext(buildUuid);
+ } catch (error) {
+ getLogger().error({ error }, 'AI: context gather failed');
+ res.write(
+ `data: ${JSON.stringify({
+ error: `Build not found: ${error.message}`,
+ code: 'CONTEXT_ERROR',
+ })}\n\n`
+ );
+ return res.end();
+ }
- let context;
- try {
- context = await aiAgentContextService.gatherFullContext(buildUuid);
- } catch (error) {
- logger.error({ error, buildUuid }, 'Failed to gather context');
- res.write(
- `data: ${JSON.stringify({
- error: `Build not found: ${error.message}`,
- code: 'CONTEXT_ERROR',
- })}\n\n`
- );
- return res.end();
- }
+ let aiResponse = '';
+ let isJsonResponse = false;
+ let totalInvestigationTimeMs = 0;
+ try {
+ const mode = await llmService.classifyUserIntent(message, conversationHistory);
+ getLogger().info(`AI: classified user intent mode=${mode}`);
+
+ const result = await llmService.processQueryStream(
+ message,
+ context,
+ conversationHistory,
+ (chunk) => {
+ res.write(`data: ${JSON.stringify({ type: 'chunk', content: chunk })}\n\n`);
+ },
+ (activity) => {
+ res.write(`data: ${JSON.stringify(activity)}\n\n`);
+ if (typeof (res as any).flush === 'function') {
+ (res as any).flush();
+ }
+ },
+ undefined,
+ mode
+ );
- let aiResponse = '';
- let isJsonResponse = false;
- let totalInvestigationTimeMs = 0;
- try {
- const mode = await llmService.classifyUserIntent(message, conversationHistory);
- logger.info(`Classified user intent as: ${mode}`);
-
- const result = await llmService.processQueryStream(
- message,
- context,
- conversationHistory,
- (chunk) => {
- res.write(`data: ${JSON.stringify({ type: 'chunk', content: chunk })}\n\n`);
- },
- (activity) => {
- res.write(`data: ${JSON.stringify(activity)}\n\n`);
- if (typeof (res as any).flush === 'function') {
- (res as any).flush();
- }
- },
- undefined,
- mode
- );
+ aiResponse = result.response;
+ isJsonResponse = result.isJson;
+ totalInvestigationTimeMs = result.totalInvestigationTimeMs;
- aiResponse = result.response;
- isJsonResponse = result.isJson;
- totalInvestigationTimeMs = result.totalInvestigationTimeMs;
-
- // If this is a JSON investigation response, send it as a special complete_json event
- // This ensures frontend receives the cleaned JSON instead of trying to parse accumulated chunks
- if (isJsonResponse) {
- // Inject repository info into the JSON response for GitHub links
- try {
- const parsed = JSON.parse(aiResponse);
- if (parsed.type === 'investigation_complete' && context.lifecycleContext?.pullRequest) {
- const fullName = context.lifecycleContext.pullRequest.fullName;
- const branch = context.lifecycleContext.pullRequest.branch;
- if (fullName && branch) {
- const [owner, name] = fullName.split('/');
- parsed.repository = { owner, name, branch };
-
- // Ensure all string fields are properly escaped for JSON serialization
- const sanitizeForJson = (obj: any): any => {
- if (typeof obj === 'string') {
- // Already properly escaped by backend - no need to re-escape
- return obj;
- } else if (Array.isArray(obj)) {
- return obj.map((item) => sanitizeForJson(item));
- } else if (obj && typeof obj === 'object') {
- const sanitized: any = {};
- for (const key in obj) {
- if (Object.prototype.hasOwnProperty.call(obj, key)) {
- sanitized[key] = sanitizeForJson(obj[key]);
+ if (isJsonResponse) {
+ try {
+ const parsed = JSON.parse(aiResponse);
+ if (parsed.type === 'investigation_complete' && context.lifecycleContext?.pullRequest) {
+ const fullName = context.lifecycleContext.pullRequest.fullName;
+ const branch = context.lifecycleContext.pullRequest.branch;
+ if (fullName && branch) {
+ const [owner, name] = fullName.split('/');
+ parsed.repository = { owner, name, branch };
+
+ const sanitizeForJson = (obj: any): any => {
+ if (typeof obj === 'string') {
+ return obj;
+ } else if (Array.isArray(obj)) {
+ return obj.map((item) => sanitizeForJson(item));
+ } else if (obj && typeof obj === 'object') {
+ const sanitized: any = {};
+ for (const key in obj) {
+ if (Object.prototype.hasOwnProperty.call(obj, key)) {
+ sanitized[key] = sanitizeForJson(obj[key]);
+ }
}
+ return sanitized;
}
- return sanitized;
- }
- return obj;
- };
+ return obj;
+ };
- const sanitized = sanitizeForJson(parsed);
- aiResponse = JSON.stringify(sanitized, null, 2);
+ const sanitized = sanitizeForJson(parsed);
+ aiResponse = JSON.stringify(sanitized, null, 2);
- // Validate the final JSON before sending
- JSON.parse(aiResponse); // This will throw if invalid
+ JSON.parse(aiResponse);
+ }
}
+ } catch (e) {
+ getLogger().error(
+ { error: e instanceof Error ? e.message : String(e), responseLength: aiResponse.length },
+ 'AI: JSON validation failed'
+ );
+ aiResponse =
+ '⚠️ Investigation completed but response formatting failed. Please try asking a more specific question.';
+ isJsonResponse = false;
}
- } catch (e) {
- // If JSON parsing/validation fails, log error and send plain text fallback
- logger.error(
- { error: e, responseLength: aiResponse.length },
- 'JSON validation failed for investigation response'
- );
- // Convert to plain text message
- aiResponse =
- '⚠️ Investigation completed but response formatting failed. Please try asking a more specific question.';
- isJsonResponse = false; // Treat as plain text
- }
- if (isJsonResponse) {
+ if (isJsonResponse) {
+ res.write(
+ `data: ${JSON.stringify({ type: 'complete_json', content: aiResponse, totalInvestigationTimeMs })}\n\n`
+ );
+ }
+ }
+ } catch (error: any) {
+ getLogger().error({ error }, 'AI: query failed');
+
+ if (
+ error?.status === 429 ||
+ error?.error?.error?.type === 'rate_limit_error' ||
+ error?.message?.includes('RATE_LIMIT_EXCEEDED') ||
+ error?.message?.includes('quota exceeded')
+ ) {
+ res.write(
+ `data: ${JSON.stringify({
+ error:
+ 'Rate limit exceeded. Please wait a moment and try again. The AI service is currently handling many requests.',
+ code: 'RATE_LIMIT_EXCEEDED',
+ retryAfter: 60,
+ })}\n\n`
+ );
+ } else {
res.write(
- `data: ${JSON.stringify({ type: 'complete_json', content: aiResponse, totalInvestigationTimeMs })}\n\n`
+ `data: ${JSON.stringify({
+ error: error?.message || error?.toString() || 'AI service error',
+ code: 'LLM_API_ERROR',
+ })}\n\n`
);
}
+ res.end();
+ return;
}
- } catch (error: any) {
- logger.error({ error, errorMessage: error?.message, errorStack: error?.stack }, 'LLM query failed');
-
- // Check if it's a rate limit error
- if (
- error?.status === 429 ||
- error?.error?.error?.type === 'rate_limit_error' ||
- error?.message?.includes('RATE_LIMIT_EXCEEDED') ||
- error?.message?.includes('quota exceeded')
- ) {
- res.write(
- `data: ${JSON.stringify({
- error:
- 'Rate limit exceeded. Please wait a moment and try again. The AI service is currently handling many requests.',
- code: 'RATE_LIMIT_EXCEEDED',
- retryAfter: 60,
- })}\n\n`
- );
- } else {
- res.write(
- `data: ${JSON.stringify({
- error: error?.message || error?.toString() || 'AI service error',
- code: 'LLM_API_ERROR',
- })}\n\n`
- );
- }
- res.end();
- return;
- }
- await conversationService.addMessage(buildUuid, {
- role: 'user',
- content: message,
- timestamp: Date.now(),
- isSystemAction,
- });
+ await conversationService.addMessage(buildUuid, {
+ role: 'user',
+ content: message,
+ timestamp: Date.now(),
+ isSystemAction,
+ });
- await conversationService.addMessage(buildUuid, {
- role: 'assistant',
- content: aiResponse,
- timestamp: Date.now(),
- });
+ await conversationService.addMessage(buildUuid, {
+ role: 'assistant',
+ content: aiResponse,
+ timestamp: Date.now(),
+ });
- res.write(`data: ${JSON.stringify({ type: 'complete', totalInvestigationTimeMs })}\n\n`);
- res.end();
+ res.write(`data: ${JSON.stringify({ type: 'complete', totalInvestigationTimeMs })}\n\n`);
+ res.end();
+ });
} catch (error: any) {
- logger.error(
- { error, errorMessage: error?.message, errorStack: error?.stack },
- 'Unexpected error in AI agent chat'
- );
+ getLogger().error({ error }, 'AI: chat request failed');
res.write(`data: ${JSON.stringify({ error: error?.message || 'Internal error' })}\n\n`);
res.end();
}
diff --git a/src/pages/api/v1/ai/config.ts b/src/pages/api/v1/ai/config.ts
index 84706bb..bba57b8 100644
--- a/src/pages/api/v1/ai/config.ts
+++ b/src/pages/api/v1/ai/config.ts
@@ -15,6 +15,7 @@
*/
import { NextApiRequest, NextApiResponse } from 'next';
+import { getLogger } from 'server/lib/logger';
import GlobalConfigService from 'server/services/globalConfig';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -39,6 +40,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
configured: apiKeySet,
});
} catch (error) {
+ getLogger().error({ error }, 'AI: config fetch failed');
return res.status(500).json({ enabled: false });
}
}
diff --git a/src/pages/api/v1/ai/models.ts b/src/pages/api/v1/ai/models.ts
index 889f009..7d9ce30 100644
--- a/src/pages/api/v1/ai/models.ts
+++ b/src/pages/api/v1/ai/models.ts
@@ -16,9 +16,7 @@
import { NextApiRequest, NextApiResponse } from 'next';
import GlobalConfigService from 'server/services/globalConfig';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ filename: 'api/v1/ai/models' });
+import { getLogger } from 'server/lib/logger';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method !== 'GET') {
@@ -34,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}
if (!aiAgentConfig.providers || !Array.isArray(aiAgentConfig.providers)) {
- logger.warn('aiAgent config missing providers array');
+ getLogger().warn('AI: config missing providers array');
return res.status(200).json({ models: [] });
}
@@ -58,7 +56,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.status(200).json({ models });
} catch (error: any) {
- logger.error({ error, errorMessage: error?.message }, 'Failed to fetch available models');
+ getLogger().error({ error }, 'AI: models fetch failed');
return res.status(500).json({ error: 'Failed to fetch available models' });
}
}
diff --git a/src/pages/api/v1/builds/[uuid]/deploy.ts b/src/pages/api/v1/builds/[uuid]/deploy.ts
index 4f170aa..e09c7b8 100644
--- a/src/pages/api/v1/builds/[uuid]/deploy.ts
+++ b/src/pages/api/v1/builds/[uuid]/deploy.ts
@@ -15,15 +15,11 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import { Build } from 'server/models';
import { nanoid } from 'nanoid';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'builds/[uuid]/deploy.ts',
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/deploy:
@@ -88,36 +84,44 @@ const logger = rootLogger.child({
*/
// eslint-disable-next-line import/no-anonymous-default-export
export default async (req: NextApiRequest, res: NextApiResponse) => {
- if (req.method !== 'POST') {
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
-
+ const correlationId = `api-redeploy-${Date.now()}-${nanoid(8)}`;
const { uuid } = req.query;
- try {
- const buildService = new BuildService();
- const build: Build = await buildService.db.models.Build.query()
- .findOne({ uuid })
- .withGraphFetched('deploys.deployable');
-
- if (!build) {
- logger.info(`Build with UUID ${uuid} not found`);
- return res.status(404).json({ error: `Build not found for ${uuid}` });
+ return withLogContext({ correlationId, buildUuid: uuid as string }, async () => {
+ if (req.method !== 'POST') {
+ return res.status(405).json({ error: `${req.method} is not allowed` });
}
- const buildId = build.id;
- const runUUID = nanoid();
- await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
- buildId,
- runUUID,
- });
+ try {
+ getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: redeploy requested');
+
+ const buildService = new BuildService();
+ const build: Build = await buildService.db.models.Build.query()
+ .findOne({ uuid })
+ .withGraphFetched('deploys.deployable');
- return res.status(200).json({
- status: 'success',
- message: `Redeploy for build ${uuid} has been queued`,
- });
- } catch (error) {
- logger.error(`Unable to proceed with redeploy for build ${uuid}. Error: \n ${error}`);
- return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` });
- }
+ if (!build) {
+ getLogger().debug('Build not found');
+ return res.status(404).json({ error: `Build not found for ${uuid}` });
+ }
+
+ const buildId = build.id;
+ const runUUID = nanoid();
+ await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
+ buildId,
+ runUUID,
+ correlationId,
+ });
+
+ getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: redeploy queued');
+
+ return res.status(200).json({
+ status: 'success',
+ message: `Redeploy for build ${uuid} has been queued`,
+ });
+ } catch (error) {
+ getLogger({ stage: LogStage.BUILD_FAILED }).error({ error }, 'Build: redeploy failed');
+ return res.status(500).json({ error: `Unable to proceed with redeploy for build ${uuid}.` });
+ }
+ });
};
diff --git a/src/pages/api/v1/builds/[uuid]/graph.ts b/src/pages/api/v1/builds/[uuid]/graph.ts
index 25a31da..7493abc 100644
--- a/src/pages/api/v1/builds/[uuid]/graph.ts
+++ b/src/pages/api/v1/builds/[uuid]/graph.ts
@@ -16,14 +16,10 @@
import { NextApiRequest, NextApiResponse } from 'next/types';
import { generateGraph } from 'server/lib/dependencyGraph';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import { Build } from 'server/models';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'builds/[uuid]/graph.ts',
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/graph:
@@ -88,30 +84,31 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
const { uuid } = req.query;
- try {
- const buildService = new BuildService();
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ try {
+ const buildService = new BuildService();
+
+ const build: Build = await buildService.db.models.Build.query()
+ .findOne({
+ uuid,
+ })
+ .withGraphFetched('[deploys.deployable, deployables]');
- const build: Build = await buildService.db.models.Build.query()
- .findOne({
- uuid,
- })
- .withGraphFetched('[deploys.deployable, deployables]');
+ if (Object.keys(build.dependencyGraph).length === 0) {
+ const dependencyGraph = await generateGraph(build, 'TB');
+ await build.$query().patchAndFetch({
+ dependencyGraph,
+ });
+ }
- if (Object.keys(build.dependencyGraph).length === 0) {
- // generate the graph if it does not exist
- const dependencyGraph = await generateGraph(build, 'TB');
- await build.$query().patchAndFetch({
- dependencyGraph,
+ return res.status(200).json({
+ status: 'success',
+ message: `Dependency graph for ${uuid} returned.`,
+ dependencyGraph: build.dependencyGraph,
});
+ } catch (error) {
+ getLogger().error({ error }, 'Build: dependency graph fetch failed');
+ res.status(500).json({ error: 'An unexpected error occurred.' });
}
-
- return res.status(200).json({
- status: 'success',
- message: `Dependency graph for ${uuid} returned.`,
- dependencyGraph: build.dependencyGraph,
- });
- } catch (error) {
- logger.error(`Eorror fetching dependency graph for ${uuid}: ${error}`);
- res.status(500).json({ error: 'An unexpected error occurred.' });
- }
+ });
};
diff --git a/src/pages/api/v1/builds/[uuid]/index.ts b/src/pages/api/v1/builds/[uuid]/index.ts
index 3df9d70..29e2034 100644
--- a/src/pages/api/v1/builds/[uuid]/index.ts
+++ b/src/pages/api/v1/builds/[uuid]/index.ts
@@ -16,19 +16,14 @@
import { nanoid } from 'nanoid';
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import { Build } from 'server/models';
import BuildService from 'server/services/build';
import OverrideService from 'server/services/override';
-const logger = rootLogger.child({
- filename: 'builds/[uuid]/index.ts',
-});
-
async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) {
- const { uuid } = req.query;
-
try {
+ const { uuid } = req.query;
const buildService = new BuildService();
const build = await buildService.db.models.Build.query()
@@ -52,23 +47,23 @@ async function retrieveBuild(req: NextApiRequest, res: NextApiResponse) {
);
if (!build) {
- logger.info(`Build with UUID ${uuid} not found`);
+ getLogger().debug('Build not found');
return res.status(404).json({ error: 'Build not found' });
}
return res.status(200).json(build);
} catch (error) {
- logger.error(`Error fetching build ${uuid}:`, error);
+ getLogger().error({ error }, 'API: build fetch failed');
return res.status(500).json({ error: 'An unexpected error occurred' });
}
}
-async function updateBuild(req: NextApiRequest, res: NextApiResponse) {
+async function updateBuild(req: NextApiRequest, res: NextApiResponse, correlationId: string) {
const { uuid } = req.query;
const { uuid: newUuid } = req.body;
if (!newUuid || typeof newUuid !== 'string') {
- logger.info(`[${uuid}] Missing or invalid uuid in request body`);
+ getLogger().debug('Missing or invalid uuid in request body');
return res.status(400).json({ error: 'uuid is required' });
}
@@ -78,27 +73,29 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse) {
const build: Build = await override.db.models.Build.query().findOne({ uuid }).withGraphFetched('pullRequest');
if (!build) {
- logger.info(`[${uuid}] Build not found, cannot patch uuid.`);
+ getLogger().debug('Build not found, cannot patch uuid');
return res.status(404).json({ error: 'Build not found' });
}
if (newUuid === build.uuid) {
- logger.info(`[${uuid}] Attempted to update UUID to same value: ${newUuid}`);
+ getLogger().debug(`Attempted to update UUID to same value: newUuid=${newUuid}`);
return res.status(400).json({ error: 'UUID must be different' });
}
const validation = await override.validateUuid(newUuid);
if (!validation.valid) {
- logger.info(`[${uuid}] UUID validation failed on attempt to change: ${validation.error}`);
+ getLogger().debug(`UUID validation failed: error=${validation.error}`);
return res.status(400).json({ error: validation.error });
}
const result = await override.updateBuildUuid(build, newUuid);
if (build.pullRequest?.deployOnUpdate) {
+ getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Triggering redeploy after UUID update`);
await new BuildService().resolveAndDeployBuildQueue.add('resolve-deploy', {
buildId: build.id,
runUUID: nanoid(),
+ correlationId,
});
}
@@ -108,7 +105,7 @@ async function updateBuild(req: NextApiRequest, res: NextApiResponse) {
},
});
} catch (error) {
- logger.error({ error }, `[${uuid}] Error updating UUID to ${newUuid}: ${error}`);
+ getLogger().error({ error }, `API: UUID update failed newUuid=${newUuid}`);
return res.status(500).json({ error: 'An unexpected error occurred' });
}
}
@@ -346,12 +343,19 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(400).json({ error: 'Invalid UUID' });
}
- switch (req.method) {
- case 'GET':
- return retrieveBuild(req, res);
- case 'PATCH':
- return updateBuild(req, res);
- default:
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ if (req.method === 'PATCH') {
+ const correlationId = `api-build-update-${Date.now()}-${nanoid(8)}`;
+ return withLogContext({ correlationId }, async () => {
+ return updateBuild(req, res, correlationId);
+ });
+ }
+
+ switch (req.method) {
+ case 'GET':
+ return retrieveBuild(req, res);
+ default:
+ return res.status(405).json({ error: `${req.method} is not allowed` });
+ }
+ });
};
diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts
index 6ea5e87..2a1e8cf 100644
--- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts
+++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/events.ts
@@ -142,14 +142,10 @@
* example: Failed to communicate with Kubernetes.
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import * as k8s from '@kubernetes/client-node';
import { HttpError } from '@kubernetes/client-node';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
interface K8sEvent {
name: string;
namespace: string;
@@ -220,47 +216,49 @@ async function getJobEvents(jobName: string, namespace: string): Promise {
- if (req.method !== 'GET') {
- logger.warn({ method: req.method }, 'Method not allowed');
- res.setHeader('Allow', ['GET']);
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
-
const { uuid, jobName } = req.query;
- if (typeof uuid !== 'string' || typeof jobName !== 'string') {
- logger.warn({ uuid, jobName }, 'Missing or invalid query parameters');
- return res.status(400).json({ error: 'Missing or invalid uuid or jobName parameters' });
- }
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ if (req.method !== 'GET') {
+ getLogger().warn(`API: method not allowed method=${req.method}`);
+ res.setHeader('Allow', ['GET']);
+ return res.status(405).json({ error: `${req.method} is not allowed` });
+ }
- try {
- const namespace = `env-${uuid}`;
+ if (typeof uuid !== 'string' || typeof jobName !== 'string') {
+ getLogger().warn(`API: invalid params uuid=${uuid} jobName=${jobName}`);
+ return res.status(400).json({ error: 'Missing or invalid uuid or jobName parameters' });
+ }
- const events = await getJobEvents(jobName, namespace);
+ try {
+ const namespace = `env-${uuid}`;
- const response: EventsResponse = {
- events,
- };
+ const events = await getJobEvents(jobName, namespace);
- return res.status(200).json(response);
- } catch (error) {
- logger.error({ err: error }, `Error getting events for job ${jobName} in environment ${uuid}.`);
+ const response: EventsResponse = {
+ events,
+ };
+
+ return res.status(200).json(response);
+ } catch (error) {
+ getLogger().error({ error }, `API: events fetch failed jobName=${jobName}`);
- if (error instanceof HttpError) {
- if (error.response?.statusCode === 404) {
- return res.status(404).json({ error: 'Environment or job not found.' });
+ if (error instanceof HttpError) {
+ if (error.response?.statusCode === 404) {
+ return res.status(404).json({ error: 'Environment or job not found.' });
+ }
+ return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
}
- return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
- }
- return res.status(500).json({ error: 'Internal server error occurred.' });
- }
+ return res.status(500).json({ error: 'Internal server error occurred.' });
+ }
+ });
};
export default eventsHandler;
diff --git a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts
index ebb62b1..99d08ed 100644
--- a/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts
+++ b/src/pages/api/v1/builds/[uuid]/jobs/[jobName]/logs.ts
@@ -15,13 +15,9 @@
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import unifiedLogStreamHandler from '../../services/[name]/logs/[jobName]';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/jobs/{jobName}/logs:
@@ -101,15 +97,15 @@ const logger = rootLogger.child({
* description: Internal server error
*/
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- logger.info(
- `method=${req.method} jobName=${req.query.jobName} message="Job logs endpoint called, delegating to unified handler"`
- );
+ const { uuid, jobName } = req.query;
+
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ getLogger().info(`API: job logs called method=${req.method} jobName=${jobName}`);
- // Set type to 'webhook' for job logs
- req.query.type = 'webhook';
+ req.query.type = 'webhook';
- // Set name to undefined since it's not required for webhook jobs
- req.query.name = undefined;
+ req.query.name = undefined;
- return unifiedLogStreamHandler(req, res);
+ return unifiedLogStreamHandler(req, res);
+ });
}
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts
index 450199d..5fa019e 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/build.ts
@@ -15,7 +15,7 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger';
import GithubService from 'server/services/github';
import { Build } from 'server/models';
import DeployService from 'server/services/deploy';
@@ -23,10 +23,6 @@ import { DeployStatus } from 'shared/constants';
import { nanoid } from 'nanoid';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'builds/[uuid]/services/[name]/build.ts',
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/services/{name}/build:
@@ -102,59 +98,69 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
}
const { uuid, name } = req.query;
+ const correlationId = `api-service-redeploy-${Date.now()}-${nanoid(8)}`;
- try {
- const githubService = new GithubService();
- const build: Build = await githubService.db.models.Build.query()
- .findOne({
- uuid,
- })
- .withGraphFetched('deploys.deployable');
+ return withLogContext({ correlationId, buildUuid: uuid as string }, async () => {
+ try {
+ const githubService = new GithubService();
+ const build: Build = await githubService.db.models.Build.query()
+ .findOne({
+ uuid,
+ })
+ .withGraphFetched('deploys.deployable');
- const buildId = build.id;
- if (!build) {
- logger.info(`Build with UUID ${uuid} not found`);
- return res.status(404).json({ error: `Build not found for ${uuid}` });
- }
+ if (!build) {
+ getLogger().debug(`Build not found`);
+ return res.status(404).json({ error: `Build not found for ${uuid}` });
+ }
+ const buildId = build.id;
- const deploy = build.deploys.find((deploy) => deploy.deployable.name === name);
+ const deploy = build.deploys.find((deploy) => deploy.deployable.name === name);
- if (!deploy) {
- logger.info(`Deployable ${name} not found in build ${uuid}`);
- res.status(404).json({ error: `${name} service is not found in ${uuid} build.` });
- return;
- }
+ if (!deploy) {
+ getLogger().debug(`Deployable not found: service=${name}`);
+ res.status(404).json({ error: `${name} service is not found in ${uuid} build.` });
+ return;
+ }
- const githubRepositoryId = deploy.deployable.repositoryId;
+ const githubRepositoryId = deploy.deployable.repositoryId;
- const runUUID = nanoid();
- const buildService = new BuildService();
- await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
- buildId,
- githubRepositoryId,
- runUUID,
- });
+ const runUUID = nanoid();
+ const buildService = new BuildService();
+ await buildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
+ buildId,
+ githubRepositoryId,
+ runUUID,
+ ...extractContextForQueue(),
+ });
- const deployService = new DeployService();
+ getLogger({ stage: LogStage.BUILD_QUEUED }).info(`Build: service redeploy queued service=${name}`);
- await deploy.$query().patchAndFetch({
- runUUID,
- });
+ const deployService = new DeployService();
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.QUEUED,
- },
- runUUID
- );
- return res.status(200).json({
- status: 'success',
- message: `Redeploy for service ${name} in build ${uuid} has been queued`,
- });
- } catch (error) {
- logger.error(`Unable to proceed with redeploy for services ${name} in build ${uuid}. Error: \n ${error}`);
- return res.status(500).json({ error: `Unable to proceed with redeploy for services ${name} in build ${uuid}.` });
- }
+ await deploy.$query().patchAndFetch({
+ runUUID,
+ });
+
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.QUEUED,
+ },
+ runUUID,
+ githubRepositoryId
+ );
+ return res.status(200).json({
+ status: 'success',
+ message: `Redeploy for service ${name} in build ${uuid} has been queued`,
+ });
+ } catch (error) {
+ getLogger({ stage: LogStage.BUILD_FAILED }).error(
+ { error },
+ `Unable to proceed with redeploy for services ${name} in build ${uuid}`
+ );
+ return res.status(500).json({ error: `Unable to proceed with redeploy for services ${name} in build ${uuid}.` });
+ }
+ });
};
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts
index fe281b6..98cf834 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs.ts
@@ -15,14 +15,10 @@
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import { HttpError } from '@kubernetes/client-node';
import { BuildJobInfo, getNativeBuildJobs } from 'server/lib/kubernetes/getNativeBuildJobs';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
interface BuildLogsListResponse {
builds: BuildJobInfo[];
}
@@ -146,39 +142,41 @@ interface BuildLogsListResponse {
*/
// eslint-disable-next-line import/no-anonymous-default-export
export default async (req: NextApiRequest, res: NextApiResponse) => {
- if (req.method !== 'GET') {
- logger.warn({ method: req.method }, 'Method not allowed');
- res.setHeader('Allow', ['GET']);
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
-
const { uuid, name } = req.query;
- if (typeof uuid !== 'string' || typeof name !== 'string') {
- logger.warn({ uuid, name }, 'Missing or invalid query parameters');
- return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' });
- }
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ if (req.method !== 'GET') {
+ getLogger().warn(`API: method not allowed method=${req.method}`);
+ res.setHeader('Allow', ['GET']);
+ return res.status(405).json({ error: `${req.method} is not allowed` });
+ }
- try {
- const namespace = `env-${uuid}`;
+ if (typeof uuid !== 'string' || typeof name !== 'string') {
+ getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`);
+ return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' });
+ }
- const buildJobs = await getNativeBuildJobs(name, namespace);
+ try {
+ const namespace = `env-${uuid}`;
- const response: BuildLogsListResponse = {
- builds: buildJobs,
- };
+ const buildJobs = await getNativeBuildJobs(name, namespace);
- return res.status(200).json(response);
- } catch (error) {
- logger.error({ err: error }, `Error getting build logs for service ${name} in environment ${uuid}.`);
+ const response: BuildLogsListResponse = {
+ builds: buildJobs,
+ };
- if (error instanceof HttpError) {
- if (error.response?.statusCode === 404) {
- return res.status(404).json({ error: 'Environment or service not found.' });
+ return res.status(200).json(response);
+ } catch (error) {
+ getLogger().error({ error }, `API: build logs fetch failed service=${name}`);
+
+ if (error instanceof HttpError) {
+ if (error.response?.statusCode === 404) {
+ return res.status(404).json({ error: 'Environment or service not found.' });
+ }
+ return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
}
- return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
- }
- return res.status(500).json({ error: 'Internal server error occurred.' });
- }
+ return res.status(500).json({ error: 'Internal server error occurred.' });
+ }
+ });
};
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts
index 0c499ff..c457701 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/buildLogs/[jobName].ts
@@ -15,13 +15,9 @@
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import unifiedLogStreamHandler from '../logs/[jobName]';
-const logger = rootLogger.child({
- filename: 'buildLogs/[jobName].ts',
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/services/{name}/buildLogs/{jobName}:
@@ -101,11 +97,13 @@ const logger = rootLogger.child({
* description: Internal server error
*/
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- logger.info(
- `method=${req.method} jobName=${req.query.jobName} message="Build logs endpoint called, delegating to unified handler"`
- );
+ const { uuid, jobName } = req.query;
+
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ getLogger().info(`API: build logs endpoint called method=${req.method} jobName=${jobName}`);
- req.query.type = 'build';
+ req.query.type = 'build';
- return unifiedLogStreamHandler(req, res);
+ return unifiedLogStreamHandler(req, res);
+ });
}
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts
index ad69c3b..cc02f2c 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs.ts
@@ -15,14 +15,10 @@
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import { HttpError } from '@kubernetes/client-node';
import { DeploymentJobInfo, getDeploymentJobs } from 'server/lib/kubernetes/getDeploymentJobs';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
interface DeployLogsListResponse {
deployments: DeploymentJobInfo[];
}
@@ -112,41 +108,43 @@ interface DeployLogsListResponse {
* description: Internal server error
*/
const deployLogsHandler = async (req: NextApiRequest, res: NextApiResponse) => {
- if (req.method !== 'GET') {
- logger.warn({ method: req.method }, 'Method not allowed');
- res.setHeader('Allow', ['GET']);
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
-
const { uuid, name } = req.query;
- if (typeof uuid !== 'string' || typeof name !== 'string') {
- logger.warn({ uuid, name }, 'Missing or invalid query parameters');
- return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' });
- }
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ if (req.method !== 'GET') {
+ getLogger().warn(`API: method not allowed method=${req.method}`);
+ res.setHeader('Allow', ['GET']);
+ return res.status(405).json({ error: `${req.method} is not allowed` });
+ }
- try {
- const namespace = `env-${uuid}`;
+ if (typeof uuid !== 'string' || typeof name !== 'string') {
+ getLogger().warn(`API: invalid params uuid=${uuid} name=${name}`);
+ return res.status(400).json({ error: 'Missing or invalid uuid or name parameters' });
+ }
- const deployments = await getDeploymentJobs(name, namespace);
+ try {
+ const namespace = `env-${uuid}`;
- const response: DeployLogsListResponse = {
- deployments,
- };
+ const deployments = await getDeploymentJobs(name, namespace);
- return res.status(200).json(response);
- } catch (error) {
- logger.error({ err: error }, `Error getting deploy logs for service ${name} in environment ${uuid}.`);
+ const response: DeployLogsListResponse = {
+ deployments,
+ };
- if (error instanceof HttpError) {
- if (error.response?.statusCode === 404) {
- return res.status(404).json({ error: 'Environment or service not found.' });
+ return res.status(200).json(response);
+ } catch (error) {
+ getLogger().error({ error }, `API: deploy logs fetch failed service=${name}`);
+
+ if (error instanceof HttpError) {
+ if (error.response?.statusCode === 404) {
+ return res.status(404).json({ error: 'Environment or service not found.' });
+ }
+ return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
}
- return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
- }
- return res.status(500).json({ error: 'Internal server error occurred.' });
- }
+ return res.status(500).json({ error: 'Internal server error occurred.' });
+ }
+ });
};
export default deployLogsHandler;
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts
index 0750189..02ef1bf 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployLogs/[jobName].ts
@@ -131,21 +131,21 @@
* example: Failed to communicate with Kubernetes.
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import unifiedLogStreamHandler from '../logs/[jobName]';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
const deployLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => {
- logger.info(
- `method=${req.method} jobName=${req.query.jobName} message="Deploy logs endpoint called, delegating to unified handler"`
- );
+ const { uuid, jobName } = req.query;
+
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ getLogger().info(
+ `method=${req.method} jobName=${jobName} Deploy logs endpoint called, delegating to unified handler`
+ );
- req.query.type = 'deploy';
+ req.query.type = 'deploy';
- return unifiedLogStreamHandler(req, res);
+ return unifiedLogStreamHandler(req, res);
+ });
};
export default deployLogStreamHandler;
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts
index a6b3f96..1ab28a7 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/deployment.ts
@@ -15,15 +15,11 @@
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import * as k8s from '@kubernetes/client-node';
import { HttpError } from '@kubernetes/client-node';
import { Deploy } from 'server/models';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
@@ -47,12 +43,12 @@ async function getHelmDeploymentDetails(namespace: string, deployUuid: string):
try {
const secretName = `sh.helm.release.v1.${deployUuid}.v1`;
- logger.debug(`Checking for Helm secret: ${secretName} in namespace ${namespace}`);
+ getLogger().debug(`Checking for Helm secret: secretName=${secretName} namespace=${namespace}`);
const secret = await coreV1Api.readNamespacedSecret(secretName, namespace);
if (!secret.body.data?.release) {
- logger.debug(`Helm secret ${secretName} found but no release data`);
+ getLogger().debug(`Helm secret found but no release data: secretName=${secretName}`);
return null;
}
@@ -78,8 +74,8 @@ async function getHelmDeploymentDetails(namespace: string, deployUuid: string):
try {
release = JSON.parse(releaseData.toString());
} catch (parseError: any) {
- logger.warn(
- `Failed to parse Helm release data for ${deployUuid}: decompress_error=${decompressError.message} parse_error=${parseError.message}`
+ getLogger().warn(
+ `Failed to parse Helm release data: deployUuid=${deployUuid} decompress_error=${decompressError.message} parse_error=${parseError.message}`
);
return null;
}
@@ -247,52 +243,54 @@ async function getGitHubDeploymentDetails(
* type: string
*/
const handler = async (req: NextApiRequest, res: NextApiResponse) => {
- if (req.method !== 'GET') {
- logger.warn({ method: req.method }, 'Method not allowed');
- res.setHeader('Allow', ['GET']);
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
-
const { uuid, name } = req.query;
- if (typeof uuid !== 'string' || typeof name !== 'string') {
- logger.warn({ uuid, name }, 'Missing or invalid query parameters');
- return res.status(400).json({ error: 'Missing or invalid parameters' });
- }
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ if (req.method !== 'GET') {
+ getLogger().warn(`API: method not allowed method=${req.method}`);
+ res.setHeader('Allow', ['GET']);
+ return res.status(405).json({ error: `${req.method} is not allowed` });
+ }
- const deployUuid = `${name}-${uuid}`;
+ if (typeof uuid !== 'string' || typeof name !== 'string') {
+ getLogger().warn(`API: invalid query params uuid=${uuid} name=${name}`);
+ return res.status(400).json({ error: 'Missing or invalid parameters' });
+ }
- try {
- const namespace = `env-${uuid}`;
+ const deployUuid = `${name}-${uuid}`;
- logger.info(`Fetching deployment details: deployUuid=${deployUuid} namespace=${namespace} service=${name}`);
+ try {
+ const namespace = `env-${uuid}`;
- const helmDetails = await getHelmDeploymentDetails(namespace, deployUuid);
- if (helmDetails) {
- logger.info(`Found Helm deployment details for ${deployUuid}`);
- return res.status(200).json(helmDetails);
- }
+ getLogger().debug(`Fetching deployment details: deployUuid=${deployUuid} namespace=${namespace} service=${name}`);
- const githubDetails = await getGitHubDeploymentDetails(namespace, deployUuid);
- if (githubDetails) {
- logger.info(`Found GitHub-type deployment details for ${deployUuid}`);
- return res.status(200).json(githubDetails);
- }
+ const helmDetails = await getHelmDeploymentDetails(namespace, deployUuid);
+ if (helmDetails) {
+ getLogger().debug(`Found Helm deployment details: deployUuid=${deployUuid}`);
+ return res.status(200).json(helmDetails);
+ }
- logger.warn(`No deployment details found for ${deployUuid}`);
- return res.status(404).json({ error: 'Deployment not found' });
- } catch (error) {
- logger.error({ err: error }, `Error getting deployment details for ${deployUuid}`);
+ const githubDetails = await getGitHubDeploymentDetails(namespace, deployUuid);
+ if (githubDetails) {
+ getLogger().debug(`Found GitHub-type deployment details: deployUuid=${deployUuid}`);
+ return res.status(200).json(githubDetails);
+ }
- if (error instanceof HttpError) {
- if (error.response?.statusCode === 404) {
- return res.status(404).json({ error: 'Deployment not found' });
+ getLogger().warn(`API: deployment not found deployUuid=${deployUuid}`);
+ return res.status(404).json({ error: 'Deployment not found' });
+ } catch (error) {
+ getLogger().error({ error }, `API: deployment details error deployUuid=${deployUuid}`);
+
+ if (error instanceof HttpError) {
+ if (error.response?.statusCode === 404) {
+ return res.status(404).json({ error: 'Deployment not found' });
+ }
+ return res.status(502).json({ error: 'Failed to communicate with Kubernetes' });
}
- return res.status(502).json({ error: 'Failed to communicate with Kubernetes' });
- }
- return res.status(500).json({ error: 'Internal server error' });
- }
+ return res.status(500).json({ error: 'Internal server error' });
+ }
+ });
};
export default handler;
diff --git a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts
index 13dd994..6d3a6da 100644
--- a/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts
+++ b/src/pages/api/v1/builds/[uuid]/services/[name]/logs/[jobName].ts
@@ -159,68 +159,57 @@
* example: Failed to communicate with Kubernetes.
*/
import type { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import { LogStreamingService } from 'server/services/logStreaming';
import { HttpError } from '@kubernetes/client-node';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
const unifiedLogStreamHandler = async (req: NextApiRequest, res: NextApiResponse) => {
- if (req.method !== 'GET') {
- logger.warn(`method=${req.method} message="Method not allowed"`);
- res.setHeader('Allow', ['GET']);
- return res.status(405).json({ error: `${req.method} is not allowed` });
- }
-
const { uuid, name, jobName, type } = req.query;
- // 1. Request Validation
- const isWebhookRequest = type === 'webhook';
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ if (req.method !== 'GET') {
+ getLogger().warn(`API: method not allowed method=${req.method}`);
+ res.setHeader('Allow', ['GET']);
+ return res.status(405).json({ error: `${req.method} is not allowed` });
+ }
+
+ const isWebhookRequest = type === 'webhook';
- if (typeof uuid !== 'string' || typeof jobName !== 'string' || (!isWebhookRequest && typeof name !== 'string')) {
- logger.warn(
- `uuid=${uuid} name=${name} jobName=${jobName} type=${type} message="Missing or invalid query parameters"`
- );
- return res.status(400).json({ error: 'Missing or invalid parameters' });
- }
+ if (typeof uuid !== 'string' || typeof jobName !== 'string' || (!isWebhookRequest && typeof name !== 'string')) {
+ getLogger().warn(`API: invalid params uuid=${uuid} name=${name} jobName=${jobName} type=${type}`);
+ return res.status(400).json({ error: 'Missing or invalid parameters' });
+ }
- if (type && (typeof type !== 'string' || !['build', 'deploy', 'webhook'].includes(type))) {
- logger.warn(`type=${type} message="Invalid type parameter"`);
- return res.status(400).json({ error: 'Invalid type parameter. Must be "build", "deploy", or "webhook"' });
- }
+ if (type && (typeof type !== 'string' || !['build', 'deploy', 'webhook'].includes(type))) {
+ getLogger().warn(`API: invalid type param type=${type}`);
+ return res.status(400).json({ error: 'Invalid type parameter. Must be "build", "deploy", or "webhook"' });
+ }
- try {
- // 2. Call the Service
- const logService = new LogStreamingService();
+ try {
+ const logService = new LogStreamingService();
- // We cast name and type to strings/undefined safely here because of validation above
+ const response = await logService.getLogStreamInfo(
+ uuid,
+ jobName,
+ name as string | undefined,
+ type as string | undefined
+ );
- const response = await logService.getLogStreamInfo(
- uuid,
- jobName,
- name as string | undefined,
- type as string | undefined
- );
+ return res.status(200).json(response);
+ } catch (error: any) {
+ getLogger().error({ error }, `API: log streaming info failed jobName=${jobName} service=${name}`);
- return res.status(200).json(response);
- } catch (error: any) {
- logger.error(
- `jobName=${jobName} uuid=${uuid} name=${name} error="${error}" message="Error getting log streaming info"`
- );
+ if (error.message === 'Build not found') {
+ return res.status(404).json({ error: 'Build not found' });
+ }
- // 3. Error Mapping
- if (error.message === 'Build not found') {
- return res.status(404).json({ error: 'Build not found' });
- }
+ if (error instanceof HttpError || error.message?.includes('Kubernetes') || error.statusCode === 502) {
+ return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
+ }
- if (error instanceof HttpError || error.message?.includes('Kubernetes') || error.statusCode === 502) {
- return res.status(502).json({ error: 'Failed to communicate with Kubernetes.' });
+ return res.status(500).json({ error: 'Internal server error occurred.' });
}
-
- return res.status(500).json({ error: 'Internal server error occurred.' });
- }
+ });
};
export default unifiedLogStreamHandler;
diff --git a/src/pages/api/v1/builds/[uuid]/torndown.ts b/src/pages/api/v1/builds/[uuid]/torndown.ts
index d9991d7..4450dde 100644
--- a/src/pages/api/v1/builds/[uuid]/torndown.ts
+++ b/src/pages/api/v1/builds/[uuid]/torndown.ts
@@ -15,16 +15,12 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import { Build } from 'server/models';
import { BuildStatus, DeployStatus } from 'shared/constants';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'builds/[uuid]/torndown.ts',
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/torndown:
@@ -101,51 +97,54 @@ const logger = rootLogger.child({
// eslint-disable-next-line import/no-anonymous-default-export
export default async (req: NextApiRequest, res: NextApiResponse) => {
if (req.method !== 'PATCH') {
- logger.info({ method: req.method }, `[${req.method}] Method not allowed`);
+ getLogger().debug(`Method not allowed: method=${req.method}`);
return res.status(405).json({ error: `${req.method} is not allowed` });
}
const uuid = req.query?.uuid;
- try {
- if (!uuid) {
- logger.info(`[${uuid}] The uuid is required`);
- return res.status(500).json({ error: 'The uuid is required' });
- }
- const buildService = new BuildService();
+ if (!uuid) {
+ getLogger().debug('The uuid is required');
+ return res.status(500).json({ error: 'The uuid is required' });
+ }
- const build: Build = await buildService.db.models.Build.query()
- .findOne({
- uuid,
- })
- .withGraphFetched('[deploys]');
+ return withLogContext({ buildUuid: uuid as string }, async () => {
+ try {
+ const buildService = new BuildService();
- if (build.isStatic || !build) {
- logger.info(`[${uuid}] The build doesn't exist or is static environment`);
- return res.status(404).json({ error: `The build doesn't exist or is static environment` });
- }
+ const build: Build = await buildService.db.models.Build.query()
+ .findOne({
+ uuid,
+ })
+ .withGraphFetched('[deploys]');
- const deploysIds = build.deploys.map((deploy) => deploy.id);
+      if (!build || build.isStatic) {
+ getLogger().debug('Build does not exist or is static environment');
+ return res.status(404).json({ error: `The build doesn't exist or is static environment` });
+ }
- await buildService.db.models.Build.query().findById(build.id).patch({
- status: BuildStatus.TORN_DOWN,
- statusMessage: 'Namespace was deleted successfully',
- });
+ const deploysIds = build.deploys.map((deploy) => deploy.id);
- await buildService.db.models.Deploy.query()
- .whereIn('id', deploysIds)
- .patch({ status: DeployStatus.TORN_DOWN, statusMessage: 'Namespace was deleted successfully' });
+ await buildService.db.models.Build.query().findById(build.id).patch({
+ status: BuildStatus.TORN_DOWN,
+ statusMessage: 'Namespace was deleted successfully',
+ });
- const updatedDeploys = await buildService.db.models.Deploy.query()
- .whereIn('id', deploysIds)
- .select('id', 'uuid', 'status');
+ await buildService.db.models.Deploy.query()
+ .whereIn('id', deploysIds)
+ .patch({ status: DeployStatus.TORN_DOWN, statusMessage: 'Namespace was deleted successfully' });
- return res.status(200).json({
- status: `The namespace env-${uuid} it was delete sucessfuly`,
- namespacesUpdated: updatedDeploys,
- });
- } catch (error) {
- logger.error({ error }, `[${uuid}] Error in cleanup API in`);
- return res.status(500).json({ error: 'An unexpected error occurred.' });
- }
+ const updatedDeploys = await buildService.db.models.Deploy.query()
+ .whereIn('id', deploysIds)
+ .select('id', 'uuid', 'status');
+
+ return res.status(200).json({
+        status: `The namespace env-${uuid} was deleted successfully`,
+ namespacesUpdated: updatedDeploys,
+ });
+ } catch (error) {
+ getLogger().error({ error }, 'Build: teardown failed');
+ return res.status(500).json({ error: 'An unexpected error occurred.' });
+ }
+ });
};
diff --git a/src/pages/api/v1/builds/[uuid]/webhooks.ts b/src/pages/api/v1/builds/[uuid]/webhooks.ts
index 1f7c2e7..ea068e1 100644
--- a/src/pages/api/v1/builds/[uuid]/webhooks.ts
+++ b/src/pages/api/v1/builds/[uuid]/webhooks.ts
@@ -15,15 +15,12 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { nanoid } from 'nanoid';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import GithubService from 'server/services/github';
import { Build } from 'server/models';
import WebhookService from 'server/services/webhook';
-const logger = rootLogger.child({
- filename: 'builds/[uuid]/webhooks.ts',
-});
-
/**
* @openapi
* /api/v1/builds/{uuid}/webhooks:
@@ -198,57 +195,67 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(400).json({ error: 'Invalid UUID' });
}
- try {
- switch (req.method) {
- case 'GET':
- return retrieveWebhooks(req, res);
- case 'POST':
- return invokeWebhooks(req, res);
- default:
- res.setHeader('Allow', ['GET', 'POST']);
- return res.status(405).json({ error: `${req.method} is not allowed.` });
+ return withLogContext({ buildUuid: uuid }, async () => {
+ try {
+ switch (req.method) {
+ case 'GET':
+ return retrieveWebhooks(req, res);
+ case 'POST':
+ return invokeWebhooks(req, res);
+ default:
+ res.setHeader('Allow', ['GET', 'POST']);
+ return res.status(405).json({ error: `${req.method} is not allowed.` });
+ }
+ } catch (error) {
+ getLogger({ error }).error(`API: webhook request failed method=${req.method}`);
+ res.status(500).json({ error: 'An unexpected error occurred.' });
}
- } catch (error) {
- logger.error(`Error handling ${req.method} request for ${uuid}:`, error);
- res.status(500).json({ error: 'An unexpected error occurred.' });
- }
+ });
};
async function invokeWebhooks(req: NextApiRequest, res: NextApiResponse) {
const { uuid } = req.query;
- try {
- const githubService = new GithubService();
- const build: Build = await githubService.db.models.Build.query().findOne({
- uuid,
- });
+ const correlationId = `api-webhook-invoke-${Date.now()}-${nanoid(8)}`;
- const buildId = build.id;
+ return withLogContext({ correlationId }, async () => {
+ try {
+ const githubService = new GithubService();
+ const build: Build = await githubService.db.models.Build.query().findOne({
+ uuid,
+ });
- if (!build) {
- logger.info(`[API ${uuid}] Build not found`);
- return res.status(404).json({ error: `Build not found for ${uuid}` });
- }
+      if (!build) {
+        getLogger().debug('Build not found');
+        return res.status(404).json({ error: `Build not found for ${uuid}` });
+      }
+
+      const buildId = build.id;
- if (!build.webhooksYaml) {
- logger.info(`[API ${uuid}] No webhooks found for build`);
- return res.status(204).json({
- status: 'no_content',
- message: `No webhooks found for build ${uuid}.`,
+ if (!build.webhooksYaml) {
+ getLogger().debug('No webhooks found for build');
+ return res.status(204).json({
+ status: 'no_content',
+ message: `No webhooks found for build ${uuid}.`,
+ });
+ }
+
+ const webhookService = new WebhookService();
+ await webhookService.webhookQueue.add('webhook', {
+ buildId,
+ correlationId,
});
- }
- const webhookService = new WebhookService();
- await webhookService.webhookQueue.add('webhook', {
- buildId,
- });
- return res.status(200).json({
- status: 'success',
- message: `Webhook for build ${uuid} has been queued`,
- });
- } catch (error) {
- logger.error(`Unable to proceed with webook for build ${uuid}. Error: \n ${error}`);
- return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` });
- }
+ getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).info('Webhook invocation queued via API');
+
+ return res.status(200).json({
+ status: 'success',
+ message: `Webhook for build ${uuid} has been queued`,
+ });
+ } catch (error) {
+ getLogger({ stage: LogStage.WEBHOOK_PROCESSING, error }).error(`Webhook: invoke failed uuid=${uuid}`);
+ return res.status(500).json({ error: `Unable to proceed with triggering webhook for build ${uuid}.` });
+ }
+ });
}
async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) {
@@ -284,7 +291,7 @@ async function retrieveWebhooks(req: NextApiRequest, res: NextApiResponse) {
},
});
} catch (error) {
- logger.error(`Failed to retrieve webhooks for builds ${uuid}. Error: \n ${error}`);
+ getLogger({ error }).error('API: webhooks fetch failed');
return res.status(500).json({ error: `Unable to retrieve webhooks for build ${uuid}.` });
}
}
diff --git a/src/pages/api/v1/builds/index.ts b/src/pages/api/v1/builds/index.ts
index 671c6e8..3405df3 100644
--- a/src/pages/api/v1/builds/index.ts
+++ b/src/pages/api/v1/builds/index.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'api/v1/builds/index.ts',
-});
-
/**
* @openapi
* /api/v1/builds:
@@ -208,7 +204,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(200).json(response);
} catch (error) {
- logger.error('Error fetching builds:', error);
+ getLogger({ error }).error('API: builds fetch failed');
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/config/cache.ts b/src/pages/api/v1/config/cache.ts
index 2eef2b9..7dc5f53 100644
--- a/src/pages/api/v1/config/cache.ts
+++ b/src/pages/api/v1/config/cache.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import GlobalConfigService from 'server/services/globalConfig';
-const logger = rootLogger.child({
- filename: 'v1/config/cache.ts',
-});
-
/**
* @openapi
* /api/v1/config/cache:
@@ -111,7 +107,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(405).json({ error: `${req.method} is not allowed.` });
}
} catch (error) {
- logger.error(`Error occurred on config cache operation: \n ${error}`);
+ getLogger().error({ error }, 'Config: cache operation failed');
res.status(500).json({ error: 'An unexpected error occurred.' });
}
};
@@ -122,7 +118,7 @@ async function getCachedConfig(res: NextApiResponse, refresh: boolean = false) {
const configs = await configService.getAllConfigs(refresh);
return res.status(200).json({ configs });
} catch (error) {
- logger.error(`[API] Error occurred retrieving cache config: \n ${error}`);
+ getLogger().error({ error }, 'Config: cache retrieval failed');
return res.status(500).json({ error: `Unable to retrieve global config values` });
}
}
diff --git a/src/pages/api/v1/deploy-summary.ts b/src/pages/api/v1/deploy-summary.ts
index 06cc692..a0e47fc 100644
--- a/src/pages/api/v1/deploy-summary.ts
+++ b/src/pages/api/v1/deploy-summary.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'deploy-summary.ts',
-});
-
/**
* @openapi
* /api/v1/deploy-summary:
@@ -130,7 +126,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
const build = await buildService.db.models.Build.query().findById(parsedBuildId).select('id');
if (!build) {
- logger.info(`Build with ID ${parsedBuildId} not found`);
+ getLogger().debug(`Build not found: buildId=${parsedBuildId}`);
return res.status(404).json({ error: 'Build not found' });
}
@@ -162,7 +158,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(200).json(result.rows);
} catch (error) {
- logger.error(`Error fetching deploy summary for build ${parsedBuildId}:`, error);
+ getLogger({ error }).error(`API: deploy summary fetch failed buildId=${parsedBuildId}`);
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/deployables.ts b/src/pages/api/v1/deployables.ts
index dd4d5a0..afb0ce3 100644
--- a/src/pages/api/v1/deployables.ts
+++ b/src/pages/api/v1/deployables.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'deployables.ts',
-});
-
/**
* @openapi
* /api/v1/deployables:
@@ -132,7 +128,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
const build = await buildService.db.models.Build.query().findById(parsedBuildId).select('id');
if (!build) {
- logger.info(`Build with ID ${parsedBuildId} not found`);
+ getLogger().debug(`Build not found: buildId=${parsedBuildId}`);
return res.status(404).json({ error: 'Build not found' });
}
@@ -163,7 +159,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(200).json(deployables);
} catch (error) {
- logger.error(`Error fetching deployables for build ${parsedBuildId}:`, error);
+ getLogger({ error }).error(`API: deployables fetch failed buildId=${parsedBuildId}`);
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/deploys.ts b/src/pages/api/v1/deploys.ts
index e122f9c..a3e6e34 100644
--- a/src/pages/api/v1/deploys.ts
+++ b/src/pages/api/v1/deploys.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import BuildService from 'server/services/build';
-const logger = rootLogger.child({
- filename: 'api/v1/deploys.ts',
-});
-
/**
* @openapi
* /api/v1/deploys:
@@ -70,10 +66,6 @@ const logger = rootLogger.child({
* type: string
* env:
* type: object
- * buildLogs:
- * type: string
- * containerLogs:
- * type: string
* serviceId:
* type: integer
* buildId:
@@ -108,8 +100,6 @@ const logger = rootLogger.child({
* type: string
* replicaCount:
* type: integer
- * yamlConfig:
- * type: object
* deployableId:
* type: integer
* isRunningLatest:
@@ -118,8 +108,6 @@ const logger = rootLogger.child({
* type: string
* deployPipelineId:
* type: string
- * buildOutput:
- * type: string
* buildJobName:
* type: string
* 400:
@@ -177,7 +165,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
const build = await buildService.db.models.Build.query().findById(parsedBuildId).select('id');
if (!build) {
- logger.info(`Build with ID ${parsedBuildId} not found`);
+ getLogger().debug(`Build not found: buildId=${parsedBuildId}`);
return res.status(404).json({ error: 'Build not found' });
}
@@ -198,8 +186,6 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
'internalHostname',
'publicUrl',
'env',
- 'buildLogs',
- 'containerLogs',
'serviceId',
'buildId',
'createdAt',
@@ -215,18 +201,16 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
'cname',
'runUUID',
'replicaCount',
- 'yamlConfig',
'deployableId',
'isRunningLatest',
'runningImage',
'deployPipelineId',
- 'buildOutput',
'buildJobName'
);
return res.status(200).json(deploys);
} catch (error) {
- logger.error(`Error fetching deploys for build ${parsedBuildId}:`, error);
+ getLogger({ error }).error(`API: deploys fetch failed buildId=${parsedBuildId}`);
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/pull-requests/[id]/builds.ts b/src/pages/api/v1/pull-requests/[id]/builds.ts
index 222e521..b1827bc 100644
--- a/src/pages/api/v1/pull-requests/[id]/builds.ts
+++ b/src/pages/api/v1/pull-requests/[id]/builds.ts
@@ -15,14 +15,10 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import BuildService from 'server/services/build';
import PullRequestService from 'server/services/pullRequest';
-const logger = rootLogger.child({
- filename: 'pull-requests/[id]/builds.ts',
-});
-
/**
* @openapi
* /api/v1/pull-requests/{id}/builds:
@@ -133,7 +129,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
const pullRequest = await pullRequestService.db.models.PullRequest.query().findById(parsedId).select('id');
if (!pullRequest) {
- logger.info(`Pull request with ID ${parsedId} not found`);
+ getLogger().debug(`Pull request not found: id=${parsedId}`);
return res.status(404).json({ error: 'Pull request not found' });
}
@@ -159,7 +155,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(200).json(builds);
} catch (error) {
- logger.error(`Error fetching builds for pull request ${parsedId}:`, error);
+ getLogger().error({ error }, `API: builds fetch failed pullRequestId=${parsedId}`);
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/pull-requests/[id]/index.ts b/src/pages/api/v1/pull-requests/[id]/index.ts
index e301801..caaf7a1 100644
--- a/src/pages/api/v1/pull-requests/[id]/index.ts
+++ b/src/pages/api/v1/pull-requests/[id]/index.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import PullRequestService from 'server/services/pullRequest';
-const logger = rootLogger.child({
- filename: 'api/v1/pull-requests/[id].ts',
-});
-
/**
* @openapi
* /api/v1/pull-requests/{id}:
@@ -142,13 +138,13 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
);
if (!pullRequest) {
- logger.info(`Pull request with ID ${parsedId} not found`);
+ getLogger().debug(`Pull request not found: id=${parsedId}`);
return res.status(404).json({ error: 'Pull request not found' });
}
return res.status(200).json(pullRequest);
} catch (error) {
- logger.error(`Error fetching pull request ${parsedId}:`, error);
+ getLogger().error({ error }, `API: pull request fetch failed id=${parsedId}`);
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/pull-requests/index.ts b/src/pages/api/v1/pull-requests/index.ts
index 908cf06..afb6f77 100644
--- a/src/pages/api/v1/pull-requests/index.ts
+++ b/src/pages/api/v1/pull-requests/index.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import PullRequestService from 'server/services/pullRequest';
-const logger = rootLogger.child({
- filename: 'api/v1/pull-requests/index.ts',
-});
-
/**
* @openapi
* /api/v1/pull-requests:
@@ -275,7 +271,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(200).json(response);
} catch (error) {
- logger.error('Error fetching pull requests:', error);
+ getLogger().error({ error }, 'API: pull requests fetch failed');
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/repos/index.ts b/src/pages/api/v1/repos/index.ts
index 55d0b79..79de907 100644
--- a/src/pages/api/v1/repos/index.ts
+++ b/src/pages/api/v1/repos/index.ts
@@ -15,13 +15,9 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import PullRequestService from 'server/services/pullRequest';
-const logger = rootLogger.child({
- filename: 'api/v1/repos/index.ts',
-});
-
/**
* @openapi
* /api/v1/repos:
@@ -184,7 +180,7 @@ export default async (req: NextApiRequest, res: NextApiResponse) => {
return res.status(200).json(response);
} catch (error) {
- logger.error('Error fetching repos:', error);
+ getLogger().error({ error }, 'API: repos fetch failed');
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/v1/schema/validate.ts b/src/pages/api/v1/schema/validate.ts
index 29d08af..27b4ed3 100644
--- a/src/pages/api/v1/schema/validate.ts
+++ b/src/pages/api/v1/schema/validate.ts
@@ -102,15 +102,11 @@ type ErrorResponse = {
type Response = ValidationResponse | ErrorResponse;
import { NextApiRequest, NextApiResponse } from 'next/types';
-import { getYamlFileContentFromBranch } from 'server/lib/github';
-import rootLogger from 'server/lib/logger';
+import { getYamlFileContentFromBranch, ConfigFileNotFound } from 'server/lib/github';
+import { getLogger } from 'server/lib/logger';
import { YamlConfigParser, ParsingError } from 'server/lib/yamlConfigParser';
import { YamlConfigValidator, ValidationError } from 'server/lib/yamlConfigValidator';
-const logger = rootLogger.child({
- filename: 'v1/schema/validate',
-});
-
const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse) => {
if (req.method !== 'POST') {
return res.status(405).json({ error: `${req.method} is not allowed` });
@@ -134,7 +130,10 @@ const schemaValidateHandler = async (req: NextApiRequest, res: NextApiResponse {
return res.status(200).json(response);
} catch (error) {
- logger.error('Error fetching users:', error);
+ getLogger().error({ error }, 'API: users fetch failed');
return res.status(500).json({ error: 'An unexpected error occurred' });
}
};
diff --git a/src/pages/api/webhooks/github.ts b/src/pages/api/webhooks/github.ts
index 9bfe94e..2d43293 100644
--- a/src/pages/api/webhooks/github.ts
+++ b/src/pages/api/webhooks/github.ts
@@ -15,32 +15,56 @@
*/
import { NextApiRequest, NextApiResponse } from 'next/types';
-import rootLogger from 'server/lib/logger';
+import tracer from 'dd-trace';
import * as github from 'server/lib/github';
import { LIFECYCLE_MODE } from 'shared/index';
import { stringify } from 'flatted';
import BootstrapJobs from 'server/jobs/index';
import createAndBindServices from 'server/services';
-
-const logger = rootLogger.child({
- filename: 'webhooks/github.ts',
-});
+import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger';
const services = createAndBindServices();
/* Only want to listen on web nodes, otherwise no-op for safety */
// eslint-disable-next-line import/no-anonymous-default-export
export default async (req: NextApiRequest, res: NextApiResponse) => {
- const isVerified = github.verifyWebhookSignature(req);
- if (!isVerified) throw new Error('Webhook not verified');
- if (!['web', 'all'].includes(LIFECYCLE_MODE)) return;
- try {
- if (LIFECYCLE_MODE === 'all') BootstrapJobs(services);
- const message = stringify({ ...req, ...{ headers: req.headers } });
- await services.GithubService.webhookQueue.add('webhook', { message });
- res.status(200).end();
- } catch (error) {
- logger.child({ error }).error(`Github Webhook failure: Error: ${error}`);
- res.status(500).end();
- }
+ const correlationId = (req.headers['x-github-delivery'] as string) || `webhook-${Date.now()}`;
+ const sender = req.body?.sender?.login;
+
+ return withLogContext({ correlationId, sender }, async () => {
+ const isVerified = github.verifyWebhookSignature(req);
+ if (!isVerified) throw new Error('Webhook not verified');
+
+ const event = req.headers['x-github-event'] as string;
+
+ const isBot = sender?.includes('[bot]') === true;
+ if (event === 'issue_comment' && isBot) {
+ tracer.scope().active()?.setTag('manual.drop', true);
+ res.status(200).end();
+ return;
+ }
+
+ getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).info(`Webhook: received event=${event}`);
+
+ if (!['web', 'all'].includes(LIFECYCLE_MODE)) {
+ getLogger({ stage: LogStage.WEBHOOK_SKIPPED }).info('Webhook: skipped reason=wrongMode');
+ return;
+ }
+
+ try {
+ if (LIFECYCLE_MODE === 'all') BootstrapJobs(services);
+ const message = stringify({ ...req, ...{ headers: req.headers } });
+
+ await services.GithubService.webhookQueue.add('webhook', {
+ message,
+ ...extractContextForQueue(),
+ });
+
+ getLogger({ stage: LogStage.WEBHOOK_QUEUED }).info('Webhook: queued');
+ res.status(200).end();
+ } catch (error) {
+ getLogger({ stage: LogStage.WEBHOOK_RECEIVED }).error({ error }, 'Webhook: processing failed');
+ res.status(500).end();
+ }
+ });
};
diff --git a/src/server/database.ts b/src/server/database.ts
index 0024f71..064ed50 100644
--- a/src/server/database.ts
+++ b/src/server/database.ts
@@ -21,11 +21,7 @@ import { IServices } from 'server/services/types';
import Model from 'server/models/_Model';
import knexfile from '../../knexfile';
-import rootLogger from 'server/lib/logger';
-
-const initialLogger = rootLogger.child({
- filename: 'server/database.ts',
-});
+import { getLogger } from 'server/lib/logger';
export default class Database {
models: models.IModels;
@@ -52,8 +48,8 @@ export default class Database {
this.knexConfig = merge({}, knexfile, knexConfig);
}
- setLifecycleConfig(config: any = {}, logger = initialLogger) {
- logger.debug('setLifecycleConfig: setting config', { config });
+ setLifecycleConfig(config: any = {}) {
+ getLogger().debug('Database: setting lifecycle config');
this.config = merge({}, this.config, config);
}
diff --git a/src/server/jobs/index.ts b/src/server/jobs/index.ts
index 1089768..a4c9dfa 100644
--- a/src/server/jobs/index.ts
+++ b/src/server/jobs/index.ts
@@ -15,7 +15,7 @@
*/
import { IServices } from 'server/services/types';
-import rootLogger from '../lib/logger';
+import { getLogger } from 'server/lib/logger';
import { defaultDb, redisClient } from 'server/lib/dependencies';
import RedisClient from 'server/lib/redisClient';
import QueueManager from 'server/lib/queueManager';
@@ -23,16 +23,12 @@ import { MAX_GITHUB_API_REQUEST, GITHUB_API_REQUEST_INTERVAL, QUEUE_NAMES } from
let isBootstrapped = false;
-const logger = rootLogger.child({
- filename: 'jobs/index.ts',
-});
-
export default function bootstrapJobs(services: IServices) {
if (defaultDb.services) {
return;
}
- logger.info(`Bootstrapping jobs...... Yes`);
+ getLogger().info('Jobs: bootstrapping');
const queueManager = QueueManager.getInstance();
queueManager.registerWorker(QUEUE_NAMES.WEBHOOK_PROCESSING, services.GithubService.processWebhooks, {
@@ -49,22 +45,6 @@ export default function bootstrapJobs(services: IServices) {
},
});
- /* Run once per hour */
- services.PullRequest.cleanupClosedPRQueue.add(
- 'cleanup',
- {},
- {
- repeat: {
- every: 60000 * 60, // Once an hour
- },
- }
- );
-
- queueManager.registerWorker(QUEUE_NAMES.CLEANUP, services.PullRequest.processCleanupClosedPRs, {
- connection: redisClient.getConnection(),
- concurrency: 1,
- });
-
services.GlobalConfig.setupCacheRefreshJob();
queueManager.registerWorker(QUEUE_NAMES.GLOBAL_CONFIG_CACHE_REFRESH, services.GlobalConfig.processCacheRefresh, {
@@ -80,8 +60,6 @@ export default function bootstrapJobs(services: IServices) {
concurrency: 1,
});
- services.PullRequest.cleanupClosedPRQueue.add('cleanup', {}, {});
-
queueManager.registerWorker(QUEUE_NAMES.INGRESS_MANIFEST, services.Ingress.createOrUpdateIngressForBuild, {
connection: redisClient.getConnection(),
concurrency: 1,
@@ -133,7 +111,7 @@ export default function bootstrapJobs(services: IServices) {
// This function is used to handle graceful shutdowns add things as needed.
const handleExit = async (signal: string) => {
- logger.info(` ✍️Shutting down (${signal})`);
+ getLogger().info(`Jobs: shutting down signal=${signal}`);
try {
const redisClient = RedisClient.getInstance();
const queueManager = QueueManager.getInstance();
@@ -141,15 +119,15 @@ export default function bootstrapJobs(services: IServices) {
await redisClient.close();
process.exit(0);
} catch (error) {
- logger.info(`Unable to shutdown gracefully: ${error}`);
+ getLogger().error({ error }, 'Jobs: shutdown failed');
process.exit(0);
}
};
process.on('SIGINT', () => handleExit('SIGINT'));
process.on('SIGTERM', () => handleExit('SIGTERM'));
- logger.info(' ✍️Signal handlers registered');
+ getLogger().info('Jobs: signal handlers registered');
}
}
- logger.info('Bootstrapping complete');
+ getLogger().info('Jobs: bootstrap complete');
}
diff --git a/src/server/lib/__tests__/kubernetes.test.ts b/src/server/lib/__tests__/kubernetes.test.ts
index 47d42c4..e283f86 100644
--- a/src/server/lib/__tests__/kubernetes.test.ts
+++ b/src/server/lib/__tests__/kubernetes.test.ts
@@ -19,15 +19,12 @@ import * as k8s from '../kubernetes';
// Mock the logger to avoid console output during tests
jest.mock('../logger', () => ({
- __esModule: true,
- default: {
- child: () => ({
- info: jest.fn(),
- debug: jest.fn(),
- warn: jest.fn(),
- error: jest.fn(),
- }),
- },
+ getLogger: jest.fn(() => ({
+ info: jest.fn(),
+ debug: jest.fn(),
+ warn: jest.fn(),
+ error: jest.fn(),
+ })),
}));
describe('Kubernetes Node Placement', () => {
diff --git a/src/server/lib/__tests__/utils.test.ts b/src/server/lib/__tests__/utils.test.ts
index 2abb59f..bcc1329 100644
--- a/src/server/lib/__tests__/utils.test.ts
+++ b/src/server/lib/__tests__/utils.test.ts
@@ -54,8 +54,15 @@ jest.mock('server/services/globalConfig', () => {
};
});
-jest.mock('server/lib/logger');
-import logger from 'server/lib/logger';
+jest.mock('server/lib/logger', () => ({
+ getLogger: jest.fn().mockReturnValue({
+ info: jest.fn(),
+ warn: jest.fn(),
+ error: jest.fn(),
+ debug: jest.fn(),
+ }),
+}));
+import { getLogger } from 'server/lib/logger';
describe('exec', () => {
test('exec success', async () => {
@@ -68,8 +75,8 @@ describe('exec', () => {
test('exec failure', async () => {
const execCmd = jest.fn().mockRejectedValue(new Error('error'));
- await exec('cmd', ['arg1', 'arg2'], { logger, execCmd });
- expect(logger.error).toHaveBeenCalledWith('exec: error executing {}');
+ await exec('cmd', ['arg1', 'arg2'], { execCmd });
+ expect(getLogger().error).toHaveBeenCalledWith({ error: new Error('error') }, 'Exec: command failed runner=cmd');
});
test('exec no stdout', async () => {
diff --git a/src/server/lib/auth.ts b/src/server/lib/auth.ts
index 854d6ce..f8a98c5 100644
--- a/src/server/lib/auth.ts
+++ b/src/server/lib/auth.ts
@@ -52,7 +52,7 @@ export async function verifyAuth(request: NextRequest): Promise {
const jwksUrl = process.env.KEYCLOAK_JWKS_URL;
if (!issuer || !audience || !jwksUrl) {
- console.error('Missing required Keycloak environment variables');
+ console.error('Auth: missing Keycloak environment variables');
return {
success: false,
error: { message: 'Server configuration error', status: 500 },
@@ -73,7 +73,7 @@ export async function verifyAuth(request: NextRequest): Promise {
return { success: true, payload };
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
- console.error('JWT Verification Error:', errorMessage);
+ console.error('Auth: JWT verification failed', error);
// 6. If any part of the verification fails, return an error.
return {
diff --git a/src/server/lib/buildEnvVariables.ts b/src/server/lib/buildEnvVariables.ts
index 79e69d2..973a852 100644
--- a/src/server/lib/buildEnvVariables.ts
+++ b/src/server/lib/buildEnvVariables.ts
@@ -17,15 +17,10 @@
import { EnvironmentVariables } from 'server/lib/envVariables';
import { Build, Deploy } from 'server/models';
import { DeployTypes, FeatureFlags } from 'shared/constants';
-import rootLogger from 'server/lib/logger';
-import { LifecycleError } from './errors';
+import { getLogger } from 'server/lib/logger';
import { ValidationError } from './yamlConfigValidator';
import * as YamlService from 'server/models/yaml';
-const logger = rootLogger.child({
- filename: 'lib/buildEnvVariables.ts',
-});
-
export class BuildEnvironmentVariables extends EnvironmentVariables {
/**
* Retrieve Environment variables. Use lifecycle yaml file while exists; otherwise, falling back to the old LC services table env column.
@@ -64,8 +59,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables {
error.uuid = deploy.uuid;
throw error;
} else {
- logger.warn(error instanceof LifecycleError ? error.getMessage() : `${error}`);
- logger.warn(`[${deploy.uuid}]: Failback using database Environment Variables`);
+ getLogger().warn({ error }, 'EnvVars: fallback to database');
}
}
}
@@ -112,8 +106,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables {
error.uuid = deploy.uuid;
throw error;
} else {
- logger.warn(error instanceof LifecycleError ? error.getMessage() : `${error}`);
- logger.warn(`[${deploy.uuid}]: Failback using database Init Environment Variables`);
+ getLogger().warn({ error }, 'EnvVars: init fallback to database');
}
}
}
@@ -132,12 +125,16 @@ export class BuildEnvironmentVariables extends EnvironmentVariables {
* 2. Interpolate env from deploy parent service (via db or yaml definition for specific branch)
* 3. Save to deploy
* @param build Build model from associated PR
+ * @param githubRepositoryId Optional filter to only resolve env for deploys from a specific repo
* @returns Map of env variables
*/
- public async resolve(build: Build): Promise<Map<string, string>> {
+ public async resolve(build: Build, githubRepositoryId?: number): Promise<Map<string, string>> {
if (build != null) {
await build?.$fetchGraph('[services, deploys.[service.[repository], deployable]]');
- const deploys = build?.deploys;
+ const allDeploys = build?.deploys;
+ const deploys = githubRepositoryId
+ ? allDeploys.filter((d) => d.githubRepositoryId === githubRepositoryId)
+ : allDeploys;
const availableEnv = this.cleanup(await this.availableEnvironmentVariablesForBuild(build));
const useDeafulttUUID =
@@ -158,7 +155,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables {
),
})
.catch((error) => {
- logger.error(`[DEPLOY ${deploy.uuid}] Problem when preparing env variable: ${error}`);
+ getLogger().error({ error }, 'EnvVars: preparation failed');
});
if (deploy.deployable?.initDockerfilePath || deploy.service?.initDockerfilePath) {
@@ -177,7 +174,7 @@ export class BuildEnvironmentVariables extends EnvironmentVariables {
),
})
.catch((error) => {
- logger.error(`[DEPLOY ${deploy.uuid}] Problem when preparing init env variable: ${error}`);
+ getLogger().error({ error }, 'EnvVars: init preparation failed');
});
}
});
diff --git a/src/server/lib/cli.ts b/src/server/lib/cli.ts
index 25ebafd..6ea2e4e 100644
--- a/src/server/lib/cli.ts
+++ b/src/server/lib/cli.ts
@@ -18,14 +18,10 @@ import { merge } from 'lodash';
import { Build, Deploy, Service, Deployable } from 'server/models';
import { CLIDeployTypes, DeployTypes } from 'shared/constants';
import { shellPromise } from './shell';
-import rootLogger from './logger';
+import { getLogger, withLogContext, updateLogContext } from './logger';
import GlobalConfigService from 'server/services/globalConfig';
import { DatabaseSettings } from 'server/services/types/globalConfig';
-const logger = rootLogger.child({
- filename: 'lib/cli.ts',
-});
-
/**
* Deploys the build
* @param build the build to deploy
@@ -40,7 +36,10 @@ export async function deployBuild(build: Build) {
return CLIDeployTypes.has(serviceType);
})
.map(async (deploy) => {
- return await cliDeploy(deploy);
+ return withLogContext(
+ { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name },
+ async () => cliDeploy(deploy)
+ );
})
);
}
@@ -65,7 +64,9 @@ export async function cliDeploy(deploy: Deploy) {
* @param deploy the deploy to run
*/
export async function codefreshDeploy(deploy: Deploy, build: Build, service: Service, deployable: Deployable) {
- logger.debug(`Invoking the codefresh CLI to deploy this deploy`);
+ const buildUuid = build?.uuid;
+ updateLogContext({ buildUuid });
+ getLogger().debug('Invoking the codefresh CLI to deploy this deploy');
const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv);
@@ -88,9 +89,9 @@ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Ser
const command = `codefresh run ${serviceDeployPipelineId} -b "${deploy.branchName}" ${variables.join(
' '
)} ${deployTrigger} -d`;
- logger.debug(`About to run codefresh command: ${command}`);
+ getLogger().debug(`About to run codefresh command: command=${command}`);
const output = await shellPromise(command);
- logger.debug(`codefresh run output: ${output}`);
+ getLogger().debug(`Codefresh run output: output=${output}`);
const id = output.trim();
return id;
}
@@ -100,7 +101,9 @@ export async function codefreshDeploy(deploy: Deploy, build: Build, service: Ser
* @param deploy the deploy to run
*/
export async function codefreshDestroy(deploy: Deploy) {
- logger.debug(`Invoking the codefresh CLI to delete this deploy`);
+ const buildUuid = deploy?.build?.uuid;
+ updateLogContext({ buildUuid });
+ getLogger().debug('Invoking the codefresh CLI to delete this deploy');
try {
/** Reset the SHA so we will re-run the pipelines post destroy */
@@ -111,7 +114,7 @@ export async function codefreshDestroy(deploy: Deploy) {
/* Always pass in a BUILD UUID & BUILD SHA as those are critical keys */
const envVariables = merge(
{
- BUILD_UUID: deploy?.build?.uuid,
+ BUILD_UUID: buildUuid,
BUILD_SHA: deploy?.build?.sha,
},
deploy.env || {},
@@ -140,14 +143,12 @@ export async function codefreshDestroy(deploy: Deploy) {
const command = `codefresh run ${destroyPipelineId} -b "${serviceBranchName}" ${variables.join(
' '
)} ${destroyTrigger} -d`;
- logger.debug('Destroy Command: %s', command);
+ getLogger().debug(`Destroy command: command=${command}`);
const output = await shellPromise(command);
const id = output?.trim();
return id;
} catch (error) {
- logger
- .child({ error })
- .error(`[BUILD ${deploy?.build?.uuid}][cli][codefreshDestroy] Error destroying Codefresh pipeline`);
+ getLogger({ error }).error('Codefresh: pipeline destroy failed');
throw error;
}
}
@@ -172,6 +173,8 @@ export async function waitForCodefresh(id: string) {
* @param build the build to delete CLI services from
*/
export async function deleteBuild(build: Build) {
+ const buildUuid = build?.uuid;
+ updateLogContext({ buildUuid });
try {
const buildId = build?.id;
@@ -187,14 +190,19 @@ export async function deleteBuild(build: Build) {
return CLIDeployTypes.has(serviceType) && d.active;
})
.map(async (deploy) => {
- const serviceType: DeployTypes = build.enableFullYaml ? deploy.deployable.type : deploy.service.type;
- logger.info(`[DELETE ${deploy?.uuid}] Deleting CLI deploy`);
- return serviceType === DeployTypes.CODEFRESH ? codefreshDestroy(deploy) : deleteDeploy(deploy);
+ return withLogContext(
+ { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name },
+ async () => {
+ const serviceType: DeployTypes = build.enableFullYaml ? deploy.deployable.type : deploy.service.type;
+ getLogger().info('CLI: deleting');
+ return serviceType === DeployTypes.CODEFRESH ? codefreshDestroy(deploy) : deleteDeploy(deploy);
+ }
+ );
})
);
- logger.info(`[DELETE ${build.uuid}] Deleted CLI resources`);
+ getLogger().info('CLI: deleted');
} catch (e) {
- logger.error(`[DELETE ${build.uuid}] Error deleting CLI resources: ${e}`);
+ getLogger({ error: e }).error('CLI: delete failed');
}
}
diff --git a/src/server/lib/codefresh/index.ts b/src/server/lib/codefresh/index.ts
index ef35403..ec0ff5e 100644
--- a/src/server/lib/codefresh/index.ts
+++ b/src/server/lib/codefresh/index.ts
@@ -15,7 +15,7 @@
*/
import { shellPromise } from 'server/lib/shell';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { generateCodefreshCmd, constructEcrTag, getCodefreshPipelineIdFromOutput } from 'server/lib/codefresh/utils';
import { waitUntil } from 'server/lib/utils';
import { ContainerBuildOptions } from 'server/lib/codefresh/types';
@@ -23,23 +23,17 @@ import { Metrics } from 'server/lib/metrics';
import { ENVIRONMENT } from 'shared/config';
import GlobalConfigService from 'server/services/globalConfig';
-const logger = rootLogger.child({
- filename: 'lib/codefresh/codefresh.ts',
-});
-
export const tagExists = async ({ tag, ecrRepo = 'lifecycle-deployments', uuid = '' }) => {
const { lifecycleDefaults } = await GlobalConfigService.getInstance().getAllConfigs();
const repoName = ecrRepo;
- // fetch the ecr registry id from ecrDomain value `acctid.dkr.ecr.us-west-2.amazonaws.com`. this is useful if registry is in a different account
- // if its in the same account as lifecycle app, still passed for clarity here
const registryId = (lifecycleDefaults.ecrDomain?.split?.('.') || [])[0] || '';
try {
const command = `aws ecr describe-images --repository-name=${repoName} --image-ids=imageTag=${tag} --no-paginate --no-cli-auto-prompt --registry-id ${registryId}`;
await shellPromise(command);
- logger.info(`[BUILD ${uuid}] Image with tag:${tag} exists in ecr repo ${repoName}`);
+ getLogger().info(`ECR: exists tag=${tag} repo=${repoName}`);
return true;
} catch (error) {
- logger.info(`[BUILD ${uuid}] Image with tag:${tag} does not exist in ecr repo ${repoName}`);
+ getLogger().debug(`ECR: tag=${tag} not found in ${repoName}`);
return false;
}
};
@@ -47,7 +41,6 @@ export const tagExists = async ({ tag, ecrRepo = 'lifecycle-deployments', uuid =
export const buildImage = async (options: ContainerBuildOptions) => {
const { repo: repositoryName, branch, uuid, revision: sha, tag } = options;
const metrics = new Metrics('build.codefresh.image', { uuid, repositoryName, branch, sha });
- const prefix = uuid ? `[DEPLOY ${uuid}][buildImage]:` : '[DEPLOY][buildImage]:';
const suffix = `${repositoryName}/${branch}:${sha}`;
const eventDetails = {
title: 'Codefresh Build Image',
@@ -62,7 +55,7 @@ export const buildImage = async (options: ContainerBuildOptions) => {
metrics
.increment('total', { error: 'error_with_cli_output', result: 'error', codefreshBuildId: '' })
.event(eventDetails.title, eventDetails.description);
- logger.child({ output }).error(`${prefix}[noCodefreshBuildOutput] no output from Codefresh for ${suffix}`);
+ getLogger().error({ output }, `Codefresh: build output missing suffix=${suffix}`);
if (!hasOutput) throw Error('no output from Codefresh');
}
const codefreshBuildId = getCodefreshPipelineIdFromOutput(output);
@@ -77,7 +70,7 @@ export const buildImage = async (options: ContainerBuildOptions) => {
.event(eventDetails.title, eventDetails.description);
return codefreshBuildId;
} catch (error) {
- logger.child({ error }).error(`${prefix} failed for ${suffix}`);
+ getLogger().error({ error }, `Codefresh: build failed suffix=${suffix}`);
throw error;
}
};
@@ -98,6 +91,7 @@ export const waitForImage = async (id: string, { timeoutMs = 180000, intervalMs
const checkStatus = checkPipelineStatus(id);
return await waitUntil(checkStatus, { timeoutMs, intervalMs });
} catch (error) {
+ getLogger().error({ error }, `Codefresh: waitForImage failed pipelineId=${id}`);
return false;
}
};
@@ -147,6 +141,7 @@ export const getLogs = async (id: string) => {
const output = await shellPromise(command);
return output;
} catch (error) {
- return error;
+ getLogger().error({ error }, `Codefresh: getLogs failed pipelineId=${id}`);
+ return '';
}
};
diff --git a/src/server/lib/codefresh/utils/index.ts b/src/server/lib/codefresh/utils/index.ts
index d00e2ec..5d771b3 100644
--- a/src/server/lib/codefresh/utils/index.ts
+++ b/src/server/lib/codefresh/utils/index.ts
@@ -17,6 +17,7 @@
import { generateYaml } from 'server/lib/codefresh/utils/generateYaml';
import { generateCodefreshCmd } from 'server/lib/codefresh/utils/generateCodefreshCmd';
import { CF, CF_CHECKOUT_STEP, CF_BUILD_STEP, CF_AFTER_BUILD_STEP } from 'server/lib/codefresh/constants';
+import { updateLogContext } from 'server/lib/logger';
export const constructBuildArgs = (envVars = {}) => {
const envVarsItems = Object.keys(envVars);
@@ -88,6 +89,7 @@ export const getCodefreshPipelineIdFromOutput = (output: string) => {
for (const line of lines) {
const trimmedLine = line.trim();
if (regex.test(trimmedLine)) {
+ updateLogContext({ pipelineId: trimmedLine });
return trimmedLine;
}
}
diff --git a/src/server/lib/comment.ts b/src/server/lib/comment.ts
index 5c09626..24f15c1 100644
--- a/src/server/lib/comment.ts
+++ b/src/server/lib/comment.ts
@@ -14,14 +14,10 @@
* limitations under the License.
*/
-import rootLogger from './logger';
+import { getLogger } from './logger';
import { CommentParser } from 'shared/constants';
import { compact, flatten, set } from 'lodash';
-const logger = rootLogger.child({
- filename: 'lib/comment.ts',
-});
-
export class CommentHelper {
public static parseServiceBranches(comment: string): Array<{
active: boolean;
@@ -75,7 +71,7 @@ export class CommentHelper {
});
const obj = {};
envLines.forEach((line) => {
- logger.debug('Parsing line: %s', line);
+ getLogger().debug(`Parsing environment override line=${line}`);
const match = line.match(/ENV:([^:]*):(.*)/m);
const key = match[1];
const value = match[2];
diff --git a/src/server/lib/configFileWebhookEnvVariables.ts b/src/server/lib/configFileWebhookEnvVariables.ts
index 7c6eff2..2afea52 100644
--- a/src/server/lib/configFileWebhookEnvVariables.ts
+++ b/src/server/lib/configFileWebhookEnvVariables.ts
@@ -16,14 +16,10 @@
import { EnvironmentVariables } from 'server/lib/envVariables';
import { Build } from 'server/models';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { Webhook } from 'server/models/yaml';
import { FeatureFlags } from 'shared/constants';
-const logger = rootLogger.child({
- filename: 'lib/configFileWebhookEnvVariables.ts',
-});
-
export class ConfigFileWebhookEnvironmentVariables extends EnvironmentVariables {
/**
* Use lifecycle yaml file while exists; otherwise, falling back to the old LC services table env column.
@@ -58,7 +54,7 @@ export class ConfigFileWebhookEnvironmentVariables extends EnvironmentVariables
await build?.$fetchGraph('[services, deploys.service.repository]');
} else {
- logger.fatal("Build and Webhook shouldn't be undefined.");
+ getLogger().fatal('Webhook: build and webhook undefined');
}
return result;
diff --git a/src/server/lib/deploymentManager/deploymentManager.ts b/src/server/lib/deploymentManager/deploymentManager.ts
index d5b0956..0320930 100644
--- a/src/server/lib/deploymentManager/deploymentManager.ts
+++ b/src/server/lib/deploymentManager/deploymentManager.ts
@@ -20,11 +20,10 @@ import { DeployStatus, DeployTypes, CLIDeployTypes } from 'shared/constants';
import { createKubernetesApplyJob, monitorKubernetesJob } from '../kubernetesApply/applyManifest';
import { nanoid, customAlphabet } from 'nanoid';
import DeployService from 'server/services/deploy';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount';
import { waitForDeployPodReady } from '../kubernetes';
-const logger = rootLogger.child({ filename: 'lib/deploymentManager/deploymentManager.ts' });
const generateJobId = customAlphabet('abcdefghijklmnopqrstuvwxyz0123456789', 6);
export class DeploymentManager {
@@ -78,7 +77,6 @@ export class DeploymentManager {
level++;
}
- // Log final deployment order in a single line
const orderSummary = Array.from({ length: this.deploymentLevels.size }, (_, i) => {
const services =
this.deploymentLevels
@@ -88,7 +86,7 @@ export class DeploymentManager {
return `L${i}=[${services}]`;
}).join(' ');
- logger.info(`DeploymentManager: Deployment order calculated levels=${this.deploymentLevels.size} ${orderSummary}`);
+ getLogger().info(`Deploy: ${this.deploymentLevels.size} levels ${orderSummary}`);
}
private removeInvalidDependencies(): void {
@@ -102,8 +100,6 @@ export class DeploymentManager {
}
public async deploy(): Promise {
- const buildUuid = this.deploys.values().next().value?.build?.uuid || 'unknown';
-
for (const value of this.deploys.values()) {
await value.$query().patch({ status: DeployStatus.QUEUED });
}
@@ -116,9 +112,7 @@ export class DeploymentManager {
const helmServices = helmDeploys.map((d) => d.deployable.name).join(',');
const k8sServices = githubDeploys.map((d) => d.deployable.name).join(',');
- logger.info(
- `DeploymentManager: Deploying level=${level} buildUuid=${buildUuid} helm=[${helmServices}] k8s=[${k8sServices}]`
- );
+ getLogger().info(`Deploy: level ${level} helm=[${helmServices}] k8s=[${k8sServices}]`);
await Promise.all([
helmDeploys.length > 0 ? deployHelm(helmDeploys) : Promise.resolve(),
@@ -140,76 +134,78 @@ export class DeploymentManager {
}
private async deployManifests(deploy: Deploy): Promise {
- const jobId = generateJobId();
- const deployService = new DeployService();
- const runUUID = deploy.runUUID || nanoid();
-
- try {
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.DEPLOYING,
- statusMessage: 'Creating Kubernetes apply job',
- },
- runUUID
- );
+ return withLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }, async () => {
+ const jobId = generateJobId();
+ const deployService = new DeployService();
+ const runUUID = deploy.runUUID || nanoid();
- await deploy.$fetchGraph('[build, deployable, service]');
+ try {
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.DEPLOYING,
+ statusMessage: 'Creating Kubernetes apply job',
+ },
+ runUUID
+ );
- if (!deploy.manifest) {
- throw new Error(`Deploy ${deploy.uuid} has no manifest. Ensure manifests are generated before deployment.`);
- }
+ await deploy.$fetchGraph('[build, deployable, service]');
- await ensureServiceAccountForJob(deploy.build.namespace, 'deploy');
+ if (!deploy.manifest) {
+ throw new Error(`Deploy ${deploy.uuid} has no manifest. Ensure manifests are generated before deployment.`);
+ }
- await createKubernetesApplyJob({
- deploy,
- namespace: deploy.build.namespace,
- jobId,
- });
+ await ensureServiceAccountForJob(deploy.build.namespace, 'deploy');
- const shortSha = deploy.sha?.substring(0, 7) || 'unknown';
- const jobName = `${deploy.uuid}-deploy-${jobId}-${shortSha}`;
- const result = await monitorKubernetesJob(jobName, deploy.build.namespace);
+ await createKubernetesApplyJob({
+ deploy,
+ namespace: deploy.build.namespace,
+ jobId,
+ });
- if (!result.success) {
- throw new Error(result.message);
- }
- // Wait for the actual application pods to be ready
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.DEPLOYING,
- statusMessage: 'Waiting for pods to be ready',
- },
- runUUID
- );
+ const shortSha = deploy.sha?.substring(0, 7) || 'unknown';
+ const jobName = `${deploy.uuid}-deploy-${jobId}-${shortSha}`;
+ const result = await monitorKubernetesJob(jobName, deploy.build.namespace);
+
+ if (!result.success) {
+ throw new Error(result.message);
+ }
- const cliDeploy = CLIDeployTypes.has(deploy.deployable.type);
- const isReady = cliDeploy ? true : await waitForDeployPodReady(deploy);
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.DEPLOYING,
+ statusMessage: 'Waiting for pods to be ready',
+ },
+ runUUID
+ );
- if (isReady) {
+ const cliDeploy = CLIDeployTypes.has(deploy.deployable.type);
+ const isReady = cliDeploy ? true : await waitForDeployPodReady(deploy);
+
+ if (isReady) {
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.READY,
+ statusMessage: cliDeploy ? 'CLI Deploy completed' : 'Kubernetes pods are ready',
+ },
+ runUUID
+ );
+ } else {
+ throw new Error('Pods failed to become ready within timeout');
+ }
+ } catch (error) {
await deployService.patchAndUpdateActivityFeed(
deploy,
{
- status: DeployStatus.READY,
- statusMessage: cliDeploy ? 'CLI Deploy completed' : 'Kubernetes pods are ready',
+ status: DeployStatus.DEPLOY_FAILED,
+ statusMessage: `Kubernetes apply failed: ${error.message}`,
},
runUUID
);
- } else {
- throw new Error('Pods failed to become ready within timeout');
+ throw error;
}
- } catch (error) {
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.DEPLOY_FAILED,
- statusMessage: `Kubernetes apply failed: ${error.message}`,
- },
- runUUID
- );
- throw error;
- }
+ });
}
}
diff --git a/src/server/lib/envVariables.ts b/src/server/lib/envVariables.ts
index 63b22fe..a10008e 100644
--- a/src/server/lib/envVariables.ts
+++ b/src/server/lib/envVariables.ts
@@ -27,15 +27,10 @@ import {
NO_DEFAULT_ENV_UUID,
} from 'shared/constants';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { LifecycleError } from './errors';
import GlobalConfigService from 'server/services/globalConfig';
-// eslint-disable-next-line no-unused-vars
-const logger = rootLogger.child({
- filename: 'lib/envVariables.ts',
-});
-
const ALLOWED_PROPERTIES = [
'branchName',
'ipAddress',
@@ -379,9 +374,7 @@ export abstract class EnvironmentVariables {
/_publicUrl$/,
`-${globalConfig.lifecycleDefaults.defaultPublicUrl}`
);
- logger.debug(
- `[BUILD ${data['buildUUID']}] The publicUrl for ${serviceToUpdate} has been defaulted to ${defaultedPublicUrl} using the global_config table`
- );
+ getLogger().debug(`publicUrl for ${serviceToUpdate} defaulted to ${defaultedPublicUrl} using global_config`);
template = template.replace(fullMatch, defaultedPublicUrl);
}
}
diff --git a/src/server/lib/fastly.ts b/src/server/lib/fastly.ts
index f0690d4..3ae61a8 100644
--- a/src/server/lib/fastly.ts
+++ b/src/server/lib/fastly.ts
@@ -15,7 +15,7 @@
*/
import * as FastlyInstance from 'fastly/dist/index.js';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { Redis } from 'ioredis';
import { FASTLY_TOKEN } from 'shared/config';
import GlobalConfigService from 'server/services/globalConfig';
@@ -24,10 +24,6 @@ FastlyInstance.ApiClient.instance.authenticate(FASTLY_TOKEN);
const fastlyService = new FastlyInstance.ServiceApi();
const fastlyPurge = new FastlyInstance.PurgeApi();
-const logger = rootLogger.child({
- filename: 'lib/fastly.ts',
-});
-
class Fastly {
redis: Redis;
@@ -68,7 +64,6 @@ class Fastly {
const serviceName = `${fastlyServiceType}-${uuid}`;
const FASTLY_URL = await this.getFastlyUrl();
const name = `${serviceName}.${FASTLY_URL}`;
- const text = `[BUILD ${uuid}][fastly][refresh][serviceName ${name}]`;
try {
if (!name) throw new Error('Service name is missing');
const service = await fastlyService.searchService({ name });
@@ -84,7 +79,7 @@ class Fastly {
this.redis.expire(cacheKey, 86400);
return id;
} catch (error) {
- logger.child({ error }).warn(`${text} There is an issue to retrieve Fastly service id from Fastly`);
+ getLogger().warn({ error }, `Fastly: lookup failed service=${name}`);
}
}
@@ -108,16 +103,11 @@ class Fastly {
* @param serviceId Fastly Service ID
*/
async purgeAllServiceCache(serviceId: string, uuid: string, fastlyServiceType: string) {
- const text = `[BUILD ${uuid}][fastly][purgeAllServiceCache]`;
try {
if (!serviceId) throw new Error('Service ID is missing');
await fastlyPurge.purgeAll({ service_id: serviceId });
} catch (error) {
- logger
- .child({ error })
- .info(
- `${text}[serviceid ${serviceId}] has an error with the ${fastlyServiceType} service with ${serviceId} service id`
- );
+ getLogger().warn({ error }, `Fastly: purge failed serviceId=${serviceId} type=${fastlyServiceType}`);
}
}
diff --git a/src/server/lib/github/__tests__/deployments.test.ts b/src/server/lib/github/__tests__/deployments.test.ts
index 5db3a7d..4682ea5 100644
--- a/src/server/lib/github/__tests__/deployments.test.ts
+++ b/src/server/lib/github/__tests__/deployments.test.ts
@@ -139,7 +139,7 @@ describe('GitHub Deployment Functions', () => {
const error = new Error('Network error');
mockOctokit.request.mockRejectedValue(error);
- await expect(deleteGithubDeployment(mockDeploy)).rejects.toThrow('Network error');
+ await expect(deleteGithubDeployment(mockDeploy)).rejects.toThrow('GitHub API request failed');
expect(mockOctokit.request).toHaveBeenCalledWith(
`DELETE /repos/${mockDeploy.build.pullRequest.repository.fullName}/deployments/${mockDeploy.githubDeploymentId}`
);
diff --git a/src/server/lib/github/__tests__/index.test.ts b/src/server/lib/github/__tests__/index.test.ts
index 3ff7c73..9051e0f 100644
--- a/src/server/lib/github/__tests__/index.test.ts
+++ b/src/server/lib/github/__tests__/index.test.ts
@@ -45,8 +45,15 @@ jest.mock('server/services/globalConfig', () => {
jest.mock('axios');
jest.mock('server/lib/github/client');
jest.mock('server/lib/github/utils');
-jest.mock('server/lib/logger');
-import logger from 'server/lib/logger';
+jest.mock('server/lib/logger', () => ({
+ getLogger: jest.fn().mockReturnValue({
+ info: jest.fn(),
+ debug: jest.fn(),
+ error: jest.fn(),
+ warn: jest.fn(),
+ }),
+}));
+import { getLogger, rootLogger as logger } from 'server/lib/logger';
test('createOrUpdatePullRequestComment success', async () => {
jest.spyOn(client, 'createOctokitClient').mockResolvedValue({
@@ -138,7 +145,7 @@ test('getSHAForBranch failure', async () => {
const mockError = new Error('error');
(utils.getRefForBranchName as jest.Mock).mockRejectedValue(mockError);
await expect(getSHAForBranch('main', 'foo', 'bar')).rejects.toThrow('error');
- expect(logger.child).toHaveBeenCalledWith({ error: mockError });
+ expect(getLogger).toHaveBeenCalledWith({ error: mockError, repo: 'foo/bar', branch: 'main' });
});
test('checkIfCommentExists to return true', async () => {
diff --git a/src/server/lib/github/__tests__/utils.test.ts b/src/server/lib/github/__tests__/utils.test.ts
index 121aaf3..031536c 100644
--- a/src/server/lib/github/__tests__/utils.test.ts
+++ b/src/server/lib/github/__tests__/utils.test.ts
@@ -35,9 +35,9 @@ jest.mock('server/services/globalConfig', () => {
jest.mock('server/lib/github/client');
-jest.mock('server/lib/logger');
+jest.mock('server/lib/logger/rootLogger');
-import logger from 'server/lib/logger';
+import { rootLogger as logger } from 'server/lib/logger';
test('getAppToken success', async () => {
const app = jest.fn().mockResolvedValue({ token: '123' });
diff --git a/src/server/lib/github/cacheRequest.ts b/src/server/lib/github/cacheRequest.ts
index be30b78..04a511b 100644
--- a/src/server/lib/github/cacheRequest.ts
+++ b/src/server/lib/github/cacheRequest.ts
@@ -15,24 +15,19 @@
*/
import { cloneDeep, merge } from 'lodash';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { GITHUB_API_CACHE_EXPIRATION_SECONDS } from 'shared/constants';
import { createOctokitClient } from 'server/lib/github/client';
import { CacheRequestData } from 'server/lib/github/types';
import { redisClient } from 'server/lib/dependencies';
-const initialLogger = rootLogger.child({
- filename: 'lib/github/cacheRequest.ts',
-});
-
export async function cacheRequest(
endpoint: string,
requestData = {} as CacheRequestData,
- { logger = initialLogger, cache = redisClient.getRedis(), ignoreCache = false } = {}
+ { cache = redisClient.getRedis(), ignoreCache = false } = {}
) {
const cacheKey = `github:req_cache:${endpoint}`;
- const text = `[GITHUB ${cacheKey}][cacheRequest]`;
let cached;
try {
const octokit = await createOctokitClient({ caller: 'cacheRequest' });
@@ -70,17 +65,14 @@ export async function cacheRequest(
const data = JSON.parse(cached?.data);
return { data };
} catch (error) {
- return cacheRequest(endpoint, requestData, { logger, cache, ignoreCache: true });
+ return cacheRequest(endpoint, requestData, { cache, ignoreCache: true });
}
} else if (error?.status === 404) {
- const msg = '[retryCacheRequest] The requested resource was not found. Maybe the branch was deleted?';
- logger.child({ error }).info(`${text} ${msg}`);
- throw new Error(error?.message || msg);
+ getLogger().info(`GitHub: cache request not found endpoint=${endpoint}`);
+ throw new Error('Resource not found');
} else {
- const msg = 'cache request request error';
- const message = error?.message || msg;
- logger.child({ error }).error(`${text} ${msg}`);
- throw new Error(message);
+ getLogger().error({ error }, `GitHub: cache request failed endpoint=${endpoint}`);
+ throw new Error('GitHub API request failed');
}
}
}
diff --git a/src/server/lib/github/client.ts b/src/server/lib/github/client.ts
index 3ab3ea3..7e52b68 100644
--- a/src/server/lib/github/client.ts
+++ b/src/server/lib/github/client.ts
@@ -18,13 +18,8 @@ import PQueue from 'p-queue';
import { constructOctokitClient, constructClientRequestData } from 'server/lib/github/utils';
import { CreateOctokitClientOptions } from 'server/lib/github/types';
import GlobalConfigService from 'server/services/globalConfig';
-import rootLogger from 'server/lib/logger';
import { Metrics } from 'server/lib/metrics';
-const initialLogger = rootLogger.child({
- filename: 'lib/github/client.ts',
-});
-
const queue = new PQueue({
concurrency: 100,
intervalCap: 40,
@@ -32,12 +27,7 @@ const queue = new PQueue({
carryoverConcurrencyCount: true,
});
-export const createOctokitClient = async ({
- accessToken,
- // eslint-disable-next-line no-unused-vars
- logger = initialLogger,
- caller = '',
-}: CreateOctokitClientOptions = {}) => {
+export const createOctokitClient = async ({ accessToken, caller = '' }: CreateOctokitClientOptions = {}) => {
let token: string | undefined = await GlobalConfigService.getInstance().getGithubClientToken();
if (!token) token = accessToken;
const octokit = constructOctokitClient({ token });
diff --git a/src/server/lib/github/deployments.ts b/src/server/lib/github/deployments.ts
index a2f3f41..b03efe0 100644
--- a/src/server/lib/github/deployments.ts
+++ b/src/server/lib/github/deployments.ts
@@ -18,11 +18,7 @@ import { Deploy } from 'server/models';
import { cacheRequest } from 'server/lib/github/cacheRequest';
import { getPullRequest } from 'server/lib/github/index';
import { DeployStatus } from 'shared/constants';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({
- filename: 'github/deployments.ts',
-});
+import { getLogger } from 'server/lib/logger';
const githubDeploymentStatuses = {
deployed: 'success',
@@ -43,12 +39,9 @@ function lifecycleToGithubStatus(status: string) {
}
export async function createOrUpdateGithubDeployment(deploy: Deploy) {
- const uuid = deploy.uuid;
- const text = `[DEPLOY ${uuid}][createOrUpdateGithubDeployment]`;
- const suffix = 'creating or updating github deployment';
- logger.debug(`${text} ${suffix}`);
+ getLogger().debug('Creating or updating github deployment');
try {
- logger.child({ deploy }).info(`${text}[deploymentStatus] deploy status`);
+ getLogger().info('GitHub: deployment status updated');
await deploy.$fetchGraph('build.pullRequest.repository');
const githubDeploymentId = deploy?.githubDeploymentId;
const build = deploy?.build;
@@ -63,9 +56,6 @@ export async function createOrUpdateGithubDeployment(deploy: Deploy) {
if (hasDeployment) {
const deploymentResp = await getDeployment(deploy);
const deploymentSha = deploymentResp?.data?.sha;
- /**
- * @note If the last commit is different than the deploy sha, delete the deployment, time for a new deployment
- **/
if (lastCommit !== deploymentSha) {
await deleteGithubDeploymentAndEnvironment(deploy);
} else {
@@ -74,14 +64,11 @@ export async function createOrUpdateGithubDeployment(deploy: Deploy) {
}
}
await createGithubDeployment(deploy, lastCommit);
- /**
- * @note this captures a redeployed deployment; sometimes it happens immediately
- */
if (build?.status === 'deployed') {
await updateDeploymentStatus(deploy, githubDeploymentId);
}
} catch (error) {
- logger.child({ error }).error(`${text} error ${suffix}`);
+ getLogger({ error }).error('GitHub: deployment update failed');
throw error;
}
}
@@ -95,7 +82,6 @@ export async function deleteGithubDeploymentAndEnvironment(deploy: Deploy) {
export async function createGithubDeployment(deploy: Deploy, ref: string) {
const environment = deploy.uuid;
- const text = `[DEPLOY ${environment}][createGithubDeployment]`;
const pullRequest = deploy?.build?.pullRequest;
const repository = pullRequest?.repository;
const fullName = repository?.fullName;
@@ -115,13 +101,16 @@ export async function createGithubDeployment(deploy: Deploy, ref: string) {
await deploy.$query().patch({ githubDeploymentId });
return resp;
} catch (error) {
- logger.child({ error }).error(`${text} Error creating github deployment`);
+ getLogger({
+ error,
+ repo: fullName,
+ }).error('GitHub: deployment create failed');
throw error;
}
}
export async function deleteGithubDeployment(deploy: Deploy) {
- logger.debug(`[DEPLOY ${deploy.uuid}] Deleting github deployment for deploy ${deploy.uuid}`);
+ getLogger().debug('Deleting github deployment');
if (!deploy?.build) await deploy.$fetchGraph('build.pullRequest.repository');
const resp = await cacheRequest(
`DELETE /repos/${deploy.build.pullRequest.repository.fullName}/deployments/${deploy.githubDeploymentId}`
@@ -133,13 +122,12 @@ export async function deleteGithubDeployment(deploy: Deploy) {
}
export async function deleteGithubEnvironment(deploy: Deploy) {
- logger.debug(`[DEPLOY ${deploy.uuid}] Deleting github environment for deploy ${deploy.uuid}`);
+ getLogger().debug('Deleting github environment');
if (!deploy?.build) await deploy.$fetchGraph('build.pullRequest.repository');
const repository = deploy.build.pullRequest.repository;
try {
await cacheRequest(`DELETE /repos/${repository.fullName}/environments/${deploy.uuid}`);
} catch (e) {
- // If the environment doesn't exist, we don't care
if (e.status !== 404) {
throw e;
}
@@ -147,7 +135,7 @@ export async function deleteGithubEnvironment(deploy: Deploy) {
}
export async function updateDeploymentStatus(deploy: Deploy, deploymentId: number) {
- logger.debug(`[DEPLOY ${deploy.uuid}] Updating github deployment status for deploy ${deploy.uuid}`);
+ getLogger().debug('Updating github deployment status');
const repository = deploy.build.pullRequest.repository;
let buildStatus = determineStatus(deploy);
const resp = await cacheRequest(`POST /repos/${repository.fullName}/deployments/${deploymentId}/statuses`, {
diff --git a/src/server/lib/github/index.ts b/src/server/lib/github/index.ts
index 21e6f00..8a19ee2 100644
--- a/src/server/lib/github/index.ts
+++ b/src/server/lib/github/index.ts
@@ -19,7 +19,7 @@ import crypto from 'crypto';
import { NextApiRequest } from 'next';
import { GITHUB_WEBHOOK_SECRET } from 'shared/config';
import { LifecycleError } from 'server/lib/errors';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { createOctokitClient } from 'server/lib/github/client';
import { cacheRequest } from 'server/lib/github/cacheRequest';
import { LIFECYCLE_FILE_NAME_REGEX } from 'server/lib/github/constants';
@@ -28,10 +28,6 @@ import { getRefForBranchName } from 'server/lib/github/utils';
import { Deploy } from 'server/models';
import { LifecycleYamlConfigOptions } from 'server/models/yaml/types';
-export const initialLogger = rootLogger.child({
- filename: 'lib/github/index.ts',
-});
-
export async function createOrUpdatePullRequestComment({
installationId,
pullRequestNumber,
@@ -50,9 +46,12 @@ export async function createOrUpdatePullRequestComment({
headers: { etag },
});
} catch (error) {
- const msg = 'Unable to create or update pull request comment';
- initialLogger.child({ error }).error(`[GITHUB ${fullName}/${pullRequestNumber}] ${msg} - original error: ${error}`);
- throw new Error(error?.message || msg);
+ getLogger({
+ error,
+ repo: fullName,
+ pr: pullRequestNumber,
+ }).error('GitHub: comment update failed');
+ throw new Error(error?.message || 'Unable to create or update pull request comment');
}
}
@@ -74,42 +73,39 @@ export async function updatePullRequestLabels({
data: { labels },
});
} catch (error) {
- initialLogger
- .child({ error })
- .error(
- `[GITHUB ${fullName}/${pullRequestNumber}] Unable to update pull request with '${labels.toString()}': ${error}`
- );
+ getLogger({
+ error,
+ repo: fullName,
+ pr: pullRequestNumber,
+ labels: labels.toString(),
+ }).error('GitHub: labels update failed');
throw error;
}
}
-export async function getPullRequest(
- owner: string,
- name: string,
- pullRequestNumber: number,
- _installationId: number,
- logger = initialLogger
-) {
+export async function getPullRequest(owner: string, name: string, pullRequestNumber: number, _installationId: number) {
try {
return await cacheRequest(`GET /repos/${owner}/${name}/pulls/${pullRequestNumber}`);
} catch (error) {
- const msg = 'Unable to retrieve pull request';
- logger.error(`[GITHUB ${owner}/${name}/pulls/${pullRequestNumber}] ${msg}: ${error}`);
- throw new Error(error?.message || msg);
+ getLogger({
+ error,
+ repo: `${owner}/${name}`,
+ pr: pullRequestNumber,
+ }).error('GitHub: pull request fetch failed');
+ throw new Error(error?.message || 'Unable to retrieve pull request');
}
}
-export async function getPullRequestByRepositoryFullName(
- fullName: string,
- pullRequestNumber: number,
- logger = initialLogger
-) {
+export async function getPullRequestByRepositoryFullName(fullName: string, pullRequestNumber: number) {
try {
return await cacheRequest(`GET /repos/${fullName}/pulls/${pullRequestNumber}`);
} catch (error) {
- const msg = 'Unable to retrieve pull request';
- logger.error(`[GITHUB ${fullName}/pulls/${pullRequestNumber}] ${msg}: ${error}`);
- throw new Error(error?.message || msg);
+ getLogger({
+ error,
+ repo: fullName,
+ pr: pullRequestNumber,
+ }).error('GitHub: pull request fetch failed');
+ throw new Error(error?.message || 'Unable to retrieve pull request');
}
}
@@ -134,12 +130,16 @@ export async function getPullRequestLabels({
const response = await client.request(`GET /repos/${fullName}/issues/${pullRequestNumber}`);
return response.data.labels.map((label: any) => label.name);
} catch (error) {
- initialLogger.error(`[GITHUB ${fullName}/${pullRequestNumber}] Unable to fetch labels: ${error}`);
+ getLogger({
+ error,
+ repo: fullName,
+ pr: pullRequestNumber,
+ }).error('GitHub: labels fetch failed');
throw error;
}
}
-export async function createDeploy({ owner, name, branch, installationId, logger = initialLogger }: RepoOptions) {
+export async function createDeploy({ owner, name, branch, installationId }: RepoOptions) {
try {
const octokit = await createOctokitClient({ installationId, caller: 'createDeploy' });
return await octokit.request(`POST /repos/${owner}/${name}/builds`, {
@@ -149,9 +149,12 @@ export async function createDeploy({ owner, name, branch, installationId, logger
},
});
} catch (error) {
- const msg = 'Unable to create deploy';
- logger.child({ error }).error(`[GITHUB ${owner}/${name}/${branch}] ${msg}`);
- throw new Error(error?.message || msg);
+ getLogger({
+ error,
+ repo: `${owner}/${name}`,
+ branch,
+ }).error('GitHub: deploy create failed');
+ throw new Error(error?.message || 'Unable to create deploy');
}
}
@@ -187,24 +190,21 @@ export async function getShaForDeploy(deploy: Deploy) {
}
}
-export async function getSHAForBranch(
- branchName: string,
- owner: string,
- name: string,
- logger = initialLogger
-): Promise {
+export async function getSHAForBranch(branchName: string, owner: string, name: string): Promise {
try {
const ref = await getRefForBranchName(owner, name, branchName);
return ref?.data?.object?.sha;
} catch (error) {
- const msg = 'Unable to retrieve SHA from branch';
- logger.child({ error }).warn(`[GITHUB ${owner}/${name}/${branchName}] ${msg}`);
- throw new Error(error?.message || msg);
+ getLogger({
+ error,
+ repo: `${owner}/${name}`,
+ branch: branchName,
+ }).warn('GitHub: SHA fetch failed');
+ throw new Error(error?.message || 'Unable to retrieve SHA from branch');
}
}
-export async function getYamlFileContent({ fullName, branch = '', sha = '', isJSON = false, logger = initialLogger }) {
- const text = `[${fullName}:${branch}][getYamlFileContent]`;
+export async function getYamlFileContent({ fullName, branch = '', sha = '', isJSON = false }) {
try {
const identifier = sha?.length > 0 ? sha : branch;
const treeResp = await cacheRequest(`GET /repos/${fullName}/git/trees/${identifier}`);
@@ -238,18 +238,12 @@ export async function getYamlFileContent({ fullName, branch = '', sha = '', isJS
return configData;
} catch (error) {
- const msg = 'warning: no lifecycle yaml found or parsed';
- logger.child({ error }).warn(`${text}${msg}`);
- throw new ConfigFileNotFound(error?.message || msg);
+ getLogger({ error, repo: fullName, branch }).warn('GitHub: yaml fetch failed');
+ throw new ConfigFileNotFound('Config file not found');
}
}
-export async function getYamlFileContentFromPullRequest(
- fullName: string,
- pullRequestNumber: number,
- logger = initialLogger
-) {
- const [owner, repo] = fullName.split('/');
+export async function getYamlFileContentFromPullRequest(fullName: string, pullRequestNumber: number) {
try {
const pullRequestResp = await getPullRequestByRepositoryFullName(fullName, pullRequestNumber);
const branch = pullRequestResp?.data?.head?.ref;
@@ -258,25 +252,29 @@ export async function getYamlFileContentFromPullRequest(
if (!config) throw new Error('Unable to get config from pull request');
return config;
} catch (error) {
- const msg = 'Unable to retrieve YAML file content from pull request';
- logger.child({ error }).warn(`[GITHUB ${owner}/${repo}/pulls/${pullRequestNumber}] ${msg}`);
- throw new ConfigFileNotFound(error?.message || msg);
+ getLogger({
+ error,
+ repo: fullName,
+ pr: pullRequestNumber,
+ }).warn('GitHub: yaml fetch failed');
+ throw new ConfigFileNotFound('Config file not found');
}
}
export async function getYamlFileContentFromBranch(
fullName: string,
- branchName: string,
- logger = initialLogger
+ branchName: string
): Promise {
- const [owner, repo] = fullName.split('/');
try {
const config = await getYamlFileContent({ fullName, branch: branchName });
return config;
} catch (error) {
- const msg = 'Unable to retrieve YAML file content from branch';
- logger.child({ error }).warn(`[GITHUB ${owner}/${repo}/${branchName}] ${msg}`);
- throw new ConfigFileNotFound(error?.message || msg);
+ getLogger({
+ error,
+ repo: fullName,
+ branch: branchName,
+ }).warn('GitHub: yaml fetch failed');
+ throw new ConfigFileNotFound('Config file not found');
}
}
@@ -284,7 +282,6 @@ export async function checkIfCommentExists({
fullName,
pullRequestNumber,
commentIdentifier,
- logger = initialLogger,
}: CheckIfCommentExistsOptions) {
try {
const resp = await cacheRequest(`GET /repos/${fullName}/issues/${pullRequestNumber}/comments`);
@@ -292,8 +289,11 @@ export async function checkIfCommentExists({
const isExistingComment = comments.find(({ body }) => body?.includes(commentIdentifier)) || false;
return isExistingComment;
} catch (error) {
- const msg = 'Unable check for coments';
- logger.child({ error }).error(`[GITHUB ${fullName}][checkIfCommentExists] ${msg}`);
+ getLogger({
+ error,
+ repo: fullName,
+ pr: pullRequestNumber,
+ }).error('GitHub: comments check failed');
return false;
}
}
diff --git a/src/server/lib/github/types.ts b/src/server/lib/github/types.ts
index 2bd7173..d86e41a 100644
--- a/src/server/lib/github/types.ts
+++ b/src/server/lib/github/types.ts
@@ -26,7 +26,6 @@ export interface RepoOptions {
owner?: string;
name?: string;
githubPullRequestId?: number;
- logger?: Logger;
}
export type DeployState = 'error' | 'failure' | 'inactive' | 'in_progress' | 'queued' | 'pending' | 'success';
@@ -40,7 +39,6 @@ export type GetAppTokenOptions = {
export type CreateOctokitClientOptions = {
accessToken?: string;
installationId?: number;
- logger?: Logger;
caller?: string;
cache?: typeof Redis;
};
@@ -118,7 +116,6 @@ export interface CheckIfCommentExistsOptions {
fullName: string;
pullRequestNumber: number;
commentIdentifier: string;
- logger?: Logger;
}
export interface DetermineIfQueueIsNeededOptions {
diff --git a/src/server/lib/github/utils/index.ts b/src/server/lib/github/utils/index.ts
index ccb425d..4fc6fe4 100644
--- a/src/server/lib/github/utils/index.ts
+++ b/src/server/lib/github/utils/index.ts
@@ -15,22 +15,20 @@
*/
import { Octokit } from '@octokit/core';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { cacheRequest } from 'server/lib/github/cacheRequest';
import { ConstructOctokitClientOptions, GetAppTokenOptions } from 'server/lib/github/types';
-const initialLogger = rootLogger.child({
- filename: 'lib/github/utils.ts',
-});
-
-export const getAppToken = async ({ installationId, app, logger = initialLogger }: GetAppTokenOptions) => {
+export const getAppToken = async ({ installationId, app }: Omit) => {
try {
const resp = await app({ type: 'installation', installationId });
return resp?.token;
} catch (error) {
const msg = 'Unable to get App Token';
- logger.child({ error }).error(`[GITHUB createOctokitClient] Unable to create a new client`);
+ getLogger().error(
+ `GitHub: unable to get app token installationId=${installationId} error=${error?.message || msg}`
+ );
throw new Error(error?.message || msg);
}
};
@@ -45,12 +43,14 @@ export const constructOctokitClient = ({ token }: ConstructOctokitClientOptions)
});
};
-export async function getRefForBranchName(owner: string, name: string, branchName: string, logger = initialLogger) {
+export async function getRefForBranchName(owner: string, name: string, branchName: string) {
try {
return await cacheRequest(`GET /repos/${owner}/${name}/git/ref/heads/${branchName}`);
} catch (error) {
const msg = 'Unable to get ref for Branch Name';
- logger.child({ error }).error(`[GITHUB ${owner}/${name}:${branchName}][getRefForBranchName] ${msg}`);
+ getLogger().error(
+ `GitHub: unable to get ref for branch repo=${owner}/${name} branch=${branchName} error=${error?.message || msg}`
+ );
throw new Error(error?.message || msg);
}
}
diff --git a/src/server/lib/helm/helm.ts b/src/server/lib/helm/helm.ts
index b069382..4fb02d0 100644
--- a/src/server/lib/helm/helm.ts
+++ b/src/server/lib/helm/helm.ts
@@ -20,7 +20,7 @@ import Deploy from 'server/models/Deploy';
import GlobalConfigService from 'server/services/globalConfig';
import { TMP_PATH } from 'shared/config';
import { DeployStatus } from 'shared/constants';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { shellPromise } from 'server/lib/shell';
import { kubeContextStep } from 'server/lib/codefresh';
import Build from 'server/models/Build';
@@ -36,10 +36,6 @@ import {
const CODEFRESH_PATH = `${TMP_PATH}/codefresh`;
-const logger = rootLogger.child({
- filename: 'lib/helm/helm.ts',
-});
-
/**
* Generates codefresh deployment step for public Helm charts.
* We are manily using the `helm` column from deployable table.
@@ -250,7 +246,7 @@ export async function deployHelm(deploys: Deploy[]) {
*/
export async function fetchUntilSuccess(url, retries, deploy, namespace) {
- logger.info(`[Number of maxRetries: ${retries}] Trying to fetch the url: ${url}`);
+ getLogger().debug(`Helm: waiting for pods url=${url} maxRetries=${retries}`);
for (let i = 0; i < retries; i++) {
const pods = await shellPromise(
`kubectl get deploy ${deploy} -n ${namespace} -o jsonpath='{.status.availableReplicas}'`
@@ -258,14 +254,14 @@ export async function fetchUntilSuccess(url, retries, deploy, namespace) {
try {
const response = await fetch(url);
if (1 <= parseInt(pods, 10)) {
- logger.info(` [ On Deploy ${deploy} ] There's ${pods} pods available for deployment`);
+ getLogger().debug(`Pods: available deploy=${deploy} pods=${pods}`);
return;
} else {
- logger.info(` [ On Deploy ${deploy} ] There's 0 pods available for deployment`);
- logger.error(`[ REQUEST TO ${url}] Request failed and Status code number: ${response.status}`);
+ getLogger().debug(`Pods: unavailable deploy=${deploy}`);
+ getLogger().error(`Helm: request failed url=${url} status=${response.status}`);
}
} catch (error) {
- logger.error(`[ Error function fetchUntilSuccess : ${error.message}`);
+ getLogger().error({ error }, `Helm: fetch failed url=${url}`);
}
await new Promise((resolve) => setTimeout(resolve, 10000));
}
@@ -291,7 +287,7 @@ export async function generateCodefreshRunCommand(deploy: Deploy): Promise {
deploy?.build.isStatic != undefined
) {
ingressValues.push(`ingress.backendService=${deploy.uuid}-external-service`, 'ingress.port=8080');
- logger.info(`[INGRESS] Redirect ingress request to Keda proxy`);
+ getLogger().debug(`Helm: redirecting ingress to KEDA proxy`);
}
return ingressValues;
@@ -547,9 +543,7 @@ export const constructHelmDeploysBuildMetaData = async (deploys: Deploy[]) => {
error: '',
};
} catch (error) {
- logger
- .child({ error })
- .error(`[BUILD][constructHelmDeploysBuildMetaData] Failed to construct Helm deploy metadata: ${error?.message}`);
+ getLogger().error({ error }, `Helm: metadata construction failed`);
return {
uuid: '',
branchName: '',
diff --git a/src/server/lib/k8sStreamer.ts b/src/server/lib/k8sStreamer.ts
index 77674b2..2e75028 100644
--- a/src/server/lib/k8sStreamer.ts
+++ b/src/server/lib/k8sStreamer.ts
@@ -15,14 +15,10 @@
*/
import { KubeConfig } from '@kubernetes/client-node';
-import rootLogger from './logger';
+import { getLogger } from 'server/lib/logger';
import * as k8s from '@kubernetes/client-node';
import { PassThrough, Writable } from 'stream';
-const logger = rootLogger.child({
- filename: 'lib/k8sStreamer.ts',
-});
-
export interface AbortHandle {
abort: () => void;
}
@@ -52,7 +48,6 @@ export function streamK8sLogs(
): AbortHandle {
const { podName, namespace, containerName: rawContainerName, follow, tailLines, timestamps } = params;
const containerName = rawContainerName.startsWith('[init] ') ? rawContainerName.substring(7) : rawContainerName;
- const logCtx = { podName, namespace, containerName, follow, tailLines };
const kc = new KubeConfig();
kc.loadFromDefault();
@@ -77,7 +72,10 @@ export function streamK8sLogs(
}
}
} catch (e: any) {
- logger.error({ ...logCtx, err: e }, 'Error processing log stream data chunk');
+ getLogger().error(
+ { error: e },
+ `K8sStream: data chunk processing failed podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
}
});
@@ -91,7 +89,10 @@ export function streamK8sLogs(
}
callbacks.onEnd();
} catch (e: any) {
- logger.error({ ...logCtx, err: e }, 'Error during log stream end processing');
+ getLogger().error(
+ { error: e },
+ `K8sStream: end processing failed podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
callbacks.onError(e instanceof Error ? e : new Error(String(e)));
}
});
@@ -99,7 +100,10 @@ export function streamK8sLogs(
stream.on('error', (err) => {
if (streamEnded) return;
streamEnded = true;
- logger.error({ ...logCtx, err }, 'K8s log stream encountered an error event.');
+ getLogger().error(
+ { error: err },
+ `K8sStream: error event received podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
buffer = '';
callbacks.onError(err);
});
@@ -115,12 +119,17 @@ export function streamK8sLogs(
k8sRequest = await k8sLog.log(namespace, podName, containerName, stream as Writable, logOptions);
- logger.debug(logCtx, 'k8sLog.log promise resolved (stream likely ended or follow=false).');
+ getLogger().debug(
+ `K8sStream: promise resolved podName=${podName} namespace=${namespace} containerName=${containerName} follow=${follow}`
+ );
if (k8sRequest) {
k8sRequest.on('error', (err: Error) => {
if (streamEnded) return;
- logger.error({ ...logCtx, err }, 'K8s request object emitted error.');
+ getLogger().error(
+ { error: err },
+ `K8sStream: request error emitted podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
if (stream.writable) {
stream.emit('error', err);
} else {
@@ -137,7 +146,10 @@ export function streamK8sLogs(
} catch (err: any) {
if (streamEnded) return;
if (err.name !== 'AbortError') {
- logger.error({ ...logCtx, err }, 'Failed to establish K8s log stream connection.');
+ getLogger().error(
+ { error: err },
+ `K8sStream: connection failed podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
buffer = '';
if (stream.writable) {
stream.emit('error', err);
@@ -158,10 +170,15 @@ export function streamK8sLogs(
try {
k8sRequest.abort();
} catch (abortErr) {
- logger.error({ ...logCtx, err: abortErr }, 'Error calling abort() on K8s request.');
+ getLogger().error(
+ { error: abortErr },
+ `K8sStream: abort call failed podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
}
} else {
- logger.warn(logCtx, "Abort requested, but K8s request object not available or doesn't have abort method.");
+ getLogger().warn(
+ `K8sStream: abort requested but request unavailable podName=${podName} namespace=${namespace} containerName=${containerName}`
+ );
}
stream.destroy();
streamEnded = true;
diff --git a/src/server/lib/kubernetes.ts b/src/server/lib/kubernetes.ts
index b4d4312..c062c2b 100644
--- a/src/server/lib/kubernetes.ts
+++ b/src/server/lib/kubernetes.ts
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-import rootLogger from './logger';
+import { getLogger } from './logger';
import yaml from 'js-yaml';
import _ from 'lodash';
import { Build, Deploy, Deployable, Service } from 'server/models';
@@ -31,10 +31,6 @@ import GlobalConfigService from 'server/services/globalConfig';
import { setupServiceAccountWithRBAC } from './kubernetes/rbac';
import { staticEnvTolerations } from './helm/constants';
-const logger = rootLogger.child({
- filename: 'lib/kubernetes.ts',
-});
-
interface VOLUME {
name: string;
emptyDir?: {};
@@ -51,7 +47,7 @@ async function namespaceExists(client: k8s.CoreV1Api, name: string): Promise {
- // Check if this is a legacy deployment (has build.manifest)
if (!build.manifest || build.manifest.trim().length === 0) {
- // New deployments are handled by DeploymentManager
- logger.info(`[Build ${build.uuid}] No build manifest found, using new deployment pattern via DeploymentManager`);
+ getLogger().info('Deploy: starting method=deploymentManager');
return [];
}
- // Legacy deployment path - apply manifest directly
- logger.info(`[Build ${build.uuid}] Using legacy deployment pattern with build.manifest`);
+ getLogger().info('Deploy: starting method=legacyManifest');
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
@@ -383,12 +370,14 @@ export async function applyManifests(build: Build): Promise 0) {
- logger.info(`${buildTaxonomy} Pods created ${gitTaxonomy}`);
+ getLogger(logCtx).info('Deploy: pods created');
break;
} else if (retries < 60) {
- // wait for 5 minutes for pods to be created
retries += 1;
await new Promise((r) => setTimeout(r, 5000));
} else {
- logger.warn(`${buildTaxonomy} No pods found within 5 minutes ${gitTaxonomy}. `);
+ getLogger(logCtx).warn('Pod: not found timeout=5m');
break;
}
}
retries = 0;
- logger.info(`${buildTaxonomy} Waiting 15 minutes for pods to be ready ${gitTaxonomy}`);
+ getLogger(logCtx).info('Deploy: waiting for pods state=ready');
// eslint-disable-next-line no-constant-condition
while (true) {
let isReady = false;
try {
const pods = await getPods({ uuid, namespace });
- // only check pods that are not managed by Helm
const matches =
pods?.filter(
(pod) =>
@@ -553,18 +545,20 @@ export async function waitForPodReady(build: Build) {
return conditions.some((condition) => condition?.type === 'Ready' && condition?.status === 'True');
});
} catch (error) {
- logger.child({ error, isReady }).warn(`${buildTaxonomy} error checking pod readiness ${gitTaxonomy}`);
+ getLogger({ ...logCtx, error, isReady }).warn('Pod: readiness check failed');
}
if (isReady) {
- logger.info(`${buildTaxonomy} Pods are ready ${gitTaxonomy}`);
+ getLogger(logCtx).info('Deploy: pods ready');
return true;
}
if (retries < 180) {
retries += 1;
await new Promise((r) => setTimeout(r, 5000));
} else {
- throw new Error(`${buildTaxonomy} Pods for build not ready after 15 minutes ${gitTaxonomy}`);
+ throw new Error(
+ `Pods for build not ready after 15 minutes buildUuid=${uuid} repo=${fullName} branch=${branchName}`
+ );
}
}
}
@@ -578,9 +572,12 @@ export async function deleteBuild(build: Build) {
await shellPromise(
`kubectl delete all,pvc,mapping,Httpscaledobjects -l lc_uuid=${build.uuid} --namespace ${build.namespace}`
);
- logger.info(`[DELETE ${build.uuid}] Deleted kubernetes resources`);
+ getLogger({ namespace: build.namespace }).info('Deploy: resources deleted');
} catch (e) {
- logger.error(`[DELETE ${build.uuid}] Error deleting kubernetes resources: ${e}`);
+ getLogger({
+ namespace: build.namespace,
+ error: e,
+ }).error('Resources: delete failed');
}
}
@@ -589,21 +586,16 @@ export async function deleteBuild(build: Build) {
* @param name namespace to delete
*/
export async function deleteNamespace(name: string) {
- // this is a final safety check to only delete namespaces that start with `env-`
if (!name.startsWith('env-')) return;
try {
- // Native helm now uses namespace-scoped RBAC (Role/RoleBinding) which gets deleted with the namespace
- // No need for manual cleanup of cluster-level resources
-
- // adding a grace-period to make sure resources and finalizers are gone before we delete the namespace
await shellPromise(`kubectl delete ns ${name} --grace-period 120`);
- logger.info(`[DELETE ${name}] Deleted namespace`);
+ getLogger({ namespace: name }).info('Deploy: namespace deleted');
} catch (e) {
if (e.includes('Error from server (NotFound): namespaces')) {
- logger.info(`[DELETE ${name}] Namespace not found, skipping deletion.`);
+ getLogger({ namespace: name }).info('Deploy: namespace skipped reason=notFound');
} else {
- logger.error(`[DELETE ${name}] Error deleting namespace: ${e}`);
+ getLogger({ namespace: name, error: e }).error('Namespace: delete failed');
}
}
}
@@ -657,7 +649,7 @@ export function generateManifest({
const manifest = `${disks}---\n${builds}---\n${nodePorts}---\n${grpcMappings}---\n${loadBalancers}---\n${externalNameServices}`;
const isDev = APP_ENV?.includes('dev') ?? false;
if (!isDev) {
- logger.child({ manifest }).info(`[BUILD ${build.uuid}][lifecycleConfigLog][kubernetesManifest] Generated manifest`);
+ getLogger({ manifest }).info('Manifest: generated');
}
return manifest;
}
@@ -1120,13 +1112,13 @@ export function generateDeployManifests(
});
break;
default:
- logger.warn(`Unknown disk medium type: ${disk.medium}`);
+ getLogger({ medium: disk.medium }).warn(`Disk: unknown medium medium=${disk.medium}`);
}
});
}
}
} else {
- logger.debug('Service disks: %j', service.serviceDisks);
+ getLogger({ serviceDisks: service.serviceDisks }).debug('Processing service disks');
if (service.serviceDisks && service.serviceDisks.length > 0) {
strategy = {
// @ts-ignore
@@ -1199,7 +1191,7 @@ export function generateDeployManifests(
'tags.datadoghq.com/version': buildUUID,
};
- if (build.isStatic) logger.info(`${buildUUID} building static environment`);
+ if (build.isStatic) getLogger().info('Build: static environment=true');
const yamlManifest = yaml.dump(
{
@@ -1484,13 +1476,13 @@ export function generateExternalNameManifests(deploys: Deploy[], buildUUID: stri
return deploys
.filter((deploy) => {
if (deploy.active) {
- logger.debug(`Deploy ${deploy.id} ${deploy.cname}`);
+ getLogger({ deployId: deploy.id, cname: deploy.cname }).debug('Checking deploy for external service');
return deploy.cname !== undefined && deploy.cname !== null;
}
})
.map((deploy) => {
const name = deploy.uuid;
- logger.debug(`Creating external service for ${name}`);
+ getLogger().debug('Creating external service');
return yaml.dump(
{
apiVersion: 'v1',
@@ -1575,7 +1567,7 @@ export async function checkKubernetesStatus(build: Build) {
try {
status += (await shellPromise(command)) + '\n';
} catch (err) {
- logger.debug(`[${build.uuid}] ${command} ==> ${err}`);
+ getLogger({ command, error: err }).debug('Error executing kubectl command');
}
return status;
@@ -1598,7 +1590,7 @@ async function getExistingIngress(ingressName: string, namespace: string): Promi
const response = await k8sApi.readNamespacedIngress(ingressName, namespace);
return response.body;
} catch (error) {
- logger.warn(`Failed to get existing ingress ${ingressName}: ${error}`);
+ getLogger({ ingressName, namespace, error }).warn('Ingress: fetch failed');
return null;
}
}
@@ -1647,9 +1639,9 @@ export async function patchIngress(ingressName: string, bannerSnippet: any, name
`kubectl patch ingress ${ingressName} --namespace ${namespace} --type merge --patch-file ${localPath}`
);
- logger.info(`Successfully patched ingress ${ingressName}`);
+ getLogger({ ingressName, namespace }).info('Deploy: ingress patched');
} catch (error) {
- logger.warn(`Unable to patch ingress ${ingressName}, banner might not work: ${error}`);
+ getLogger({ ingressName, namespace, error }).warn('Ingress: patch failed (banner may not work)');
throw error;
}
}
@@ -1677,7 +1669,7 @@ export async function updateSecret(secretName: string, secretData: Record {
const { namespace } = build;
const deployableName = deploy.deployable?.name || deploy.service?.name || 'unknown';
+ const logCtx = { deployUuid: uuid, service: deployableName, namespace };
+
let retries = 0;
- logger.info(`[DEPLOY ${uuid}] Waiting for pods service=${deployableName} namespace=${namespace}`);
+ getLogger(logCtx).info('Deploy: waiting for pods');
- // Wait up to 5 minutes for pods to be created
while (retries < 60) {
const k8sApi = getK8sApi();
const resp = await k8sApi?.listNamespacedPod(
@@ -2180,7 +2173,6 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise {
`deploy_uuid=${uuid}`
);
const allPods = resp?.body?.items || [];
- // Filter out job pods - we only want deployment/statefulset pods
const pods = allPods.filter((pod) => !pod.metadata?.name?.includes('-deploy-'));
if (pods.length > 0) {
@@ -2192,13 +2184,12 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise {
}
if (retries >= 60) {
- logger.warn(`[DEPLOY ${uuid}] No pods found within 5 minutes service=${deployableName}`);
+ getLogger(logCtx).warn('Pod: not found timeout=5m');
return false;
}
retries = 0;
- // Wait up to 15 minutes for pods to be ready
while (retries < 180) {
const k8sApi = getK8sApi();
const resp = await k8sApi?.listNamespacedPod(
@@ -2210,11 +2201,10 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise {
`deploy_uuid=${uuid}`
);
const allPods = resp?.body?.items || [];
- // Filter out job pods - we only want deployment/statefulset pods
const pods = allPods.filter((pod) => !pod.metadata?.name?.includes('-deploy-'));
if (pods.length === 0) {
- logger.warn(`[DEPLOY ${uuid}] No deployment pods found service=${deployableName}`);
+ getLogger(logCtx).warn('Pod: deployment pods not found');
return false;
}
@@ -2225,7 +2215,7 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise {
});
if (allReady) {
- logger.info(`[DEPLOY ${uuid}] Pods ready service=${deployableName} count=${pods.length}`);
+ getLogger({ ...logCtx, podCount: pods.length }).info('Deploy: pods ready');
return true;
}
@@ -2233,6 +2223,6 @@ export async function waitForDeployPodReady(deploy: Deploy): Promise {
await new Promise((r) => setTimeout(r, 5000));
}
- logger.warn(`[DEPLOY ${uuid}] Pods not ready within 15 minutes service=${deployableName}`);
+ getLogger(logCtx).warn('Pod: not ready timeout=15m');
return false;
}
diff --git a/src/server/lib/kubernetes/JobMonitor.ts b/src/server/lib/kubernetes/JobMonitor.ts
index 213c93c..92975d5 100644
--- a/src/server/lib/kubernetes/JobMonitor.ts
+++ b/src/server/lib/kubernetes/JobMonitor.ts
@@ -15,7 +15,7 @@
*/
import { shellPromise } from '../shell';
-import logger from '../logger';
+import { getLogger } from '../logger';
export interface JobStatus {
logs: string;
@@ -70,7 +70,7 @@ export class JobMonitor {
status,
};
} catch (error) {
- logger.error(`Error monitoring job ${this.jobName}: ${error.message}`);
+ getLogger().error({ error }, `Job: monitor failed name=${this.jobName}`);
return {
logs: logs || `Job monitoring failed: ${error.message}`,
success: false,
@@ -146,12 +146,14 @@ export class JobMonitor {
);
logs += `\n=== Init Container Logs (${initName}) ===\n${initLogs}\n`;
} catch (err: any) {
- logger.debug(`Could not get logs for init container ${initName}: ${err.message || 'Unknown error'}`);
+ getLogger().debug(
+ `K8s: init container logs failed container=${initName} error=${err.message || 'Unknown error'}`
+ );
}
}
}
} catch (error: any) {
- logger.debug(`No init containers found for pod ${podName}: ${error.message || 'Unknown error'}`);
+ getLogger().debug(`K8s: no init containers found pod=${podName} error=${error.message || 'Unknown error'}`);
}
return logs;
@@ -175,8 +177,8 @@ export class JobMonitor {
if (!allContainersReady) {
const waiting = statuses.find((s: any) => s.state.waiting);
if (waiting && waiting.state.waiting.reason) {
- logger.info(
- `Container ${waiting.name} is waiting: ${waiting.state.waiting.reason} - ${
+ getLogger().info(
+ `Container: waiting name=${waiting.name} reason=${waiting.state.waiting.reason} message=${
waiting.state.waiting.message || 'no message'
}`
);
@@ -209,7 +211,7 @@ export class JobMonitor {
containerNames = containerNames.filter((name) => containerFilters.includes(name));
}
} catch (error) {
- logger.warn(`Could not get container names: ${error}`);
+ getLogger().warn({ error }, `Container: names fetch failed`);
}
for (const containerName of containerNames) {
@@ -223,7 +225,7 @@ export class JobMonitor {
logs += `\n=== Container Logs (${containerName}) ===\n${containerLog}\n`;
}
} catch (error: any) {
- logger.warn(`Error getting logs from container ${containerName}: ${error.message}`);
+ getLogger().warn({ error }, `Container: logs fetch failed name=${containerName}`);
logs += `\n=== Container Logs (${containerName}) ===\nError retrieving logs: ${error.message}\n`;
}
}
@@ -252,7 +254,9 @@ export class JobMonitor {
await this.sleep(JobMonitor.POLL_INTERVAL);
}
} catch (error: any) {
- logger.debug(`Job status check failed for ${this.jobName}, will retry: ${error.message || 'Unknown error'}`);
+ getLogger().debug(
+ `Job status check failed for ${this.jobName}, will retry: ${error.message || 'Unknown error'}`
+ );
await this.sleep(JobMonitor.POLL_INTERVAL);
}
}
@@ -276,7 +280,17 @@ export class JobMonitor {
);
if (failedStatus.trim() === 'True') {
- logger.error(`Job ${this.jobName} failed`);
+ const failedReason = await shellPromise(
+ `kubectl get job ${this.jobName} -n ${this.namespace} -o jsonpath='{.status.conditions[?(@.type=="Failed")].reason}'`
+ );
+ const failedMessage = await shellPromise(
+ `kubectl get job ${this.jobName} -n ${this.namespace} -o jsonpath='{.status.conditions[?(@.type=="Failed")].message}'`
+ );
+ getLogger().error(
+ `Job: failed name=${this.jobName} reason=${failedReason.trim() || 'Unknown'} message=${
+ failedMessage.trim() || 'No message'
+ }`
+ );
// Check if job was superseded
try {
@@ -286,13 +300,13 @@ export class JobMonitor {
);
if (annotations === 'superseded-by-retry') {
- logger.info(`${logPrefix || ''} Job ${this.jobName} superseded by newer deployment`);
+ getLogger().info(`K8s: job superseded name=${this.jobName}`);
success = true;
status = 'superseded';
}
} catch (annotationError: any) {
- logger.debug(
- `Could not check supersession annotation for job ${this.jobName}: ${
+ getLogger().debug(
+ `K8s: supersession annotation check failed job=${this.jobName} error=${
annotationError.message || 'Unknown error'
}`
);
@@ -302,7 +316,7 @@ export class JobMonitor {
status = 'succeeded';
}
} catch (error) {
- logger.error(`Failed to check job status for ${this.jobName}:`, error);
+ getLogger().error({ error }, `Job: status check failed name=${this.jobName}`);
}
return { success, status };
diff --git a/src/server/lib/kubernetes/common/serviceAccount.ts b/src/server/lib/kubernetes/common/serviceAccount.ts
index 3081126..032099f 100644
--- a/src/server/lib/kubernetes/common/serviceAccount.ts
+++ b/src/server/lib/kubernetes/common/serviceAccount.ts
@@ -15,11 +15,9 @@
*/
import GlobalConfigService from 'server/services/globalConfig';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { setupServiceAccountInNamespace } from '../../nativeHelm/utils';
-const logger = rootLogger.child({ filename: 'lib/kubernetes/serviceAccount.ts' });
-
export async function ensureServiceAccountForJob(
namespace: string,
jobType: 'build' | 'deploy' | 'webhook'
@@ -28,8 +26,8 @@ export async function ensureServiceAccountForJob(
const serviceAccountName = serviceAccount?.name || 'default';
const role = serviceAccount?.role || 'default';
- logger.info(
- `Setting up service account for ${jobType} job: namespace=${namespace} serviceAccount=${serviceAccountName} role=${role}`
+ getLogger().info(
+ `ServiceAccount: setting up for job type=${jobType} namespace=${namespace} serviceAccount=${serviceAccountName} role=${role}`
);
await setupServiceAccountInNamespace(namespace, serviceAccountName, role);
diff --git a/src/server/lib/kubernetes/getDeploymentJobs.ts b/src/server/lib/kubernetes/getDeploymentJobs.ts
index 48294d1..0f65b2e 100644
--- a/src/server/lib/kubernetes/getDeploymentJobs.ts
+++ b/src/server/lib/kubernetes/getDeploymentJobs.ts
@@ -1,5 +1,5 @@
import * as k8s from '@kubernetes/client-node';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
export interface DeploymentJobInfo {
jobName: string;
@@ -14,10 +14,6 @@ export interface DeploymentJobInfo {
deploymentType: 'helm' | 'github';
}
-const logger = rootLogger.child({
- filename: __filename,
-});
-
export async function getDeploymentJobs(serviceName: string, namespace: string): Promise {
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
@@ -111,7 +107,7 @@ export async function getDeploymentJobs(serviceName: string, namespace: string):
}
}
} catch (podError) {
- logger.warn(`Failed to get pods for job ${jobName}:`, podError);
+ getLogger().warn({ error: podError }, `K8s: failed to get pods jobName=${jobName}`);
}
}
@@ -137,7 +133,7 @@ export async function getDeploymentJobs(serviceName: string, namespace: string):
return deploymentJobs;
} catch (error) {
- logger.error(`Error listing deployment jobs for service ${serviceName}:`, error);
+ getLogger().error({ error }, `K8s: failed to list deployment jobs service=${serviceName}`);
throw error;
}
}
diff --git a/src/server/lib/kubernetes/getNativeBuildJobs.ts b/src/server/lib/kubernetes/getNativeBuildJobs.ts
index de53b64..5897b5f 100644
--- a/src/server/lib/kubernetes/getNativeBuildJobs.ts
+++ b/src/server/lib/kubernetes/getNativeBuildJobs.ts
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import * as k8s from '@kubernetes/client-node';
export interface BuildJobInfo {
@@ -30,10 +30,6 @@ export interface BuildJobInfo {
podName?: string;
}
-const logger = rootLogger.child({
- filename: __filename,
-});
-
export async function getNativeBuildJobs(serviceName: string, namespace: string): Promise {
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
@@ -115,7 +111,7 @@ export async function getNativeBuildJobs(serviceName: string, namespace: string)
}
}
} catch (podError) {
- logger.warn(`Failed to get pods for job ${jobName}:`, podError);
+ getLogger().warn({ error: podError }, `K8s: failed to get pods jobName=${jobName}`);
}
}
@@ -141,7 +137,7 @@ export async function getNativeBuildJobs(serviceName: string, namespace: string)
return buildJobs;
} catch (error) {
- logger.error(`Error listing native build jobs for service ${serviceName}:`, error);
+ getLogger().error({ error }, `K8s: failed to list build jobs service=${serviceName}`);
throw error;
}
}
diff --git a/src/server/lib/kubernetes/rbac.ts b/src/server/lib/kubernetes/rbac.ts
index 6f8d6ab..f79d11c 100644
--- a/src/server/lib/kubernetes/rbac.ts
+++ b/src/server/lib/kubernetes/rbac.ts
@@ -16,7 +16,7 @@
import { V1ServiceAccount, V1Role, V1RoleBinding } from '@kubernetes/client-node';
import * as k8s from '@kubernetes/client-node';
-import logger from '../logger';
+import { getLogger } from '../logger';
export interface RBACConfig {
namespace: string;
@@ -77,7 +77,7 @@ export async function setupServiceAccountWithRBAC(config: RBACConfig): Promise setTimeout(resolve, 5000));
attempts++;
} catch (error) {
- logger.error(`Error monitoring job ${jobName}: ${error}`);
+ getLogger({ error }).error(`Job: monitor failed name=${jobName}`);
throw error;
}
}
diff --git a/src/server/lib/kubernetesApply/logs.ts b/src/server/lib/kubernetesApply/logs.ts
index 882512f..10b9001 100644
--- a/src/server/lib/kubernetesApply/logs.ts
+++ b/src/server/lib/kubernetesApply/logs.ts
@@ -16,9 +16,7 @@
import * as k8s from '@kubernetes/client-node';
import { Deploy } from 'server/models';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ filename: 'lib/kubernetesApply/logs.ts' });
+import { getLogger } from 'server/lib/logger';
/**
* Fetches logs from a Kubernetes apply job for a deploy
@@ -105,14 +103,14 @@ export async function getKubernetesApplyLogs(deploy: Deploy, tail?: number): Pro
allLogs.push(`=== Logs from pod ${podName} ===\n${podLogs.body}`);
}
} catch (podError) {
- logger.error(`Failed to fetch logs from pod ${podName}: ${podError}`);
+ getLogger({ error: podError }).error(`Pod: log fetch failed name=${podName}`);
allLogs.push(`=== Error fetching logs from pod ${podName} ===\n${(podError as Error).message || podError}`);
}
}
return allLogs.join('\n\n') || 'No logs available';
} catch (error) {
- logger.error(`Failed to fetch logs for deploy ${deploy.uuid}: ${error}`);
+ getLogger({ error }).error('Logs: fetch failed');
return `Failed to fetch logs: ${(error as Error).message || error}`;
}
}
@@ -245,7 +243,7 @@ export async function streamKubernetesApplyLogs(
onClose();
}
} catch (error) {
- logger.error(`Error polling logs for deploy ${deploy.uuid}: ${error}`);
+ getLogger({ error }).error('Logs: poll failed');
if ((error as any).response?.statusCode === 404) {
// Pod was deleted, stop polling
isActive = false;
@@ -263,7 +261,7 @@ export async function streamKubernetesApplyLogs(
clearInterval(pollInterval);
};
} catch (error) {
- logger.error(`Failed to start log stream for deploy ${deploy.uuid}: ${error}`);
+ getLogger({ error }).error('Logs: stream start failed');
onError(error as Error);
onClose();
return () => {};
diff --git a/src/server/lib/logStreamingHelper.ts b/src/server/lib/logStreamingHelper.ts
index 17980bd..774a422 100644
--- a/src/server/lib/logStreamingHelper.ts
+++ b/src/server/lib/logStreamingHelper.ts
@@ -14,15 +14,11 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import * as k8s from '@kubernetes/client-node';
import { StreamingInfo, LogSourceStatus, K8sPodInfo, K8sContainerInfo } from 'shared/types';
import { HttpError, V1ContainerStatus } from '@kubernetes/client-node';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
/**
* Reusable logic to get log streaming info for a specific Kubernetes job name,
* using the provided namespace.
@@ -32,7 +28,7 @@ export async function getLogStreamingInfoForJob(
namespace: string
): Promise {
if (!jobName) {
- logger.warn(`Job name not provided. Cannot get logs.`);
+ getLogger().warn('LogStreaming: job name not provided');
const statusResponse: LogSourceStatus = {
status: 'Unavailable',
streamingRequired: false,
@@ -45,7 +41,7 @@ export async function getLogStreamingInfoForJob(
try {
podInfo = await getK8sJobStatusAndPod(jobName, namespace);
} catch (k8sError: any) {
- logger.error({ k8sError }, `Error calling getK8sJobStatusAndPod for ${jobName}.`);
+ getLogger().error({ error: k8sError }, `LogStreaming: job status fetch failed jobName=${jobName}`);
const errorStatus: LogSourceStatus = {
status: 'Unknown',
streamingRequired: false,
@@ -125,30 +121,28 @@ export async function getLogStreamingInfoForJob(
* @returns A promise resolving to K8sPodInfo containing status and container info, or null if not found/error.
*/
export async function getK8sJobStatusAndPod(jobName: string, namespace: string): Promise {
- const logCtx = { jobName, namespace };
-
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const coreV1Api = kc.makeApiClient(k8s.CoreV1Api);
const batchV1Api = kc.makeApiClient(k8s.BatchV1Api);
try {
- logger.debug(logCtx, `Reading Job details for namespace: ${namespace} and jobName: ${jobName}`);
+ getLogger().debug(`LogStreaming: reading job details namespace=${namespace} jobName=${jobName}`);
const jobResponse = await batchV1Api.readNamespacedJob(jobName, namespace);
const job = jobResponse.body;
if (!job?.spec?.selector?.matchLabels) {
if (job?.status?.succeeded) {
- logger.warn(logCtx, 'Job succeeded but selector missing.');
+ getLogger().warn(`LogStreaming: job succeeded but selector missing jobName=${jobName} namespace=${namespace}`);
return { podName: null, namespace, status: 'Succeeded', containers: [] };
}
if (job?.status?.failed) {
- logger.warn(logCtx, 'Job failed but selector missing.');
+ getLogger().warn(`LogStreaming: job failed but selector missing jobName=${jobName} namespace=${namespace}`);
const failedCondition = job.status.conditions?.find((c) => c.type === 'Failed' && c.status === 'True');
const failureMessage = failedCondition?.message || 'Job failed';
return { podName: null, namespace, status: 'Failed', containers: [], message: failureMessage };
}
- logger.error(logCtx, 'Job found, but missing spec.selector.matchLabels. Cannot find associated pods.');
+ getLogger().error(`LogStreaming: job found but missing selector jobName=${jobName} namespace=${namespace}`);
return { podName: null, namespace, status: 'Unknown', containers: [] };
}
@@ -156,7 +150,9 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
.map(([key, value]) => `${key}=${value}`)
.join(',');
- logger.debug({ ...logCtx, labelSelector }, 'Listing Pods with label selector');
+ getLogger().debug(
+ `LogStreaming: listing pods jobName=${jobName} namespace=${namespace} labelSelector=${labelSelector}`
+ );
const podListResponse = await coreV1Api.listNamespacedPod(
namespace,
@@ -169,7 +165,7 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
const pods = podListResponse.body.items;
if (!pods || pods.length === 0) {
- logger.warn(logCtx, 'No pods found matching the job selector.');
+ getLogger().warn(`LogStreaming: no pods found matching job selector jobName=${jobName} namespace=${namespace}`);
const jobStatus = job.status;
if (jobStatus?.succeeded && jobStatus.succeeded > 0) {
return { podName: null, namespace, status: 'Succeeded', containers: [] };
@@ -178,7 +174,9 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
const failedCondition = jobStatus.conditions?.find((c) => c.type === 'Failed' && c.status === 'True');
const failureReason = failedCondition?.reason || 'Failed';
const failureMessage = failedCondition?.message || 'Job failed';
- logger.warn({ ...logCtx, failureReason }, 'Job indicates failure, but no pods found.');
+ getLogger().warn(
+ `LogStreaming: job indicates failure but no pods found jobName=${jobName} namespace=${namespace} reason=${failureReason}`
+ );
return { podName: null, namespace, status: 'Failed', containers: [], message: failureMessage };
}
return { podName: null, namespace, status: 'NotFound', containers: [] };
@@ -190,11 +188,11 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
const latestPod = pods[0];
if (!latestPod?.metadata?.name || !latestPod?.status) {
- logger.error(logCtx, 'Found pod(s), but latest pod is missing metadata or status.');
+ getLogger().error(`LogStreaming: pod missing metadata or status jobName=${jobName} namespace=${namespace}`);
return null;
}
const podName = latestPod.metadata.name;
- logger.debug({ ...logCtx, podName }, 'Found latest pod');
+ getLogger().debug(`LogStreaming: found latest pod jobName=${jobName} namespace=${namespace} podName=${podName}`);
let podStatus: K8sPodInfo['status'] = 'Unknown';
const phase = latestPod.status.phase;
@@ -259,7 +257,7 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
return result;
} catch (error: any) {
if (error instanceof HttpError && error.response?.statusCode === 404) {
- logger.warn(logCtx, `Job or associated resource not found (404) ${error.message}`);
+ getLogger().warn(`LogStreaming: job not found jobName=${jobName} namespace=${namespace} error=${error.message}`);
return {
podName: null,
namespace,
@@ -268,7 +266,9 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
message: 'Job no longer exists. Logs have been cleaned up after 24 hours.',
};
}
- logger.error({ ...logCtx, err: error }, 'Error getting K8s job/pod status');
+ getLogger().error(
+ `LogStreaming: error getting job/pod status jobName=${jobName} namespace=${namespace} error=${error.message}`
+ );
return null;
}
}
@@ -280,9 +280,7 @@ export async function getK8sJobStatusAndPod(jobName: string, namespace: string):
* @returns A promise resolving to K8sPodInfo containing pod status and container info.
*/
export async function getK8sPodContainers(podName: string, namespace: string = 'lifecycle-app'): Promise {
- const logCtx = { podName, namespace };
-
- logger.debug(logCtx, 'Fetching container information for pod');
+ getLogger().debug(`LogStreaming: fetching container info podName=${podName} namespace=${namespace}`);
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
const coreV1Api = kc.makeApiClient(k8s.CoreV1Api);
@@ -351,7 +349,7 @@ export async function getK8sPodContainers(podName: string, namespace: string = '
};
} catch (error: any) {
if (error instanceof HttpError && error.response?.statusCode === 404) {
- logger.warn(logCtx, `Pod not found (404): ${error.message}`);
+ getLogger().warn(`LogStreaming: pod not found podName=${podName} namespace=${namespace} error=${error.message}`);
return {
podName: null,
namespace,
@@ -361,7 +359,9 @@ export async function getK8sPodContainers(podName: string, namespace: string = '
};
}
- logger.error({ ...logCtx, err: error }, 'Error getting container information');
+ getLogger().error(
+ `LogStreaming: error getting container info podName=${podName} namespace=${namespace} error=${error.message}`
+ );
throw error;
}
}
diff --git a/src/server/lib/logger/__tests__/context.test.ts b/src/server/lib/logger/__tests__/context.test.ts
new file mode 100644
index 0000000..1271f7a
--- /dev/null
+++ b/src/server/lib/logger/__tests__/context.test.ts
@@ -0,0 +1,148 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { getLogContext, withLogContext, updateLogContext, extractContextForQueue } from '../context';
+
+describe('Logger Context', () => {
+ describe('getLogContext', () => {
+ it('should return empty object when no context is set', () => {
+ const context = getLogContext();
+ expect(context).toEqual({});
+ });
+ });
+
+ describe('withLogContext', () => {
+ it('should set context and make it available inside the callback', async () => {
+ const correlationId = 'test-correlation-id';
+
+ await withLogContext({ correlationId }, async () => {
+ const context = getLogContext();
+ expect(context.correlationId).toBe(correlationId);
+ });
+ });
+
+ it('should merge parent context with new context', async () => {
+ const parentCorrelationId = 'parent-id';
+ const buildUuid = 'build-123';
+
+ await withLogContext({ correlationId: parentCorrelationId }, async () => {
+ await withLogContext({ buildUuid }, async () => {
+ const context = getLogContext();
+ expect(context.correlationId).toBe(parentCorrelationId);
+ expect(context.buildUuid).toBe(buildUuid);
+ });
+ });
+ });
+
+ it('should use child correlationId if provided', async () => {
+ const parentCorrelationId = 'parent-id';
+ const childCorrelationId = 'child-id';
+
+ await withLogContext({ correlationId: parentCorrelationId }, async () => {
+ await withLogContext({ correlationId: childCorrelationId }, async () => {
+ const context = getLogContext();
+ expect(context.correlationId).toBe(childCorrelationId);
+ });
+ });
+ });
+
+ it('should default to "unknown" correlationId if none provided', async () => {
+ await withLogContext({}, async () => {
+ const context = getLogContext();
+ expect(context.correlationId).toBe('unknown');
+ });
+ });
+
+ it('should work with synchronous functions', () => {
+ const correlationId = 'sync-test';
+
+ const result = withLogContext({ correlationId }, () => {
+ const context = getLogContext();
+ expect(context.correlationId).toBe(correlationId);
+ return 'sync-result';
+ });
+
+ expect(result).toBe('sync-result');
+ });
+
+ it('should return value from async callback', async () => {
+ const result = await withLogContext({ correlationId: 'test' }, async () => {
+ return 'async-result';
+ });
+
+ expect(result).toBe('async-result');
+ });
+ });
+
+ describe('updateLogContext', () => {
+ it('should update context within withLogContext', async () => {
+ await withLogContext({ correlationId: 'initial' }, async () => {
+ updateLogContext({ buildUuid: 'new-build' });
+
+ const context = getLogContext();
+ expect(context.correlationId).toBe('initial');
+ expect(context.buildUuid).toBe('new-build');
+ });
+ });
+
+ it('should not throw when called outside withLogContext', () => {
+ expect(() => {
+ updateLogContext({ buildUuid: 'test' });
+ }).not.toThrow();
+ });
+ });
+
+ describe('extractContextForQueue', () => {
+ it('should extract only queue-relevant fields', async () => {
+ await withLogContext(
+ {
+ correlationId: 'corr-123',
+ buildUuid: 'build-456',
+ deployUuid: 'deploy-789',
+ service: 'my-service',
+ stage: 'webhook.received',
+ repo: 'owner/repo',
+ pr: 42,
+ branch: 'feature-branch',
+ sha: 'abc1234',
+ },
+ async () => {
+ const queueData = extractContextForQueue();
+
+ expect(queueData).toEqual({
+ correlationId: 'corr-123',
+ buildUuid: 'build-456',
+ deployUuid: 'deploy-789',
+ repo: 'owner/repo',
+ pr: 42,
+ branch: 'feature-branch',
+ sha: 'abc1234',
+ });
+
+ expect(queueData).not.toHaveProperty('service');
+ expect(queueData).not.toHaveProperty('stage');
+ }
+ );
+ });
+
+ it('should return undefined values for missing fields', () => {
+ const queueData = extractContextForQueue();
+
+ expect(queueData.correlationId).toBeUndefined();
+ expect(queueData.buildUuid).toBeUndefined();
+ });
+ });
+});
diff --git a/src/server/lib/logger/__tests__/contextLogger.test.ts b/src/server/lib/logger/__tests__/contextLogger.test.ts
new file mode 100644
index 0000000..73d5f60
--- /dev/null
+++ b/src/server/lib/logger/__tests__/contextLogger.test.ts
@@ -0,0 +1,170 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { withLogContext } from '../context';
+
+const mockChild = jest.fn().mockReturnValue({
+ info: jest.fn(),
+ warn: jest.fn(),
+ error: jest.fn(),
+ debug: jest.fn(),
+});
+
+jest.mock('../rootLogger', () => ({
+ __esModule: true,
+ default: {
+ child: (...args: unknown[]) => mockChild(...args),
+ },
+}));
+
+jest.mock('dd-trace', () => ({
+ scope: jest.fn(() => ({
+ active: jest.fn(() => null),
+ })),
+}));
+
+import { getLogger } from '../contextLogger';
+
+describe('contextLogger', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ describe('getLogger', () => {
+ it('should pass AsyncLocalStorage context to logger.child()', async () => {
+ await withLogContext(
+ {
+ correlationId: 'test-corr-id',
+ buildUuid: 'build-123',
+ deployUuid: 'deploy-456',
+ repo: 'owner/repo',
+ pr: 42,
+ branch: 'feature-branch',
+ },
+ async () => {
+ getLogger();
+
+ expect(mockChild).toHaveBeenCalledWith(
+ expect.objectContaining({
+ correlationId: 'test-corr-id',
+ buildUuid: 'build-123',
+ deployUuid: 'deploy-456',
+ repo: 'owner/repo',
+ pr: 42,
+ branch: 'feature-branch',
+ })
+ );
+ }
+ );
+ });
+
+ it('should merge extra params with async context', async () => {
+ await withLogContext(
+ {
+ correlationId: 'test-corr-id',
+ buildUuid: 'build-123',
+ },
+ async () => {
+ getLogger({ stage: 'webhook.received', customField: 'custom-value' });
+
+ expect(mockChild).toHaveBeenCalledWith(
+ expect.objectContaining({
+ correlationId: 'test-corr-id',
+ buildUuid: 'build-123',
+ stage: 'webhook.received',
+ customField: 'custom-value',
+ })
+ );
+ }
+ );
+ });
+
+ it('should allow extra params to override async context stage', async () => {
+ await withLogContext(
+ {
+ correlationId: 'test-corr-id',
+ stage: 'original-stage',
+ },
+ async () => {
+ getLogger({ stage: 'overridden-stage' });
+
+ expect(mockChild).toHaveBeenCalledWith(
+ expect.objectContaining({
+ stage: 'overridden-stage',
+ })
+ );
+ }
+ );
+ });
+
+ it('should filter out undefined values from context', async () => {
+ await withLogContext(
+ {
+ correlationId: 'test-corr-id',
+ },
+ async () => {
+ getLogger();
+
+ const passedContext = mockChild.mock.calls[0][0];
+
+ expect(passedContext).toHaveProperty('correlationId', 'test-corr-id');
+ expect(passedContext).not.toHaveProperty('buildUuid');
+ expect(passedContext).not.toHaveProperty('deployUuid');
+ expect(passedContext).not.toHaveProperty('service');
+ }
+ );
+ });
+
+ it('should work outside of withLogContext with minimal context', () => {
+ getLogger({ stage: 'test-stage' });
+
+ const passedContext = mockChild.mock.calls[0][0];
+
+ expect(passedContext).toHaveProperty('stage', 'test-stage');
+ expect(passedContext).not.toHaveProperty('correlationId');
+ });
+
+ it('should include dd-trace context when span is active', async () => {
+ const tracer = require('dd-trace');
+ tracer.scope.mockReturnValueOnce({
+ active: jest.fn(() => ({
+ context: () => ({
+ toTraceId: () => 'trace-123',
+ toSpanId: () => 'span-456',
+ }),
+ })),
+ });
+
+ getLogger();
+
+ expect(mockChild).toHaveBeenCalledWith(
+ expect.objectContaining({
+ 'dd.trace_id': 'trace-123',
+ 'dd.span_id': 'span-456',
+ })
+ );
+ });
+
+ it('should not include dd-trace context when no span is active', () => {
+ getLogger();
+
+ const passedContext = mockChild.mock.calls[0][0];
+
+ expect(passedContext).not.toHaveProperty('dd.trace_id');
+ expect(passedContext).not.toHaveProperty('dd.span_id');
+ });
+ });
+});
diff --git a/src/server/lib/logger/__tests__/spans.test.ts b/src/server/lib/logger/__tests__/spans.test.ts
new file mode 100644
index 0000000..c6c297b
--- /dev/null
+++ b/src/server/lib/logger/__tests__/spans.test.ts
@@ -0,0 +1,112 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { withSpan } from '../spans';
+import { withLogContext } from '../context';
+
+const mockSetTag = jest.fn();
+const mockSpan = {
+ setTag: mockSetTag,
+};
+
+jest.mock('dd-trace', () => ({
+ trace: jest.fn((_name, _options, fn) => fn(mockSpan)),
+}));
+
+describe('spans', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ describe('withSpan', () => {
+ it('should execute the function and return its result', async () => {
+ const result = await withSpan('test.operation', async () => {
+ return 'test-result';
+ });
+
+ expect(result).toBe('test-result');
+ });
+
+ it('should set success tag on successful completion', async () => {
+ await withSpan('test.operation', async () => {
+ return 'success';
+ });
+
+ expect(mockSetTag).toHaveBeenCalledWith('lifecycle.success', true);
+ });
+
+ it('should set error tags on failure and rethrow', async () => {
+ const testError = new Error('Test error');
+
+ await expect(
+ withSpan('test.operation', async () => {
+ throw testError;
+ })
+ ).rejects.toThrow('Test error');
+
+ expect(mockSetTag).toHaveBeenCalledWith('error', true);
+ expect(mockSetTag).toHaveBeenCalledWith('lifecycle.success', false);
+ expect(mockSetTag).toHaveBeenCalledWith('error.message', 'Test error');
+ });
+
+ it('should include context from AsyncLocalStorage', async () => {
+ const tracer = require('dd-trace');
+
+ await withLogContext(
+ {
+ correlationId: 'corr-123',
+ buildUuid: 'build-456',
+ repo: 'owner/repo',
+ },
+ async () => {
+ await withSpan('test.operation', async () => 'result');
+ }
+ );
+
+ expect(tracer.trace).toHaveBeenCalledWith(
+ 'test.operation',
+ expect.objectContaining({
+ tags: expect.objectContaining({
+ 'lifecycle.correlation_id': 'corr-123',
+ 'lifecycle.build_uuid': 'build-456',
+ 'lifecycle.repo': 'owner/repo',
+ }),
+ }),
+ expect.any(Function)
+ );
+ });
+
+ it('should accept custom resource and tags', async () => {
+ const tracer = require('dd-trace');
+
+ await withSpan('test.operation', async () => 'result', {
+ resource: 'custom-resource',
+ tags: { customTag: 'customValue' },
+ });
+
+ expect(tracer.trace).toHaveBeenCalledWith(
+ 'test.operation',
+ expect.objectContaining({
+ resource: 'custom-resource',
+ tags: expect.objectContaining({
+ customTag: 'customValue',
+ }),
+ }),
+ expect.any(Function)
+ );
+ });
+ });
+});
diff --git a/src/server/lib/logger/__tests__/stages.test.ts b/src/server/lib/logger/__tests__/stages.test.ts
new file mode 100644
index 0000000..d57f0c4
--- /dev/null
+++ b/src/server/lib/logger/__tests__/stages.test.ts
@@ -0,0 +1,118 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { LogStage, LogStageType } from '../stages';
+
+describe('LogStage', () => {
+ it('should export all required webhook stages', () => {
+ const webhookStages = ['WEBHOOK_RECEIVED', 'WEBHOOK_QUEUED', 'WEBHOOK_PROCESSING', 'WEBHOOK_SKIPPED'];
+ for (const stage of webhookStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required build stages', () => {
+ const buildStages = [
+ 'BUILD_CREATED',
+ 'BUILD_QUEUED',
+ 'BUILD_STARTING',
+ 'BUILD_IMAGE_BUILDING',
+ 'BUILD_IMAGE_PUSHING',
+ 'BUILD_COMPLETE',
+ 'BUILD_FAILED',
+ ];
+ for (const stage of buildStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required deploy stages', () => {
+ const deployStages = [
+ 'DEPLOY_QUEUED',
+ 'DEPLOY_STARTING',
+ 'DEPLOY_HELM_INSTALLING',
+ 'DEPLOY_HELM_COMPLETE',
+ 'DEPLOY_COMPLETE',
+ 'DEPLOY_FAILED',
+ ];
+ for (const stage of deployStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required cleanup stages', () => {
+ const cleanupStages = ['CLEANUP_STARTING', 'CLEANUP_COMPLETE', 'CLEANUP_FAILED'];
+ for (const stage of cleanupStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required label stages', () => {
+ const labelStages = ['LABEL_PROCESSING', 'LABEL_COMPLETE', 'LABEL_FAILED'];
+ for (const stage of labelStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required comment stages', () => {
+ const commentStages = ['COMMENT_PROCESSING', 'COMMENT_COMPLETE', 'COMMENT_FAILED'];
+ for (const stage of commentStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required config stages', () => {
+ const configStages = ['CONFIG_REFRESH', 'CONFIG_FAILED'];
+ for (const stage of configStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should export all required ingress stages', () => {
+ const ingressStages = ['INGRESS_PROCESSING', 'INGRESS_COMPLETE', 'INGRESS_FAILED'];
+ for (const stage of ingressStages) {
+ expect(LogStage).toHaveProperty(stage);
+ expect(typeof LogStage[stage as keyof typeof LogStage]).toBe('string');
+ }
+ });
+
+ it('should have stage values following dot-notation convention', () => {
+ const allValues = Object.values(LogStage);
+
+ for (const value of allValues) {
+ expect(value).toMatch(/^[a-z]+\.[a-z.]+$/);
+ }
+ });
+
+ it('should allow LogStageType to accept any LogStage value', () => {
+ const assignStage = (stage: LogStageType): string => stage;
+
+ expect(assignStage(LogStage.WEBHOOK_RECEIVED)).toBe('webhook.received');
+ expect(assignStage(LogStage.BUILD_COMPLETE)).toBe('build.complete');
+ expect(assignStage(LogStage.DEPLOY_FAILED)).toBe('deploy.failed');
+ expect(assignStage(LogStage.LABEL_PROCESSING)).toBe('label.processing');
+ expect(assignStage(LogStage.COMMENT_COMPLETE)).toBe('comment.complete');
+ expect(assignStage(LogStage.CONFIG_REFRESH)).toBe('config.refresh');
+ expect(assignStage(LogStage.INGRESS_COMPLETE)).toBe('ingress.complete');
+ });
+});
diff --git a/src/server/lib/logger/context.ts b/src/server/lib/logger/context.ts
new file mode 100644
index 0000000..0d61f79
--- /dev/null
+++ b/src/server/lib/logger/context.ts
@@ -0,0 +1,102 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const { AsyncLocalStorage } = require('async_hooks') as {
+ AsyncLocalStorage: new () => {
+ getStore(): T | undefined;
+ run(store: T, callback: () => R): R;
+ };
+};
+import type { LogContext, JobDataWithContext } from './types';
+import tracer from 'dd-trace';
+
+const asyncLocalStorage = new AsyncLocalStorage();
+
+export function getLogContext(): Partial {
+ return asyncLocalStorage.getStore() || {};
+}
+
+type ContextWithTrace = Partial & { _ddTraceContext?: Record };
+
+export function withLogContext(context: ContextWithTrace, fn: () => T | Promise): T | Promise {
+ const parentContext = getLogContext();
+ const mergedContext: LogContext = {
+ ...parentContext,
+ ...context,
+ correlationId: context.correlationId || parentContext.correlationId || 'unknown',
+ };
+
+ const runWithContext = () => asyncLocalStorage.run(mergedContext, fn);
+
+ if (
+ context._ddTraceContext &&
+ Object.keys(context._ddTraceContext).length > 0 &&
+ typeof tracer?.scope === 'function'
+ ) {
+ const parentSpanContext = tracer.extract('text_map', context._ddTraceContext);
+ if (parentSpanContext) {
+ const span = tracer.startSpan('queue.process', { childOf: parentSpanContext });
+ span.setTag('correlationId', mergedContext.correlationId);
+ if (mergedContext.buildUuid) span.setTag('buildUuid', mergedContext.buildUuid);
+ if (mergedContext.deployUuid) span.setTag('deployUuid', mergedContext.deployUuid);
+
+ return tracer.scope().activate(span, () => {
+ const result = runWithContext();
+ if (result instanceof Promise) {
+ return result.finally(() => span.finish()) as T | Promise;
+ }
+ span.finish();
+ return result;
+ });
+ }
+ }
+
+ return runWithContext();
+}
+
+export function updateLogContext(updates: Partial): void {
+ const current = asyncLocalStorage.getStore();
+ if (current) {
+ Object.assign(current, updates);
+ }
+}
+
+export function extractContextForQueue(): JobDataWithContext {
+ const ctx = getLogContext();
+
+ let traceContext: Record | undefined;
+ if (typeof tracer?.scope === 'function') {
+ const activeSpan = tracer.scope().active();
+ if (activeSpan) {
+ traceContext = {};
+ tracer.inject(activeSpan, 'text_map', traceContext);
+ }
+ }
+
+ return {
+ correlationId: ctx.correlationId,
+ buildUuid: ctx.buildUuid,
+ deployUuid: ctx.deployUuid,
+ serviceName: ctx.serviceName,
+ sender: ctx.sender,
+ repo: ctx.repo,
+ pr: ctx.pr,
+ branch: ctx.branch,
+ sha: ctx.sha,
+ _ddTraceContext: traceContext,
+ };
+}
diff --git a/src/server/lib/logger/contextLogger.ts b/src/server/lib/logger/contextLogger.ts
new file mode 100644
index 0000000..065be4d
--- /dev/null
+++ b/src/server/lib/logger/contextLogger.ts
@@ -0,0 +1,55 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import tracer from 'dd-trace';
+import rootLogger from './rootLogger';
+import { getLogContext } from './context';
+import type { LogContext } from './types';
+
+function getTraceContext(): { traceId?: string; spanId?: string } {
+ if (typeof tracer?.scope !== 'function') return {};
+ const span = tracer.scope()?.active();
+ if (!span) return {};
+ const context = span.context();
+ return {
+ traceId: context.toTraceId(),
+ spanId: context.toSpanId(),
+ };
+}
+
+export function getLogger(extra?: Partial & Record) {
+ const asyncContext = getLogContext();
+ const traceContext = getTraceContext();
+
+ const fullContext: Record = {
+ correlationId: asyncContext.correlationId,
+ buildUuid: asyncContext.buildUuid,
+ deployUuid: asyncContext.deployUuid,
+ serviceName: asyncContext.serviceName,
+ sender: asyncContext.sender,
+ stage: extra?.stage || asyncContext.stage,
+ repo: asyncContext.repo,
+ pr: asyncContext.pr,
+ branch: asyncContext.branch,
+ 'dd.trace_id': traceContext.traceId,
+ 'dd.span_id': traceContext.spanId,
+ ...extra,
+ };
+
+ const cleanContext = Object.fromEntries(Object.entries(fullContext).filter(([_, v]) => v !== undefined));
+
+ return rootLogger.child(cleanContext);
+}
diff --git a/src/server/lib/logger/index.ts b/src/server/lib/logger/index.ts
new file mode 100644
index 0000000..774291e
--- /dev/null
+++ b/src/server/lib/logger/index.ts
@@ -0,0 +1,23 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export { default as rootLogger } from './rootLogger';
+export { getLogContext, withLogContext, updateLogContext, extractContextForQueue } from './context';
+export { getLogger } from './contextLogger';
+export { withSpan } from './spans';
+export { LogStage } from './stages';
+export type { LogContext, JobDataWithContext } from './types';
+export type { LogStageType } from './stages';
diff --git a/src/server/lib/logger.ts b/src/server/lib/logger/rootLogger.ts
similarity index 62%
rename from src/server/lib/logger.ts
rename to src/server/lib/logger/rootLogger.ts
index fca83df..45b51da 100644
--- a/src/server/lib/logger.ts
+++ b/src/server/lib/logger/rootLogger.ts
@@ -16,7 +16,7 @@
import pino from 'pino';
import pinoCaller from 'pino-caller';
-import { LOG_LEVEL } from '../../shared/config';
+import { LOG_LEVEL } from '../../../shared/config';
export const enabled = process.env.PINO_LOGGER === 'false' ? false : true;
export const level = LOG_LEVEL || 'info';
@@ -31,9 +31,32 @@ const transport = {
},
};
+const serializers = {
+ error: (value: unknown): Record | string => {
+ if (value instanceof Error) {
+ return {
+ type: value.name,
+ message: value.message,
+ stack: value.stack,
+ ...((value as any).code && { code: (value as any).code }),
+ ...((value as any).statusCode && { statusCode: (value as any).statusCode }),
+ };
+ }
+ if (typeof value === 'object' && value !== null) {
+ try {
+ return JSON.stringify(value);
+ } catch {
+ return '[Unserializable Object]';
+ }
+ }
+ return String(value);
+ },
+};
+
let rootLogger = pino({
level,
enabled,
+ serializers,
...(pinoPretty ? transport : {}),
});
diff --git a/src/server/lib/logger/spans.ts b/src/server/lib/logger/spans.ts
new file mode 100644
index 0000000..20d7623
--- /dev/null
+++ b/src/server/lib/logger/spans.ts
@@ -0,0 +1,58 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import tracer from 'dd-trace';
+import { getLogContext } from './context';
+
+export interface SpanOptions {
+ resource?: string;
+ tags?: Record;
+}
+
+export async function withSpan(operationName: string, fn: () => Promise, options: SpanOptions = {}): Promise {
+ if (typeof tracer?.trace !== 'function') {
+ return fn();
+ }
+
+ const context = getLogContext();
+
+ return tracer.trace(
+ operationName,
+ {
+ resource: options.resource,
+ tags: {
+ 'lifecycle.correlation_id': context.correlationId,
+ 'lifecycle.build_uuid': context.buildUuid,
+ 'lifecycle.deploy_uuid': context.deployUuid,
+ 'lifecycle.repo': context.repo,
+ 'lifecycle.pr': context.pr,
+ ...options.tags,
+ },
+ },
+ async (span) => {
+ try {
+ const result = await fn();
+ span?.setTag('lifecycle.success', true);
+ return result;
+ } catch (error) {
+ span?.setTag('error', true);
+ span?.setTag('lifecycle.success', false);
+ span?.setTag('error.message', error instanceof Error ? error.message : String(error));
+ throw error;
+ }
+ }
+ );
+}
diff --git a/src/server/lib/logger/stages.ts b/src/server/lib/logger/stages.ts
new file mode 100644
index 0000000..6491d59
--- /dev/null
+++ b/src/server/lib/logger/stages.ts
@@ -0,0 +1,59 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
// Canonical lifecycle stage names used as the `stage` field in structured
// logs. Values follow a lowercase dot-notation convention: `<area>.<event>`
// (with optional sub-segments, e.g. `build.image.building`).
export const LogStage = {
  // Webhook intake pipeline
  WEBHOOK_RECEIVED: 'webhook.received',
  WEBHOOK_QUEUED: 'webhook.queued',
  WEBHOOK_PROCESSING: 'webhook.processing',
  WEBHOOK_COMPLETE: 'webhook.complete',
  WEBHOOK_SKIPPED: 'webhook.skipped',

  // Image build lifecycle
  BUILD_CREATED: 'build.created',
  BUILD_QUEUED: 'build.queued',
  BUILD_STARTING: 'build.starting',
  BUILD_IMAGE_BUILDING: 'build.image.building',
  BUILD_IMAGE_PUSHING: 'build.image.pushing',
  BUILD_COMPLETE: 'build.complete',
  BUILD_FAILED: 'build.failed',

  // Helm deployment lifecycle
  DEPLOY_QUEUED: 'deploy.queued',
  DEPLOY_STARTING: 'deploy.starting',
  DEPLOY_HELM_INSTALLING: 'deploy.helm.installing',
  DEPLOY_HELM_COMPLETE: 'deploy.helm.complete',
  DEPLOY_COMPLETE: 'deploy.complete',
  DEPLOY_FAILED: 'deploy.failed',

  // Environment teardown
  CLEANUP_STARTING: 'cleanup.starting',
  CLEANUP_COMPLETE: 'cleanup.complete',
  CLEANUP_FAILED: 'cleanup.failed',

  // PR label handling
  LABEL_PROCESSING: 'label.processing',
  LABEL_COMPLETE: 'label.complete',
  LABEL_FAILED: 'label.failed',

  // PR comment handling
  COMMENT_PROCESSING: 'comment.processing',
  COMMENT_COMPLETE: 'comment.complete',
  COMMENT_FAILED: 'comment.failed',

  // Global config refresh
  CONFIG_REFRESH: 'config.refresh',
  CONFIG_FAILED: 'config.failed',

  // Ingress patching
  INGRESS_PROCESSING: 'ingress.processing',
  INGRESS_COMPLETE: 'ingress.complete',
  INGRESS_FAILED: 'ingress.failed',
} as const;

// Union of all stage string values, e.g. 'webhook.received' | 'build.queued' | ...
export type LogStageType = (typeof LogStage)[keyof typeof LogStage];
diff --git a/src/server/lib/logger/types.ts b/src/server/lib/logger/types.ts
new file mode 100644
index 0000000..936ee18
--- /dev/null
+++ b/src/server/lib/logger/types.ts
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2025 GoodRx, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export interface LogContext {
+ correlationId: string;
+ buildUuid?: string;
+ deployUuid?: string;
+ serviceName?: string;
+ sender?: string;
+ stage?: string;
+ repo?: string;
+ pr?: number;
+ branch?: string;
+ sha?: string;
+}
+
+export interface JobDataWithContext {
+ correlationId?: string;
+ buildUuid?: string;
+ deployUuid?: string;
+ serviceName?: string;
+ sender?: string;
+ repo?: string;
+ pr?: number;
+ branch?: string;
+ sha?: string;
+ _ddTraceContext?: Record;
+}
diff --git a/src/server/lib/nativeBuild/__tests__/buildkit.test.ts b/src/server/lib/nativeBuild/__tests__/buildkit.test.ts
index 16d0f80..84afd93 100644
--- a/src/server/lib/nativeBuild/__tests__/buildkit.test.ts
+++ b/src/server/lib/nativeBuild/__tests__/buildkit.test.ts
@@ -57,8 +57,7 @@ jest.mock('../../logger', () => {
})),
};
return {
- __esModule: true,
- default: mockLogger,
+ getLogger: jest.fn(() => mockLogger),
};
});
diff --git a/src/server/lib/nativeBuild/engines.ts b/src/server/lib/nativeBuild/engines.ts
index e752590..5816776 100644
--- a/src/server/lib/nativeBuild/engines.ts
+++ b/src/server/lib/nativeBuild/engines.ts
@@ -16,7 +16,7 @@
import { Deploy } from '../../models';
import { shellPromise } from '../shell';
-import logger from '../logger';
+import { getLogger } from '../logger';
import GlobalConfigService from '../../services/globalConfig';
import {
waitForJobAndGetLogs,
@@ -256,11 +256,7 @@ export async function buildWithEngine(
const jobName = `${options.deployUuid}-build-${jobId}-${shortSha}`.substring(0, 63);
const contextPath = `/workspace/repo-${shortRepoName}`;
- logger.info(
- `[${engine.name}] Building image(s) for ${options.deployUuid}: dockerfilePath=${
- options.dockerfilePath
- }, initDockerfilePath=${options.initDockerfilePath || 'none'}, repo=${options.repo}`
- );
+ getLogger().debug(`Build: preparing ${engine.name} job dockerfile=${options.dockerfilePath}`);
const githubToken = await getGitHubToken();
const gitUsername = 'x-access-token';
@@ -360,7 +356,7 @@ export async function buildWithEngine(
options.ecrDomain
)
);
- logger.info(`[${engine.name}] Job ${jobName} will build both main and init images in parallel`);
+ getLogger().debug('Build: including init image');
}
await deploy.$fetchGraph('build');
@@ -394,16 +390,16 @@ export async function buildWithEngine(
});
const jobYaml = yaml.dump(job, { quotingType: '"', forceQuotes: true });
- const applyResult = await shellPromise(`cat <<'EOF' | kubectl apply -f -
+ await shellPromise(`cat <<'EOF' | kubectl apply -f -
${jobYaml}
EOF`);
- logger.info(`Created ${engineName} job ${jobName} in namespace ${options.namespace}`, { applyResult });
+ getLogger().debug(`Job: created ${jobName}`);
try {
const { logs, success } = await waitForJobAndGetLogs(jobName, options.namespace, jobTimeout);
return { success, logs, jobName };
} catch (error) {
- logger.error(`Error getting logs for ${engineName} job ${jobName}`, { error });
+ getLogger({ error }).error(`Job: log retrieval failed name=${jobName}`);
try {
const jobStatus = await shellPromise(
@@ -412,11 +408,11 @@ EOF`);
const jobSucceeded = jobStatus.trim() === 'True';
if (jobSucceeded) {
- logger.info(`Job ${jobName} completed successfully despite log retrieval error`);
+ getLogger().debug(`Job: completed (logs unavailable) job=${jobName}`);
return { success: true, logs: 'Log retrieval failed but job completed successfully', jobName };
}
} catch (statusError) {
- logger.error(`Failed to check job status for ${jobName}`, { statusError });
+ getLogger({ error: statusError }).error(`Job: status check failed name=${jobName}`);
}
return { success: false, logs: `Build failed: ${error.message}`, jobName };
diff --git a/src/server/lib/nativeBuild/index.ts b/src/server/lib/nativeBuild/index.ts
index a5508ed..46027d1 100644
--- a/src/server/lib/nativeBuild/index.ts
+++ b/src/server/lib/nativeBuild/index.ts
@@ -15,7 +15,7 @@
*/
import { Deploy } from '../../models';
-import logger from '../logger';
+import { getLogger, withSpan, withLogContext } from '../logger';
import { ensureNamespaceExists } from './utils';
import { buildWithEngine, NativeBuildOptions } from './engines';
import { ensureServiceAccountForJob } from '../kubernetes/common/serviceAccount';
@@ -29,47 +29,51 @@ export interface NativeBuildResult {
}
export async function buildWithNative(deploy: Deploy, options: NativeBuildOptions): Promise {
- const startTime = Date.now();
- logger.info(`[Native Build] Starting build for ${options.deployUuid} in namespace ${options.namespace}`);
+ return withLogContext({ deployUuid: options.deployUuid, serviceName: deploy.deployable?.name }, async () => {
+ return withSpan(
+ 'lifecycle.build.image',
+ async () => {
+ const startTime = Date.now();
+ getLogger().info('Build: starting (native)');
- try {
- await ensureNamespaceExists(options.namespace);
+ try {
+ await ensureNamespaceExists(options.namespace);
- const serviceAccountName = await ensureServiceAccountForJob(options.namespace, 'build');
+ const serviceAccountName = await ensureServiceAccountForJob(options.namespace, 'build');
- const buildOptions = {
- ...options,
- serviceAccount: serviceAccountName,
- };
+ const buildOptions = {
+ ...options,
+ serviceAccount: serviceAccountName,
+ };
- await deploy.$fetchGraph('[deployable]');
- const builderEngine = deploy.deployable?.builder?.engine;
+ await deploy.$fetchGraph('[deployable]');
+ const builderEngine = deploy.deployable?.builder?.engine;
- let result: NativeBuildResult;
+ let result: NativeBuildResult;
- if (builderEngine === 'buildkit' || builderEngine === 'kaniko') {
- logger.info(`[Native Build] Using ${builderEngine} engine for ${options.deployUuid}`);
- result = await buildWithEngine(deploy, buildOptions, builderEngine);
- } else {
- throw new Error(`Unsupported builder engine: ${builderEngine}`);
- }
+ if (builderEngine === 'buildkit' || builderEngine === 'kaniko') {
+ getLogger().debug(`Build: using ${builderEngine} engine`);
+ result = await buildWithEngine(deploy, buildOptions, builderEngine);
+ } else {
+ throw new Error(`Unsupported builder engine: ${builderEngine}`);
+ }
- const duration = Date.now() - startTime;
- logger.info(
- `[Native Build] Build completed for ${options.deployUuid}: jobName=${result.jobName}, success=${result.success}, duration=${duration}ms, namespace=${options.namespace}`
- );
+ const duration = Date.now() - startTime;
+ getLogger().info(`Build: completed success=${result.success} duration=${duration}ms`);
- return result;
- } catch (error) {
- const duration = Date.now() - startTime;
- logger.error(
- `[Native Build] Build failed for ${options.deployUuid}: error=${error.message}, duration=${duration}ms, namespace=${options.namespace}`
- );
+ return result;
+ } catch (error) {
+ const duration = Date.now() - startTime;
+ getLogger().error({ error }, `Build: failed duration=${duration}ms`);
- return {
- success: false,
- logs: `Build error: ${error.message}`,
- jobName: '',
- };
- }
+ return {
+ success: false,
+ logs: `Build error: ${error.message}`,
+ jobName: '',
+ };
+ }
+ },
+ { resource: options.deployUuid }
+ );
+ });
}
diff --git a/src/server/lib/nativeBuild/utils.ts b/src/server/lib/nativeBuild/utils.ts
index 1ce9712..8b104a7 100644
--- a/src/server/lib/nativeBuild/utils.ts
+++ b/src/server/lib/nativeBuild/utils.ts
@@ -16,7 +16,7 @@
import { V1Job } from '@kubernetes/client-node';
import { shellPromise } from '../shell';
-import logger from '../logger';
+import { getLogger } from '../logger';
import * as k8s from '@kubernetes/client-node';
import GlobalConfigService from '../../services/globalConfig';
import { createBuildJob } from '../kubernetes/jobFactory';
@@ -30,10 +30,10 @@ export async function ensureNamespaceExists(namespace: string): Promise {
try {
await coreV1Api.readNamespace(namespace);
- logger.info(`Namespace ${namespace} already exists`);
+ getLogger().debug('Namespace: exists');
} catch (error) {
if (error?.response?.statusCode === 404) {
- logger.info(`Creating namespace ${namespace}`);
+ getLogger().debug('Namespace: creating');
await coreV1Api.createNamespace({
metadata: {
name: namespace,
diff --git a/src/server/lib/nativeHelm/helm.ts b/src/server/lib/nativeHelm/helm.ts
index 8a6f1ac..0aaddc2 100644
--- a/src/server/lib/nativeHelm/helm.ts
+++ b/src/server/lib/nativeHelm/helm.ts
@@ -18,7 +18,7 @@ import yaml from 'js-yaml';
import fs from 'fs';
import Deploy from 'server/models/Deploy';
import GlobalConfigService from 'server/services/globalConfig';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withSpan, withLogContext } from 'server/lib/logger';
import { shellPromise } from 'server/lib/shell';
import { randomAlphanumeric } from 'server/lib/random';
import { nanoid } from 'nanoid';
@@ -53,10 +53,6 @@ import {
import { createHelmJob as createHelmJobFromFactory } from 'server/lib/kubernetes/jobFactory';
import { ensureServiceAccountForJob } from 'server/lib/kubernetes/common/serviceAccount';
-const logger = rootLogger.child({
- filename: 'lib/nativeHelm/helm.ts',
-});
-
export interface JobResult {
completed: boolean;
logs: string;
@@ -250,10 +246,10 @@ export async function shouldUseNativeHelm(deploy: Deploy): Promise {
}
export async function deployNativeHelm(deploy: Deploy): Promise {
- logger.info(`[HELM ${deploy.uuid}] Starting native helm deployment`);
-
const { deployable, build } = deploy;
+ getLogger().info('Helm: deploying method=native');
+
if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) {
await applyHttpScaleObjectManifestYaml(deploy, build.namespace);
await applyExternalServiceManifestYaml(deploy, build.namespace);
@@ -279,7 +275,12 @@ export async function deployNativeHelm(deploy: Deploy): Promise {
await patchIngress(deploy.uuid, ingressBannerSnippet(deploy), build.namespace);
}
} catch (error) {
- logger.warn(`[DEPLOY ${deploy.uuid}] Unable to patch ingress: ${error}`);
+ getLogger().warn(
+ {
+ error,
+ },
+ 'Unable to patch ingress'
+ );
}
if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) {
@@ -310,7 +311,7 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService,
const deployPipelineId = getCodefreshPipelineIdFromOutput(output);
const statusMessage = 'Starting deployment via Helm';
- logger.info(`[DEPLOY ${deploy.uuid}] Deploying via codefresh build: ${deployPipelineId}`);
+ getLogger().info(`Helm: deploying method=codefresh`);
await deployService.patchAndUpdateActivityFeed(
deploy,
@@ -330,7 +331,12 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService,
await patchIngress(deploy.uuid, ingressBannerSnippet(deploy), build.namespace);
}
} catch (error) {
- logger.warn(`[DEPLOY ${deploy.uuid}] Unable to patch ingress: ${error}`);
+ getLogger().warn(
+ {
+ error,
+ },
+ 'Unable to patch ingress'
+ );
}
if (deploy?.kedaScaleToZero?.type === 'http' && !build.isStatic) {
@@ -345,61 +351,69 @@ async function deployCodefreshHelm(deploy: Deploy, deployService: DeployService,
}
export async function deployHelm(deploys: Deploy[]): Promise {
- logger.info(`[DEPLOY ${deploys.map((d) => d.uuid).join(', ')}] Deploying with helm`);
-
if (deploys?.length === 0) return;
+ getLogger().info(`Helm: deploying services=${deploys.map((d) => d.deployable?.name || d.uuid).join(',')}`);
+
await Promise.all(
deploys.map(async (deploy) => {
- const startTime = Date.now();
- const runUUID = deploy.runUUID ?? nanoid();
- const deployService = new DeployService();
-
- try {
- const useNative = await shouldUseNativeHelm(deploy);
- const method = useNative ? 'Native Helm' : 'Codefresh Helm';
-
- logger.info(`[DEPLOY ${deploy.uuid}] Using ${method} deployment`);
-
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.DEPLOYING,
- statusMessage: `Deploying via ${method}`,
- },
- runUUID
- );
-
- if (useNative) {
- await deployNativeHelm(deploy);
- } else {
- await deployCodefreshHelm(deploy, deployService, runUUID);
- }
-
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.READY,
- statusMessage: `Successfully deployed via ${method}`,
+ return withLogContext({ deployUuid: deploy.uuid, serviceName: deploy.deployable?.name }, async () => {
+ return withSpan(
+ 'lifecycle.helm.deploy',
+ async () => {
+ const startTime = Date.now();
+ const runUUID = deploy.runUUID ?? nanoid();
+ const deployService = new DeployService();
+
+ try {
+ const useNative = await shouldUseNativeHelm(deploy);
+ const method = useNative ? 'Native Helm' : 'Codefresh Helm';
+
+ getLogger().debug(`Using ${method}`);
+
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.DEPLOYING,
+ statusMessage: `Deploying via ${method}`,
+ },
+ runUUID
+ );
+
+ if (useNative) {
+ await deployNativeHelm(deploy);
+ } else {
+ await deployCodefreshHelm(deploy, deployService, runUUID);
+ }
+
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.READY,
+ statusMessage: `Successfully deployed via ${method}`,
+ },
+ runUUID
+ );
+
+ await trackHelmDeploymentMetrics(deploy, 'success', Date.now() - startTime);
+ } catch (error) {
+ await trackHelmDeploymentMetrics(deploy, 'failure', Date.now() - startTime, error.message);
+
+ await deployService.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.DEPLOY_FAILED,
+ statusMessage: `Helm deployment failed: ${error.message}`,
+ },
+ runUUID
+ );
+
+ throw error;
+ }
},
- runUUID
+ { resource: deploy.uuid, tags: { 'deploy.uuid': deploy.uuid } }
);
-
- await trackHelmDeploymentMetrics(deploy, 'success', Date.now() - startTime);
- } catch (error) {
- await trackHelmDeploymentMetrics(deploy, 'failure', Date.now() - startTime, error.message);
-
- await deployService.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.DEPLOY_FAILED,
- statusMessage: `Helm deployment failed: ${error.message}`,
- },
- runUUID
- );
-
- throw error;
- }
+ });
})
);
}
diff --git a/src/server/lib/nativeHelm/utils.ts b/src/server/lib/nativeHelm/utils.ts
index 5791320..7ef29f1 100644
--- a/src/server/lib/nativeHelm/utils.ts
+++ b/src/server/lib/nativeHelm/utils.ts
@@ -26,14 +26,10 @@ import {
setupDeployServiceAccountInNamespace,
} from 'server/lib/kubernetes/rbac';
import { HelmConfigBuilder } from 'server/lib/config/ConfigBuilder';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { shellPromise } from 'server/lib/shell';
import { normalizeKubernetesLabelValue } from 'server/lib/kubernetes/utils';
-const logger = rootLogger.child({
- filename: 'lib/nativeHelm/utils.ts',
-});
-
export interface HelmReleaseState {
status: 'deployed' | 'pending-install' | 'pending-upgrade' | 'pending-rollback' | 'failed' | 'unknown';
revision: number;
@@ -54,7 +50,7 @@ export async function getHelmReleaseStatus(releaseName: string, namespace: strin
if (error.message?.includes('release: not found')) {
return null;
}
- logger.warn(`[HELM] Failed to get status for release ${releaseName}: ${error.message}`);
+ getLogger().warn({ error }, `Helm: release status fetch failed name=${releaseName}`);
return null;
}
}
@@ -67,14 +63,14 @@ export async function isReleaseBlocked(releaseState: HelmReleaseState | null): P
}
export async function uninstallHelmRelease(releaseName: string, namespace: string): Promise<void> {
- logger.info(`[HELM] Uninstalling release ${releaseName} in namespace ${namespace}`);
+ getLogger().debug(`Helm: uninstalling release namespace=${namespace}`);
try {
await shellPromise(`helm uninstall ${releaseName} -n ${namespace} --wait --timeout 5m`);
- logger.info(`[HELM] Successfully uninstalled release ${releaseName}`);
+ getLogger().debug('Helm: release uninstalled');
} catch (error) {
if (error.message?.includes('release: not found')) {
- logger.info(`[HELM] Release ${releaseName} not found, nothing to uninstall`);
+ getLogger().debug('Helm: release not found, skipping uninstall');
return;
}
throw error;
@@ -82,7 +78,8 @@ export async function uninstallHelmRelease(releaseName: string, namespace: strin
}
export async function killHelmJobsAndPods(releaseName: string, namespace: string): Promise<void> {
- logger.info(`[HELM ${releaseName}] Checking for existing helm jobs`);
+ const log = getLogger();
+ log.debug('Helm: checking existing jobs');
try {
const existingJobs = await shellPromise(
@@ -91,7 +88,7 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string
const jobsData = JSON.parse(existingJobs);
if (jobsData.items && jobsData.items.length > 0) {
- logger.warn(`[HELM ${releaseName}] Found ${jobsData.items.length} existing job(s), terminating`);
+ log.warn(`Found ${jobsData.items.length} existing job(s), terminating`);
for (const job of jobsData.items) {
const jobName = job.metadata.name;
@@ -104,7 +101,7 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string
`--overwrite`
);
} catch (annotateError) {
- logger.warn(`[HELM ${releaseName}] Failed to annotate job ${jobName}: ${annotateError.message}`);
+ log.warn({ error: annotateError }, `Failed to annotate job: jobName=${jobName}`);
}
const podsOutput = await shellPromise(`kubectl get pods -n ${namespace} -l job-name=${jobName} -o json`);
@@ -116,7 +113,7 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string
try {
await shellPromise(`kubectl delete pod ${podName} -n ${namespace} --force --grace-period=0`);
} catch (podError) {
- logger.warn(`[HELM ${releaseName}] Failed to delete pod ${podName}: ${podError.message}`);
+ log.warn({ error: podError }, `Failed to delete pod: podName=${podName}`);
}
}
}
@@ -124,17 +121,18 @@ export async function killHelmJobsAndPods(releaseName: string, namespace: string
try {
await shellPromise(`kubectl delete job ${jobName} -n ${namespace} --force --grace-period=0`);
} catch (jobError) {
- logger.warn(`[HELM ${releaseName}] Failed to delete job ${jobName}: ${jobError.message}`);
+ log.warn({ error: jobError }, `Failed to delete job: jobName=${jobName}`);
}
}
}
} catch (error) {
- logger.warn(`[HELM ${releaseName}] Error checking for existing jobs: ${error.message}`);
+ log.warn({ error }, 'Error checking for existing jobs');
}
}
export async function resolveHelmReleaseConflicts(releaseName: string, namespace: string): Promise<void> {
- logger.info(`[HELM ${releaseName}] Resolving release conflicts`);
+ const log = getLogger();
+ log.debug('Helm: resolving conflicts');
await killHelmJobsAndPods(releaseName, namespace);
@@ -147,7 +145,7 @@ export async function resolveHelmReleaseConflicts(releaseName: string, namespace
}
if (await isReleaseBlocked(releaseState)) {
- logger.warn(`[HELM ${releaseName}] Release blocked (${releaseState.status}), uninstalling`);
+ log.warn(`Release blocked: status=${releaseState.status}, uninstalling`);
await uninstallHelmRelease(releaseName, namespace);
@@ -177,7 +175,7 @@ export async function checkIfJobWasSuperseded(jobName: string, namespace: string
return annotations === 'superseded-by-retry';
} catch (error) {
- logger.debug(`Could not check job supersession status for ${jobName}: ${error.message}`);
+ getLogger().debug({ error }, `Helm: job supersession check failed jobName=${jobName}`);
return false;
}
}
@@ -442,7 +440,7 @@ export async function setupServiceAccountInNamespace(
): Promise {
await createServiceAccountUsingExistingFunction(namespace, serviceAccountName, role);
await setupDeployServiceAccountInNamespace(namespace, serviceAccountName, role);
- logger.info(`[RBAC] Setup complete for '${serviceAccountName}' in ${namespace}`);
+ getLogger().debug(`RBAC: configured serviceAccount=${serviceAccountName} namespace=${namespace}`);
}
export async function createNamespacedRoleAndBinding(namespace: string, serviceAccountName: string): Promise {
@@ -499,8 +497,10 @@ export async function createNamespacedRoleAndBinding(namespace: string, serviceA
},
};
+ const log = getLogger();
+
try {
- logger.info(`[NS ${namespace}] Creating Role and RoleBinding for: ${serviceAccountName}`);
+ log.debug(`RBAC: creating role and binding namespace=${namespace} serviceAccount=${serviceAccountName}`);
try {
await rbacApi.readNamespacedRole(roleName, namespace);
@@ -528,24 +528,24 @@ export async function createNamespacedRoleAndBinding(namespace: string, serviceA
await rbacApi.readNamespacedRole(roleName, namespace);
await rbacApi.readNamespacedRoleBinding(roleBindingName, namespace);
} catch (verifyError) {
- logger.error(`[NS ${namespace}] Failed to verify RBAC resources:`, verifyError.message);
+ log.error({ error: verifyError }, `Failed to verify RBAC resources: namespace=${namespace}`);
}
} catch (error) {
- logger.warn(error);
- logger.error(`[NS ${namespace}] Error creating namespace-scoped RBAC:`, {
- error,
- statusCode: error?.response?.statusCode,
- statusMessage: error?.response?.statusMessage,
- body: error?.response?.body,
- serviceAccountName,
- namespace,
- roleName,
- roleBindingName,
- });
-
- logger.warn(
- `[NS ${namespace}] ⚠️ RBAC setup failed, helm deployment may have permission issues. Consider updating lifecycle-app service account permissions to allow Role/RoleBinding creation.`
+ log.warn({ error }, `Error creating namespace-scoped RBAC: namespace=${namespace}`);
+ log.error(
+ {
+ error,
+ statusCode: error?.response?.statusCode,
+ statusMessage: error?.response?.statusMessage,
+ serviceAccountName,
+ namespace,
+ roleName,
+ roleBindingName,
+ },
+ `RBAC creation failed: namespace=${namespace}`
);
+
+ log.warn(`RBAC setup failed, helm deployment may have permission issues: namespace=${namespace}`);
}
}
diff --git a/src/server/lib/queueManager.ts b/src/server/lib/queueManager.ts
index f541d53..6be81f3 100644
--- a/src/server/lib/queueManager.ts
+++ b/src/server/lib/queueManager.ts
@@ -16,11 +16,7 @@
import { Queue, Worker, QueueOptions, WorkerOptions, Processor } from 'bullmq';
import { Redis } from 'ioredis';
-import rootLogger from './logger';
-
-const logger = rootLogger.child({
- filename: 'lib/queueManager.ts',
-});
+import { getLogger } from 'server/lib/logger';
interface RegisteredQueue {
queue: Queue;
@@ -52,7 +48,7 @@ export default class QueueManager {
return existing.queue;
}
- logger.debug(`Registering queue ${queueName}`);
+ getLogger().debug(`Registering queue: queueName=${queueName}`);
const queue = new Queue(queueName, {
connection: options.connection.duplicate ? options.connection.duplicate() : options.connection,
@@ -76,7 +72,7 @@ export default class QueueManager {
};
}
): Worker {
- logger.debug(`Registering worker for queue ${queueName}`);
+ getLogger().debug(`Registering worker: queueName=${queueName}`);
const workerConnection = options.connection.duplicate ? options.connection.duplicate() : options.connection;
// ensure maxRetriesPerRequest is null for workers
@@ -109,23 +105,23 @@ export default class QueueManager {
public async emptyAndCloseAllQueues(): Promise<void> {
for (const { queue, worker } of this.registeredQueues) {
if (worker) {
- logger.debug(`Closing worker for queue: ${worker.name}`);
+ getLogger().debug(`Closing worker: queueName=${worker.name}`);
try {
await worker.close();
} catch (error) {
- logger.warn(`⚠️ Error closing worker for queue ${worker.name}:`, error.message);
+ getLogger().warn({ error: error.message }, `Queue: worker close failed name=${worker.name}`);
}
}
if (queue) {
- logger.debug(`Closing queue: ${queue.name}`);
+ getLogger().debug(`Closing queue: queueName=${queue.name}`);
try {
await queue.close();
} catch (error) {
- logger.warn(`⚠️ Error closing queue ${queue.name}:`, error.message);
+ getLogger().warn({ error: error.message }, `Queue: close failed name=${queue.name}`);
}
}
}
- logger.info('✅ All queues have been closed successfully.');
+ getLogger().info('Queue: closed');
}
}
diff --git a/src/server/lib/redisClient.ts b/src/server/lib/redisClient.ts
index 560b961..13448b8 100644
--- a/src/server/lib/redisClient.ts
+++ b/src/server/lib/redisClient.ts
@@ -17,11 +17,7 @@
import Redis from 'ioredis';
import Redlock from 'redlock';
import { REDIS_URL, APP_REDIS_HOST, APP_REDIS_PORT, APP_REDIS_PASSWORD, APP_REDIS_TLS } from 'shared/config';
-import rootLogger from './logger';
-
-const logger = rootLogger.child({
- filename: 'lib/redisClient.ts',
-});
+import { getLogger } from 'server/lib/logger';
export class RedisClient {
private static instance: RedisClient;
@@ -97,9 +93,9 @@ export class RedisClient {
public async close(): Promise<void> {
try {
await Promise.all([this.redis.quit(), this.subscriber.quit(), this.bullConn.quit()]);
- logger.info(' ✅All Redis connections closed successfully.');
+ getLogger().info('Redis: closed');
} catch (error) {
- logger.warn(' ⚠️Error closing Redis connections. Forcing disconnect.', error);
+ getLogger().warn({ error }, 'Redis: close failed forcing=true');
this.redis.disconnect();
this.subscriber.disconnect();
this.bullConn.disconnect();
diff --git a/src/server/lib/response.ts b/src/server/lib/response.ts
index 25b2132..30b3c6d 100644
--- a/src/server/lib/response.ts
+++ b/src/server/lib/response.ts
@@ -16,11 +16,7 @@
import { NextRequest, NextResponse } from 'next/server';
import { PaginationMetadata } from './paginate';
-import rootLogger from './logger';
-
-const logger = rootLogger.child({
- filename: 'server/lib/response.ts',
-});
+import { getLogger } from 'server/lib/logger';
interface Metadata {
pagination?: PaginationMetadata;
@@ -79,7 +75,7 @@ export function errorResponse(error: unknown, options: ErrorResponseOptions, req
errorStack = error.stack || '';
}
- logger.error(`API Error: ${errorMessage}`, { stack: errorStack });
+ getLogger().error({ error, stack: errorStack }, `API: error message=${errorMessage}`);
const { status } = options;
diff --git a/src/server/lib/shell.ts b/src/server/lib/shell.ts
index 0669f1a..ec2386f 100644
--- a/src/server/lib/shell.ts
+++ b/src/server/lib/shell.ts
@@ -14,13 +14,9 @@
* limitations under the License.
*/
-import rootLogger from './logger';
+import { getLogger } from 'server/lib/logger';
import shell, { ExecOptions } from 'shelljs';
-const logger = rootLogger.child({
- filename: 'lib/shell.ts',
-});
-
interface Options extends ExecOptions {
debug?: boolean;
}
@@ -39,7 +35,7 @@ export async function shellPromise(cmd: string, options: Options = {}): Promise<
shell.exec(cmd, opts, (code, stdout, stderr) => {
if (code !== 0) {
if (stderr.length > 0) {
- logger.debug(`Shell command failed: ${cmd} => ${stderr}`);
+ getLogger().debug(`Shell command failed: cmd=${cmd} stderr=${stderr}`);
}
const options = opts ? JSON.stringify(opts) : '';
reject(
diff --git a/src/server/lib/tracer/index.ts b/src/server/lib/tracer/index.ts
index 50e8b6c..b309dba 100644
--- a/src/server/lib/tracer/index.ts
+++ b/src/server/lib/tracer/index.ts
@@ -15,11 +15,7 @@
*/
import { Span, tracer, TracerOptions } from 'dd-trace';
-import rootLogger from 'server/lib/logger';
-
-export const logger = rootLogger.child({
- filename: 'lib/tracer/index.ts',
-});
+import { getLogger } from 'server/lib/logger';
// Refer to the readme for insights
@@ -31,7 +27,7 @@ export class Tracer {
private constructor() {
if (Tracer.instance) {
const errorMsg = 'This class is a singleton!';
- logger.error(errorMsg);
+ getLogger().error(`Tracer: singleton violation`);
throw new Error(errorMsg);
}
Tracer.instance = this;
@@ -51,30 +47,39 @@ export class Tracer {
this.updateTags(tags);
} else {
this.tags = { name, ...tags };
- const span = tracer.startSpan(name, { tags: this.tags });
- tracer.scope().activate(span, () => {
- span.finish();
- });
+ if (typeof tracer?.startSpan === 'function') {
+ const span = tracer.startSpan(name, { tags: this.tags });
+ if (typeof tracer?.scope === 'function') {
+ tracer.scope().activate(span, () => {
+ span.finish();
+ });
+ } else {
+ span.finish();
+ }
+ }
this.isInitialized = true;
}
return this;
} catch (error) {
- logger.error(`[Tracer][initialize] error: ${error}`);
+ getLogger().error({ error }, 'Tracer: initialization failed');
return this;
}
}
public wrap(name, fn, tags: TracerTags = {}): Function {
+ if (typeof tracer?.wrap !== 'function') return fn;
const updatedTags = { ...this.tags, ...tags };
return tracer.wrap(name, updatedTags, fn);
}
public trace(name: string, fn, tags: TracerTags = {}): Function {
+ if (typeof tracer?.trace !== 'function') return fn;
const updatedTags = { ...this.tags, ...tags };
return tracer.trace(name, updatedTags, fn);
}
- public startSpan(name: string, tags: TracerTags = {}): Span {
+ public startSpan(name: string, tags: TracerTags = {}): Span | undefined {
+ if (typeof tracer?.startSpan !== 'function') return undefined;
const updatedTags = { ...this.tags, ...tags };
return tracer.startSpan(name, { tags: updatedTags });
}
@@ -84,12 +89,11 @@ export class Tracer {
}
public static Trace(): Function {
- return function (target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor): any {
+ return function (_target: any, propertyKey: string | symbol, descriptor: PropertyDescriptor): any {
const originalMethod = descriptor?.value;
const profiler = Tracer.getInstance();
descriptor.value = function (...args: any[]) {
- if (!profiler.isInitialized) {
- logger.error(`[Tracer][Trace] Tracer not initialized`);
+ if (!profiler.isInitialized || typeof tracer?.trace !== 'function') {
return originalMethod.apply(this, args);
}
const spanOptions = { tags: { ...profiler.tags, decorator: 'Trace' } };
@@ -97,10 +101,10 @@ export class Tracer {
try {
return originalMethod.apply(this, args);
} catch (error) {
- tracer.scope().active()?.setTag('error', true);
- logger
- .child({ target, descriptor, error })
- .error(`[Tracer][Trace] error decorating ${propertyKey.toString()}`);
+ if (typeof tracer?.scope === 'function') {
+ tracer.scope().active()?.setTag('error', true);
+ }
+ getLogger().error({ error }, `Tracer: decorator failed method=${propertyKey.toString()}`);
throw error;
}
});
diff --git a/src/server/lib/utils.ts b/src/server/lib/utils.ts
index afde983..a4a851c 100644
--- a/src/server/lib/utils.ts
+++ b/src/server/lib/utils.ts
@@ -20,21 +20,17 @@ import { GithubPullRequestActions, PullRequestStatus, FallbackLabels } from 'sha
import GlobalConfigService from 'server/services/globalConfig';
import { GenerateDeployTagOptions, WaitUntilOptions, EnableKillswitchOptions } from 'server/lib/types';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { ENVIRONMENT } from 'shared/config';
-const initialLogger = rootLogger.child({
- filename: 'lib/utils.ts',
-});
-
const execFilePromise = promisify(execFile);
-export const exec = async (runner: string, cmd: string[], { logger = initialLogger, execCmd = execFilePromise }) => {
+export const exec = async (runner: string, cmd: string[], { execCmd = execFilePromise } = {}) => {
try {
const out = await execCmd(runner, cmd);
return out?.stdout || '';
} catch (err) {
- logger.error(`exec: error executing ${JSON.stringify(err)}`);
+ getLogger().error({ error: err }, `Exec: command failed runner=${runner}`);
return '';
}
};
@@ -153,11 +149,10 @@ export const enableKillSwitch = async ({
action = '',
branch = '',
fullName = '',
- logger = initialLogger,
isBotUser = false,
labels = [],
status = '',
-}: EnableKillswitchOptions) => {
+}: Omit<EnableKillswitchOptions, 'logger'>) => {
try {
const isOpened = [GithubPullRequestActions.OPENED, GithubPullRequestActions.REOPENED].includes(
action as GithubPullRequestActions
@@ -194,7 +189,7 @@ export const enableKillSwitch = async ({
const isUnallowed = organizations.includes(owner?.toLowerCase());
return isIgnore || isReleaseBranch || isUnallowed;
} catch (error) {
- logger.warn(`[UTIL ${fullName}/${branch}][enableKillswitch] ${error}`);
+ getLogger().warn(`Killswitch: error checking fullName=${fullName} branch=${branch} error=${error}`);
return false;
}
};
@@ -293,7 +288,7 @@ export const isControlCommentsEnabled = async (): Promise<boolean> => {
const labelsConfig = await GlobalConfigService.getInstance().getLabels();
return labelsConfig.defaultControlComments ?? true;
} catch (error) {
- initialLogger.warn('[isControlCommentsEnabled] Error retrieving config, defaulting to true', error);
+ getLogger().warn(`Config: error retrieving control comments config error=${error}`);
return true;
}
};
diff --git a/src/server/lib/webhook/index.ts b/src/server/lib/webhook/index.ts
index 0f984dd..e84444a 100644
--- a/src/server/lib/webhook/index.ts
+++ b/src/server/lib/webhook/index.ts
@@ -22,13 +22,9 @@ import { createWebhookJob, WebhookJobConfig } from 'server/lib/kubernetes/webhoo
import { shellPromise } from 'server/lib/shell';
import { waitForJobAndGetLogs } from 'server/lib/nativeBuild/utils';
import { ensureServiceAccountForJob } from 'server/lib/kubernetes/common/serviceAccount';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { nanoid } from 'nanoid';
-const logger = rootLogger.child({
- filename: 'lib/webhook/index.ts',
-});
-
const MANIFEST_PATH = process.env.MANIFEST_PATH || '/tmp/lifecycle/manifests';
export interface WebhookExecutionResult {
@@ -102,12 +98,9 @@ export async function executeCommandWebhook(
async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Promise<WebhookExecutionResult> {
const executionId = nanoid();
- logger.info(`[WEBHOOK ${build.uuid}] Starting ${jobConfig.webhookType} webhook: ${jobConfig.webhookName}`, {
- buildUuid: build.uuid,
- webhookName: jobConfig.webhookName,
- webhookType: jobConfig.webhookType,
- executionId,
- });
+ getLogger().info(
+ `Webhook: starting type=${jobConfig.webhookType} name=${jobConfig.webhookName} executionId=${executionId}`
+ );
try {
const job = createWebhookJob(jobConfig);
@@ -121,12 +114,9 @@ async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Pro
const jobResult = await waitForJobAndGetLogs(job.metadata.name, jobConfig.namespace, `[WEBHOOK ${build.uuid}]`);
- logger.info(`[WEBHOOK ${build.uuid}] Webhook execution completed`, {
- buildUuid: build.uuid,
- webhookName: jobConfig.webhookName,
- success: jobResult.success,
- status: jobResult.status,
- });
+ getLogger().info(
+ `Webhook: completed name=${jobConfig.webhookName} success=${jobResult.success} status=${jobResult.status}`
+ );
return {
success: jobResult.success,
@@ -136,19 +126,16 @@ async function executeWebhookJob(jobConfig: WebhookJobConfig, build: Build): Pro
metadata: {},
};
} catch (error) {
- logger.error(`[WEBHOOK ${build.uuid}] Webhook execution failed`, {
- buildUuid: build.uuid,
- webhookName: jobConfig.webhookName,
- error: error.message,
- });
+ getLogger().error({ error }, `Webhook: execution failed name=${jobConfig.webhookName}`);
+ const errorMessage = error instanceof Error ? error.message : String(error);
return {
success: false,
jobName: '',
- logs: error.message,
+ logs: errorMessage,
status: 'failed',
metadata: {
- error: error.message,
+ error: errorMessage,
},
};
}
diff --git a/src/server/lib/yamlConfigValidator.ts b/src/server/lib/yamlConfigValidator.ts
index e6748ae..46b2350 100644
--- a/src/server/lib/yamlConfigValidator.ts
+++ b/src/server/lib/yamlConfigValidator.ts
@@ -19,11 +19,7 @@ import { LifecycleError } from './errors';
import JsonSchema from 'jsonschema';
import { BuildStatus, CAPACITY_TYPE, DiskAccessMode } from 'shared/constants';
import { schema_1_0_0 } from './yamlSchemas';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({
- filename: 'models/yaml/YamlService.ts',
-});
+import { getLogger } from 'server/lib/logger';
export class ValidationError extends LifecycleError {
constructor(msg: string, uuid: string = null, service: string = null) {
@@ -89,7 +85,7 @@ export class YamlConfigValidator {
throw new ValidationError('Config file is empty.');
}
- logger.debug(`Validating config file with version: ${version}`);
+ getLogger().debug(`Config: validating version=${version}`);
switch (version.toLowerCase()) {
case '1.0.0':
case 'latest':
diff --git a/src/server/models/config/index.ts b/src/server/models/config/index.ts
index 7cb628a..c562e24 100644
--- a/src/server/models/config/index.ts
+++ b/src/server/models/config/index.ts
@@ -24,11 +24,7 @@ import {
} from 'server/models/config/utils';
import { LifecycleConfig, Service } from 'server/models/config/types';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({
- filename: 'models/config/index.ts',
-});
+import { getLogger } from 'server/lib/logger';
export const isGithubServiceDockerConfig = (obj) => isInObj(obj, 'dockerfilePath');
export const isDockerServiceConfig = (obj) => isInObj(obj, 'dockerImage');
@@ -127,7 +123,10 @@ export const fetchLifecycleConfig = async (repositoryName: string, branchName: s
const config = await fetchLifecycleConfigByRepository(repository, branchName);
return config;
} catch (err) {
- logger.error(`Unable to fetch configuration from ${repositoryName}/${branchName}: ${err}`);
+ getLogger().error(
+ { error: err instanceof Error ? err.message : String(err) },
+ `Failed to fetch config: repository=${repositoryName} branch=${branchName}`
+ );
}
};
diff --git a/src/server/models/config/utils.ts b/src/server/models/config/utils.ts
index 8558664..bdebce9 100644
--- a/src/server/models/config/utils.ts
+++ b/src/server/models/config/utils.ts
@@ -21,11 +21,7 @@ import { YamlConfigParser } from 'server/lib/yamlConfigParser';
import Repository from 'server/models/Repository';
import { Service } from 'server/models/yaml/types';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({
- filename: 'models/yaml/utils.ts',
-});
+import { getLogger } from 'server/lib/logger';
export const isInObj = (obj, key) => (!obj ? false : key in obj);
@@ -40,7 +36,7 @@ export const resolveRepository = async (repositoryFullName: string) => {
const repositories = await Repository.query()
.where(raw('LOWER(??)', [key]), '=', name)
.catch((error) => {
- logger.error(`Unable to find ${repositoryFullName} from Lifecycle Database: ${error}`);
+ getLogger().error({ error }, `Repository: not found name=${repositoryFullName}`);
return null;
});
if (!repositories || repositories?.length === 0) {
@@ -48,7 +44,7 @@ export const resolveRepository = async (repositoryFullName: string) => {
}
return repositories[0];
} catch (err) {
- logger.error(`There was a problem resolving the repository ${repositoryFullName} \n Error: ${err}`);
+ getLogger().error({ error: err }, `Repository: resolution failed name=${repositoryFullName}`);
}
};
@@ -65,9 +61,7 @@ export const fetchLifecycleConfigByRepository = async (repository: Repository, b
const validator = new YamlConfigValidator();
const isConfigValid = validator.validate(configVersion, config);
if (!isConfigValid) {
- logger.error(
- `YAML Config validation failed for ${name}/${branchName} using version Lifecyle Yaml version=${configVersion}`
- );
+ getLogger().error(`Config: validation failed repo=${name}/${branchName} version=${configVersion}`);
// TODO: This is a temporary fix to allow the UI to display the config
// throw new Error(
// `YAML Config validation failed for ${name}/${branchName} using version Lifecyle Yaml version=${configVersion}`
@@ -75,7 +69,7 @@ export const fetchLifecycleConfigByRepository = async (repository: Repository, b
}
return config;
} catch (err) {
- logger.error(`fetchLifecycleConfigByRepository error: ${err}`);
+ getLogger().error({ error: err }, `Config: fetch failed`);
return null;
}
};
diff --git a/src/server/models/yaml/Config.ts b/src/server/models/yaml/Config.ts
index e4c8494..bb9b6b3 100644
--- a/src/server/models/yaml/Config.ts
+++ b/src/server/models/yaml/Config.ts
@@ -21,11 +21,7 @@ import { ValidationError, YamlConfigValidator } from 'server/lib/yamlConfigValid
import Repository from '../Repository';
import { Environment } from './YamlEnvironment';
import { Service, Service001 } from './YamlService';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({
- filename: 'models/yaml/Config.ts',
-});
+import { getLogger } from 'server/lib/logger';
export interface LifecycleConfig {
readonly version: string;
@@ -70,7 +66,7 @@ export async function fetchLifecycleConfigByRepository(
try {
config = await new YamlConfigParser().parseYamlConfigFromBranch(repository.fullName, branchName);
} catch (error) {
- logger.warn(`Unable to fetch configuration from ${repository.fullName}/${branchName}: ${error}`);
+ getLogger({ repository: repository.fullName, branch: branchName }).warn({ error }, 'Config: fetch failed');
if (error instanceof EmptyFileError) {
config = null;
@@ -80,12 +76,12 @@ export async function fetchLifecycleConfigByRepository(
}
if (config != null) {
- // The YAML config file could be syntax correctly but the schema could be wrong.
try {
new YamlConfigValidator().validate(config.version, config);
} catch (error) {
- logger.error(
- `YAML Config validation failed for ${repository.fullName}/${branchName} using version=${config.version}: ${error}`
+ getLogger({ repository: repository.fullName, branch: branchName, version: config.version }).error(
+ { error },
+ 'Config: validation failed'
);
throw new ValidationError(error);
}
@@ -110,9 +106,7 @@ export function getDeployingServicesByName(config: LifecycleConfig, serviceName:
}
}
} catch (error) {
- logger
- .child({ error })
- .error(`There was a problem getting the service by its name while searching for ${serviceName} service`);
+ getLogger({ serviceName }).error({ error }, 'Service: lookup failed');
throw error;
}
@@ -132,9 +126,7 @@ export async function resolveRepository(repositoryFullName: string): Promise {
- logger.error(
- `Unable to find ${repositoryFullName} from Lifecycle Database. Note that repository name is case sensitive: ${error}`
- );
+ getLogger({ repository: repositoryFullName }).error({ error }, 'Repository: not found');
return null;
});
@@ -143,9 +135,7 @@ export async function resolveRepository(repositoryFullName: string): Promise;
@@ -507,13 +503,14 @@ export async function getHelmConfigFromYaml(service: Service): Promise {
if (DeployTypes.HELM === getDeployType(service)) {
const helmService = (service as unknown as HelmService).helm;
- // First check for chart-specific configuration
if (!globalConfig[helmService?.chart?.name]) {
if (globalConfig?.publicChart?.block)
throw new Error(
`Unspported Chart: helmChart with name: ${helmService?.chart?.name} is not currently supported`
);
- logger.warn(`[helmChart with name: ${helmService?.chart?.name} is not currently supported, proceed with caution`);
+ getLogger({ chartName: helmService?.chart?.name }).warn(
+ `Helm: chart not supported name=${helmService?.chart?.name}`
+ );
}
// Merge in priority order:
@@ -579,13 +576,7 @@ export function getRepositoryName(service: Service): string {
break;
}
} catch (error) {
- logger.error(
- `There was a problem getting the repository name for service name: ${JSON.stringify(
- service,
- null,
- 2
- )} \n ${error}`
- );
+ getLogger({ serviceName: service?.name }).error({ error }, 'Service: repository name lookup failed');
throw error;
}
diff --git a/src/server/services/__tests__/github.test.ts b/src/server/services/__tests__/github.test.ts
index 56b0ce4..f73cc39 100644
--- a/src/server/services/__tests__/github.test.ts
+++ b/src/server/services/__tests__/github.test.ts
@@ -22,11 +22,16 @@ import { PushEvent } from '@octokit/webhooks-types';
mockRedisClient();
jest.mock('server/lib/logger', () => ({
- error: jest.fn(),
- info: jest.fn(),
- warn: jest.fn(),
- debug: jest.fn(),
- child: jest.fn().mockReturnThis(),
+ getLogger: jest.fn(() => ({
+ error: jest.fn(),
+ info: jest.fn(),
+ warn: jest.fn(),
+ debug: jest.fn(),
+ child: jest.fn().mockReturnThis(),
+ })),
+ withLogContext: jest.fn((ctx, fn) => fn()),
+ extractContextForQueue: jest.fn(() => ({})),
+ LogStage: {},
}));
describe('Github Service - handlePushWebhook', () => {
diff --git a/src/server/services/__tests__/globalConfig.test.ts b/src/server/services/__tests__/globalConfig.test.ts
index 4be891e..5af4d86 100644
--- a/src/server/services/__tests__/globalConfig.test.ts
+++ b/src/server/services/__tests__/globalConfig.test.ts
@@ -45,6 +45,7 @@ describe('GlobalConfigService', () => {
beforeEach(() => {
service = GlobalConfigService.getInstance();
+ service.clearMemoryCache();
});
describe('getAllConfigs', () => {
diff --git a/src/server/services/activityStream.ts b/src/server/services/activityStream.ts
index 3f3f766..5e4a1ab 100644
--- a/src/server/services/activityStream.ts
+++ b/src/server/services/activityStream.ts
@@ -15,7 +15,7 @@
*/
import BaseService from './_service';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger';
import { Build, PullRequest, Deploy, Repository } from 'server/models';
import * as github from 'server/lib/github';
import { APP_HOST, QUEUE_NAMES } from 'shared/config';
@@ -50,10 +50,6 @@ import GlobalConfigService from './globalConfig';
import { ChartType, determineChartType } from 'server/lib/nativeHelm';
import { shouldUseNativeHelm } from 'server/lib/nativeHelm';
-const logger = rootLogger.child({
- filename: 'services/activityStream.ts',
-});
-
const createDeployMessage = async () => {
const deployLabel = await getDeployLabel();
const disabledLabel = await getDisabledLabel();
@@ -74,29 +70,43 @@ export default class ActivityStream extends BaseService {
});
processComments = async (job) => {
- try {
- const pullRequest: PullRequest = await this.db.models.PullRequest.findOne({
- id: job.data,
- });
- await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]');
- const { build, repository } = pullRequest;
- if (!build) {
- logger.warn(`[BUILD] Build id not found for pull request with id: ${job.data}`);
- return;
+ const { id, sender, correlationId, _ddTraceContext, targetGithubRepositoryId } = job.data;
+
+ return withLogContext({ correlationId, sender, _ddTraceContext }, async () => {
+ try {
+ getLogger({ stage: LogStage.COMMENT_PROCESSING }).debug(`Processing comment update for PR ${id}`);
+
+ const pullRequest: PullRequest = await this.db.models.PullRequest.findOne({
+ id,
+ });
+ await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]');
+ const { build } = pullRequest;
+ if (!build) {
+ getLogger({ stage: LogStage.COMMENT_FAILED }).warn(`Build: id not found pullRequestId=${id}`);
+ return;
+ }
+
+ const { repository } = pullRequest;
+ await this.db.services.ActivityStream.updatePullRequestActivityStream(
+ build,
+ build.deploys,
+ pullRequest,
+ repository,
+ true,
+ true,
+ null,
+ false,
+ targetGithubRepositoryId
+ );
+
+ getLogger({ stage: LogStage.COMMENT_COMPLETE }).debug(`Comment updated for PR ${id}`);
+ } catch (error) {
+ getLogger({ stage: LogStage.COMMENT_FAILED }).error(
+ { error },
+ `Comment: processing failed pullRequestId=${id}`
+ );
}
- await this.db.services.ActivityStream.updatePullRequestActivityStream(
- build,
- build.deploys,
- pullRequest,
- repository,
- true,
- true,
- null,
- false
- );
- } catch (error) {
- logger.error(`Error processing comment for PR ${job.data}:`, error);
- }
+ });
};
/**
@@ -116,56 +126,58 @@ export default class ActivityStream extends BaseService {
* @param body
*/
async updateBuildsAndDeploysFromCommentEdit(pullRequest: PullRequest, commentBody: string) {
- let shouldUpdateStatus = true;
-
await pullRequest.$fetchGraph('[build.[deploys.[service, deployable]], repository]');
const { build, repository } = pullRequest;
const { deploys, id: buildId } = build;
const buildUuid = build?.uuid;
- const runUuid = nanoid();
- const REDEPLOY_FLAG = '#REDEPLOY';
- const REDEPLOY_CHECKBOX = '[x] Redeploy Environment';
- const PURGE_FASTLY_CHECKBOX = '[x] Purge Fastly Service Cache';
+ return withLogContext({ buildUuid }, async () => {
+ let shouldUpdateStatus = true;
+ const runUuid = nanoid();
- const isRedeployRequested = [REDEPLOY_FLAG, REDEPLOY_CHECKBOX].some((flag) => commentBody.includes(flag));
- const isFastlyPurgeRequested = commentBody.includes(PURGE_FASTLY_CHECKBOX);
+ const REDEPLOY_FLAG = '#REDEPLOY';
+ const REDEPLOY_CHECKBOX = '[x] Redeploy Environment';
+ const PURGE_FASTLY_CHECKBOX = '[x] Purge Fastly Service Cache';
- try {
- if (isRedeployRequested) {
- // if redeploy from comment, add to build queue and return
- logger.info(`[BUILD ${buildUuid}] Redeploy triggered from comment edit`);
- await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
- buildId,
- runUUID: runUuid,
- });
- return;
- }
+ const isRedeployRequested = [REDEPLOY_FLAG, REDEPLOY_CHECKBOX].some((flag) => commentBody.includes(flag));
+ const isFastlyPurgeRequested = commentBody.includes(PURGE_FASTLY_CHECKBOX);
- if (isFastlyPurgeRequested) {
- // if fastly purge is requested from comment, we do not have to update the status
- await this.purgeFastlyServiceCache(buildUuid);
- shouldUpdateStatus = false;
- return;
- }
+ try {
+ if (isRedeployRequested) {
+ getLogger().info('Deploy: redeploy reason=commentEdit');
+ await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
+ buildId,
+ runUUID: runUuid,
+ ...extractContextForQueue(),
+ });
+ return;
+ }
- // handle all environment/service overrides
- await this.applyCommentOverrides({ build, deploys, pullRequest, commentBody, runUuid });
- } finally {
- // after everything update the pr comment
- await this.updatePullRequestActivityStream(
- build,
- deploys,
- pullRequest,
- repository,
- true,
- shouldUpdateStatus,
- null,
- true
- ).catch((error) => {
- logger.warn(`[BUILD ${buildUuid}] Failed to update the activity feed for comment edit: ${error}`);
- });
- }
+ if (isFastlyPurgeRequested) {
+ // If a Fastly cache purge was requested via the comment, skip the status update afterwards.
+ await this.purgeFastlyServiceCache(buildUuid);
+ shouldUpdateStatus = false;
+ return;
+ }
+
+ // handle all environment/service overrides
+ await this.applyCommentOverrides({ build, deploys, pullRequest, commentBody, runUuid });
+ } finally {
+ // After all overrides have been applied (or on early return), refresh the PR activity comment.
+ await this.updatePullRequestActivityStream(
+ build,
+ deploys,
+ pullRequest,
+ repository,
+ true,
+ shouldUpdateStatus,
+ null,
+ true
+ ).catch((error) => {
+ getLogger().warn({ error }, 'ActivityFeed: comment edit update failed');
+ });
+ }
+ });
}
private async applyCommentOverrides({
@@ -182,7 +194,7 @@ export default class ActivityStream extends BaseService {
runUuid: string;
}) {
if (!build.id) {
- logger.error(`[BUILD ${build.uuid}] No build provided to apply overrides from comment edit!`);
+ getLogger().error('Build: missing for comment edit overrides');
return;
}
@@ -191,7 +203,7 @@ export default class ActivityStream extends BaseService {
const envOverrides = CommentHelper.parseEnvironmentOverrides(commentBody);
const redeployOnPush = CommentHelper.parseRedeployOnPushes(commentBody);
- logger.debug(`[BUILD ${build.uuid}] Parsed environment overrides: ${JSON.stringify(envOverrides)}`);
+ getLogger().debug(`Parsed environment overrides: ${JSON.stringify(envOverrides)}`);
await build.$query().patch({
commentInitEnv: envOverrides,
@@ -199,7 +211,7 @@ export default class ActivityStream extends BaseService {
trackDefaultBranches: redeployOnPush,
});
- logger.debug(`[BUILD ${build.uuid}] Service overrides: %j`, serviceOverrides);
+ getLogger().debug(`Service overrides: ${JSON.stringify(serviceOverrides)}`);
await Promise.all(serviceOverrides.map((override) => this.patchServiceOverride(build, deploys, override)));
@@ -214,21 +226,20 @@ export default class ActivityStream extends BaseService {
await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
buildId: build.id,
runUUID: runUuid,
+ ...extractContextForQueue(),
});
}
}
private async patchServiceOverride(build: Build, deploys: Deploy[], { active, serviceName, branchOrExternalUrl }) {
- logger.debug(
- `[BUILD ${build.uuid}] Patching service: ${serviceName}, active: ${active}, branch/url: ${branchOrExternalUrl}`
- );
+ getLogger().debug(`Patching service: ${serviceName} active=${active} branch/url=${branchOrExternalUrl}`);
const deploy: Deploy = build.enableFullYaml
? deploys.find((d) => d.deployable.name === serviceName)
: deploys.find((d) => d.service.name === serviceName);
if (!deploy) {
- logger.warn(`[BUILD ${build.uuid}] No deploy found for service: ${serviceName}`);
+ getLogger().warn(`Deploy: not found service=${serviceName}`);
return;
}
@@ -246,22 +257,15 @@ export default class ActivityStream extends BaseService {
active,
})
.catch((error) => {
- logger.error(
- `[BUILD ${build.uuid}] [SERVICE ${serviceName}] Failed to patch deploy with external URL: ${error}`
- );
+ getLogger().error({ error }, `Deploy: patch failed service=${serviceName} field=externalUrl`);
});
} else {
- // Branch override
- logger.debug(
- `[BUILD ${build.uuid}] Setting branch override: ${branchOrExternalUrl} for deployable: ${deployable?.name}`
- );
+ getLogger().debug(`Setting branch override: ${branchOrExternalUrl} for deployable: ${deployable?.name}`);
await deploy.deployable
.$query()
.patch({ commentBranchName: branchOrExternalUrl })
.catch((error) => {
- logger.error(
- `[BUILD ${build.uuid}] [SERVICE ${serviceName}] Failed to patch deployable with branch: ${error}`
- );
+ getLogger().error({ error }, `Deployable: patch failed service=${serviceName} field=branch`);
});
await deploy
@@ -274,7 +278,7 @@ export default class ActivityStream extends BaseService {
active,
})
.catch((error) => {
- logger.error(`[BUILD ${build.uuid}] [SERVICE ${serviceName}] Failed to patch deploy with branch: ${error}`);
+ getLogger().error({ error }, `Deploy: patch failed service=${serviceName} field=branch`);
});
}
@@ -310,8 +314,7 @@ export default class ActivityStream extends BaseService {
});
if (hasGithubMissionControlComment && !pullRequest?.commentId) {
- const msg = `[BUILD ${build?.uuid}][activityStream][updateMissionControlComment] Status comment already exists but no mission control comment ID found!`;
- logger.child({ pullRequest }).error(msg);
+ getLogger().error('Comment: mission control id missing');
return;
}
@@ -331,9 +334,7 @@ export default class ActivityStream extends BaseService {
const commentId = response?.data?.id;
await pullRequest.$query().patch({ commentId, etag });
} catch (error) {
- logger.error(
- `[BUILD ${build?.uuid}] Failed to update Github mission control comment for ${fullName}/${branchName} - error: ${error}`
- );
+ getLogger().error({ error }, `GitHub: mission control update failed repo=${fullName}/${branchName}`);
}
}
@@ -351,8 +352,7 @@ export default class ActivityStream extends BaseService {
});
if (hasStatusComment && !commentId) {
- const msg = `[BUILD ${build?.uuid}][activityStream][updateStatusComment] Status comment already exists but no status comment ID found!`;
- logger.child({ pullRequest }).warn(msg);
+ getLogger().warn('Comment: status id missing');
return;
}
const message = await this.generateStatusCommentForBuild(build, deploys, pullRequest);
@@ -383,22 +383,21 @@ export default class ActivityStream extends BaseService {
updateMissionControl: boolean,
updateStatus: boolean,
error: Error = null,
- queue: boolean = true
+ queue: boolean = true,
+ targetGithubRepositoryId?: number
) {
const buildId = build?.id;
const uuid = build?.uuid;
const isFullYaml = build?.enableFullYaml;
const fullName = pullRequest?.fullName;
const branchName = pullRequest?.branchName;
- const prefix = `[BUILD ${uuid}]`;
- const suffix = `for ${fullName}/${branchName}`;
const isStatic = build?.isStatic ?? false;
const labels = pullRequest?.labels || [];
const hasStatusComment = await hasStatusCommentLabel(labels);
const isDefaultStatusEnabled = await isDefaultStatusCommentsEnabled();
const isShowingStatusComment = isStatic || hasStatusComment || isDefaultStatusEnabled;
if (!buildId) {
- logger.error(`${prefix}[buidIdError] No build ID found ${suffix}`);
+ getLogger().error(`Build: id not found repo=${fullName}/${branchName}`);
throw new Error('No build ID found for this build!');
}
const resource = `build.${buildId}`;
@@ -407,56 +406,70 @@ export default class ActivityStream extends BaseService {
try {
lock = await this.redlock.lock(resource, 9000);
if (queue && !error) {
- await this.commentQueue.add('comment', pullRequest.id, {
- jobId: `pr-${pullRequest.id}`,
- removeOnComplete: true,
- removeOnFail: true,
- });
+ await this.commentQueue.add(
+ 'comment',
+ { id: pullRequest.id, targetGithubRepositoryId, ...extractContextForQueue() },
+ {
+ jobId: `pr-${pullRequest.id}`,
+ removeOnComplete: true,
+ removeOnFail: true,
+ }
+ );
return;
}
if (updateStatus || updateMissionControl) {
- await this.manageDeployments(build, deploys);
+ const deploysForGithubDeployment = targetGithubRepositoryId
+ ? deploys.filter((d) => d.githubRepositoryId === targetGithubRepositoryId)
+ : deploys;
+
+ if (targetGithubRepositoryId) {
+ getLogger().info(
+ `Deploy: filtered deployCount=${deploysForGithubDeployment.length} totalCount=${deploys.length} targetRepoId=${targetGithubRepositoryId}`
+ );
+ }
+
+ await this.manageDeployments(build, deploysForGithubDeployment);
const isControlEnabled = await isControlCommentsEnabled();
if (isControlEnabled) {
await this.updateMissionControlComment(build, deploys, pullRequest, repository).catch((error) => {
- logger
- .child({ error })
- .warn(
- `${prefix} (Full YAML: ${isFullYaml}) Unable to update ${queued} mission control comment ${suffix}`
- );
+ getLogger().warn(
+ { error },
+ `Comment: mission control update failed repo=${fullName}/${branchName} fullYaml=${isFullYaml} queued=${queued}`
+ );
});
} else {
- logger.info(`${prefix} Mission control comments are disabled by configuration`);
+ getLogger().debug('Mission control comments are disabled');
}
}
if (updateStatus && isShowingStatusComment) {
await this.updateStatusComment(build, deploys, pullRequest, repository).catch((error) => {
- logger.warn(
- `${prefix} (Full YAML: ${isFullYaml}) Unable to update ${queued} status comment ${suffix}: ${error}`
+ getLogger().warn(
+ { error },
+ `Comment: status update failed repo=${fullName}/${branchName} fullYaml=${isFullYaml} queued=${queued}`
);
});
}
} catch (error) {
- logger.error(`${prefix} Failed to update the activity feed ${suffix}: ${error}`);
+ getLogger().error({ error }, `ActivityFeed: update failed repo=${fullName}/${branchName}`);
} finally {
if (lock) {
try {
await lock.unlock();
} catch (error) {
- await this.forceUnlock(resource, prefix, suffix);
+ await this.forceUnlock(resource, uuid, fullName, branchName);
}
}
}
}
- private async forceUnlock(resource: string, prefix: string, suffix: string) {
+ private async forceUnlock(resource: string, buildUuid: string, fullName: string, branchName: string) {
try {
await this.redis.del(resource);
} catch (error) {
- logger.child({ error }).error(`${prefix}[redlock] failed to forcefully unlock ${resource} ${suffix}`);
+ getLogger().error({ error }, `Lock: force unlock failed resource=${resource} repo=${fullName}/${branchName}`);
}
}
@@ -518,9 +531,7 @@ export default class ActivityStream extends BaseService {
break;
}
} else {
- logger.debug(
- `[BUILD ${build.uuid}] Skipping ${deploy.deployable.name} because it is an internal dependency.`
- );
+ getLogger().debug(`Skipping ${deploy.deployable.name} because it is an internal dependency`);
}
});
@@ -656,9 +667,7 @@ export default class ActivityStream extends BaseService {
const isDeployedWithActiveErrors = isDeployed && hasErroringActiveDeploys;
if (isDeployedWithActiveErrors) {
const deployStatuses = deploys.map(({ branchName, uuid, status }) => ({ branchName, uuid, status }));
- logger
- .child({ deployStatuses, buildStatus })
- .info(`[BUILD ${uuid}][generateMissionControlComment] deployed build has erroring deploys`);
+ getLogger().info(`Build: deployedWithErrors status=${buildStatus} deploys=${JSON.stringify(deployStatuses)}`);
metrics
.increment('deployWithErrors')
.event('Deploy Finished with Erroring Deploys', `${eventDetails.description} with erroring deploys`);
@@ -692,9 +701,7 @@ export default class ActivityStream extends BaseService {
}
message += await this.editCommentForBuild(build, deploys).catch((error) => {
- logger.error(
- `[BUILD ${build.uuid}][generateMissionControlComment] (Full YAML Support: ${build.enableFullYaml}) Unable to generate mission control: ${error}`
- );
+ getLogger().error({ error }, `Comment: mission control generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
@@ -702,9 +709,7 @@ export default class ActivityStream extends BaseService {
message += '\n---\n\n';
message += `## 📦 Deployments\n\n`;
message += await this.environmentBlock(build).catch((error) => {
- logger.error(
- `[BUILD ${build.uuid}][generateMissionControlComment] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${error}`
- );
+ getLogger().error({ error }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
}
@@ -712,21 +717,7 @@ export default class ActivityStream extends BaseService {
message += `\n\nmission control ${isStaging() ? 'stg ' : ''}comment: enabled \n`;
return message;
} catch (error) {
- logger
- .child({
- error,
- uuid,
- branchName,
- fullName,
- status,
- isOpen,
- sha,
- labels,
- buildStatus,
- })
- .error(
- `[BUILD ${uuid}][generateMissionControlComment] Failed to generate mission control comment for ${fullName}/${branchName}`
- );
+ getLogger().error({ error }, `Comment: mission control generation failed repo=${fullName}/${branchName}`);
return message;
}
}
@@ -818,19 +809,13 @@ export default class ActivityStream extends BaseService {
message += 'We are busy building your code...\n';
message += '## Build Status\n';
message += await this.buildStatusBlock(build, deploys, null).catch((error) => {
- logger
- .child({ build, deploys, error })
- .error(`[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status`);
+ getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += `\nHere's where you can find your services after they're deployed:\n`;
message += await this.environmentBlock(build).catch((error) => {
- logger
- .child({ build, error })
- .error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block`
- );
+ getLogger().error({ error }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
@@ -844,30 +829,25 @@ export default class ActivityStream extends BaseService {
message += `We're deploying your code. Please stand by....\n\n`;
message += '## Build Status\n';
message += await this.buildStatusBlock(build, deploys, null).catch((error) => {
- logger
- .child({ build, deploys, error })
- .error(`[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status`);
+ getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += `\nHere's where you can find your services after they're deployed:\n`;
message += await this.environmentBlock(build).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${e}`
- );
+ getLogger().error({ error: e }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += await this.dashboardBlock(build, deploys).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate dashboard: ${e}`
- );
+ getLogger().error({ error: e }, `Comment: dashboard generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
} else if (isReadyToDeployBuild) {
message += '## 🚀 Ready to deploy\n';
message += `Your code is built. We're ready to deploy whenever you are.\n`;
message += await this.deployingBlock(build).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate deployment status: ${e}`
+ getLogger().error(
+ { error: e },
+ `Comment: deployment status generation failed fullYaml=${build.enableFullYaml}`
);
return '';
});
@@ -879,23 +859,15 @@ export default class ActivityStream extends BaseService {
message += `There was a problem deploying your code. Some services may have not rolled out successfully. Here are the URLs for your services:\n\n`;
message += '## Build Status\n';
message += await this.buildStatusBlock(build, deploys, null).catch((error) => {
- logger
- .child({ build, deploys, error })
- .error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status`
- );
+ getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += await this.environmentBlock(build).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${e}`
- );
+ getLogger().error({ error: e }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += await this.dashboardBlock(build, deploys).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate dashboard: ${e}`
- );
+ getLogger().error({ error: e }, `Comment: dashboard generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
} else if (build.status === BuildStatus.CONFIG_ERROR) {
@@ -905,24 +877,16 @@ export default class ActivityStream extends BaseService {
message += '## ✅ Deployed\n';
message += '## Build Status\n';
message += await this.buildStatusBlock(build, deploys, null).catch((error) => {
- logger
- .child({ build, deploys, error })
- .error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate build status`
- );
+ getLogger().error({ error }, `Comment: build status generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += `\nWe've deployed your code. Here's where you can find your services:\n`;
message += await this.environmentBlock(build).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate environment comment block: ${e}`
- );
+ getLogger().error({ error: e }, `Comment: env block generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
message += await this.dashboardBlock(build, deploys).catch((e) => {
- logger.error(
- `[BUILD ${build.uuid}] (Full YAML Support: ${build.enableFullYaml}) Unable to generate dashboard: ${e}`
- );
+ getLogger().error({ error: e }, `Comment: dashboard generation failed fullYaml=${build.enableFullYaml}`);
return '';
});
} else {
@@ -1170,7 +1134,6 @@ export default class ActivityStream extends BaseService {
}
private async manageDeployments(build, deploys) {
- const uuid = build?.uuid;
const isGithubDeployments = build?.githubDeployments;
if (!isGithubDeployments) return;
const isFullYaml = build?.enableFullYaml;
@@ -1179,58 +1142,65 @@ export default class ActivityStream extends BaseService {
try {
await Promise.all(
deploys.map(async (deploy) => {
- const deployId = deploy?.id;
- const service = deploy?.service;
- const deployable = deploy?.deployable;
- const isActive = deploy?.active;
- const isOrgHelmChart = orgChartName === deployable?.helm?.chart?.name;
- const isPublic = isFullYaml ? deployable.public || isOrgHelmChart : service.public;
- const serviceType = isFullYaml ? deployable?.type : service?.type;
- const isActiveAndPublic = isActive && isPublic;
- const isDeploymentType = [DeployTypes.DOCKER, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes(
- serviceType
+ return withLogContext(
+ { deployUuid: deploy?.uuid, serviceName: deploy?.deployable?.name || deploy?.service?.name },
+ async () => {
+ const deployId = deploy?.id;
+ const service = deploy?.service;
+ const deployable = deploy?.deployable;
+ const isActive = deploy?.active;
+ const isOrgHelmChart = orgChartName === deployable?.helm?.chart?.name;
+ const isPublic = isFullYaml ? deployable.public || isOrgHelmChart : service.public;
+ const serviceType = isFullYaml ? deployable?.type : service?.type;
+ const isActiveAndPublic = isActive && isPublic;
+ const isDeploymentType = [DeployTypes.DOCKER, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes(
+ serviceType
+ );
+ const isDeployment = isActiveAndPublic && isDeploymentType;
+ if (!isDeployment) {
+ getLogger().debug(`Skipping deployment ${deploy?.name}`);
+ return;
+ }
+ await this.db.services.GithubService.githubDeploymentQueue
+ .add(
+ 'deployment',
+ { deployId, action: 'create', ...extractContextForQueue() },
+ { delay: 10000, jobId: `deploy-${deployId}` }
+ )
+ .catch((error) => getLogger().warn({ error }, `Deploy: management failed deployId=${deployId}`));
+ }
);
- const isDeployment = isActiveAndPublic && isDeploymentType;
- if (!isDeployment) {
- logger.debug(`Skipping deployment ${deploy?.name}`);
- return;
- }
- await this.db.services.GithubService.githubDeploymentQueue
- .add('deployment', { deployId, action: 'create' }, { delay: 10000, jobId: `deploy-${deployId}` })
- .catch((error) =>
- logger.child({ error }).warn(`[BUILD ${uuid}][manageDeployments] error with ${deployId}`)
- );
})
);
} catch (error) {
- logger.child({ error }).debug(`[BUILD ${uuid}][manageDeployments] error`);
+ getLogger().debug({ error }, 'manageDeployments error');
}
}
private async purgeFastlyServiceCache(uuid: string) {
- try {
- const computeShieldServiceId = await this.fastly.getFastlyServiceId(uuid, 'compute-shield');
- logger.child({ computeShieldServiceId }).debug(`[BUILD ${uuid}][activityStream][fastly] computeShieldServiceId`);
- if (computeShieldServiceId) {
- await this.fastly.purgeAllServiceCache(computeShieldServiceId, uuid, 'fastly');
- }
+ return withLogContext({ buildUuid: uuid }, async () => {
+ try {
+ const computeShieldServiceId = await this.fastly.getFastlyServiceId(uuid, 'compute-shield');
+ getLogger().debug(`Fastly computeShieldServiceId=${computeShieldServiceId}`);
+ if (computeShieldServiceId) {
+ await this.fastly.purgeAllServiceCache(computeShieldServiceId, uuid, 'fastly');
+ }
- const optimizelyServiceId = await this.fastly.getFastlyServiceId(uuid, 'optimizely');
- logger.child({ optimizelyServiceId }).debug(`[BUILD ${uuid}][activityStream][fastly] optimizelyServiceId`);
- if (optimizelyServiceId) {
- await this.fastly.purgeAllServiceCache(optimizelyServiceId, uuid, 'optimizely');
- }
+ const optimizelyServiceId = await this.fastly.getFastlyServiceId(uuid, 'optimizely');
+ getLogger().debug(`Fastly optimizelyServiceId=${optimizelyServiceId}`);
+ if (optimizelyServiceId) {
+ await this.fastly.purgeAllServiceCache(optimizelyServiceId, uuid, 'optimizely');
+ }
- const fastlyServiceId = await this.fastly.getFastlyServiceId(uuid, 'fastly');
- logger.child({ fastlyServiceId }).debug(`[BUILD ${uuid}][activityStream][fastly] fastlyServiceId`);
- if (fastlyServiceId) {
- await this.fastly.purgeAllServiceCache(fastlyServiceId, uuid, 'fastly');
+ const fastlyServiceId = await this.fastly.getFastlyServiceId(uuid, 'fastly');
+ getLogger().debug(`Fastly fastlyServiceId=${fastlyServiceId}`);
+ if (fastlyServiceId) {
+ await this.fastly.purgeAllServiceCache(fastlyServiceId, uuid, 'fastly');
+ }
+ getLogger().info(`Fastly: purged serviceId=${fastlyServiceId}`);
+ } catch (error) {
+ getLogger().error({ error }, 'Fastly: cache purge failed');
}
- logger
- .child({ fastlyServiceId })
- .info(`[BUILD ${uuid}][activityStream][fastly][purgeFastlyServiceCache] success`);
- } catch (error) {
- logger.child({ error }).info(`[BUILD ${uuid}][activityStream][fastly][purgeFastlyServiceCache] error`);
- }
+ });
}
}
diff --git a/src/server/services/ai/conversation/manager.ts b/src/server/services/ai/conversation/manager.ts
index 29dc5f0..00a6be8 100644
--- a/src/server/services/ai/conversation/manager.ts
+++ b/src/server/services/ai/conversation/manager.ts
@@ -15,7 +15,7 @@
*/
import { LLMProvider, Message, StreamChunk } from '../types/provider';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
export interface ConversationState {
summary: string;
@@ -41,10 +41,7 @@ export class ConversationManager {
}
async compress(messages: Message[], llmProvider: LLMProvider, buildUuid?: string): Promise {
- const logger = buildUuid
- ? rootLogger.child({ component: 'AIAgentConversationManager', buildUuid })
- : rootLogger.child({ component: 'AIAgentConversationManager' });
- logger.info(`Starting conversation compression for ${messages.length} messages`);
+ getLogger().info(`AI: compression starting messageCount=${messages.length} buildUuid=${buildUuid || 'none'}`);
const compressionPrompt = `
Analyze this debugging conversation and create a structured summary.
@@ -79,8 +76,8 @@ ${this.formatMessages(messages)}
state.messageCount = messages.length;
state.compressionLevel = 1;
- logger.info(
- `Compression complete: ${messages.length} messages -> ${state.tokenCount} tokens, identified ${state.identifiedIssues.length} issues, investigated ${state.investigatedServices.length} services`
+ getLogger().info(
+ `AIAgentConversationManager: compression complete messageCount=${messages.length} tokenCount=${state.tokenCount} issueCount=${state.identifiedIssues.length} serviceCount=${state.investigatedServices.length}`
);
return state;
diff --git a/src/server/services/ai/orchestration/orchestrator.ts b/src/server/services/ai/orchestration/orchestrator.ts
index 644b025..26b0069 100644
--- a/src/server/services/ai/orchestration/orchestrator.ts
+++ b/src/server/services/ai/orchestration/orchestrator.ts
@@ -20,7 +20,7 @@ import { StreamCallbacks } from '../types/stream';
import { ToolRegistry } from '../tools/registry';
import { ToolSafetyManager } from './safety';
import { LoopDetector } from './loopProtection';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
export interface OrchestrationResult {
success: boolean;
@@ -55,9 +55,6 @@ export class ToolOrchestrator {
let totalToolCalls = 0;
let fullResponse = '';
const protection = this.loopDetector.getProtection();
- const logger = buildUuid
- ? rootLogger.child({ component: 'AIAgentOrchestrator', buildUuid })
- : rootLogger.child({ component: 'AIAgentOrchestrator' });
this.loopDetector.reset();
@@ -85,7 +82,7 @@ export class ToolOrchestrator {
}
}
} catch (error: any) {
- logger.error(`Stream error: ${error.message}`, error);
+ getLogger().error({ error }, `AI: stream error buildUuid=${buildUuid || 'none'}`);
return {
success: false,
error: error.message || 'Provider error',
@@ -106,7 +103,11 @@ export class ToolOrchestrator {
totalToolCalls += toolCalls.length;
if (totalToolCalls > protection.maxToolCalls) {
- logger.warn(`Tool call limit exceeded: ${totalToolCalls} > ${protection.maxToolCalls}`);
+ getLogger().warn(
+ `AI: tool call limit exceeded totalToolCalls=${totalToolCalls} maxToolCalls=${
+ protection.maxToolCalls
+ } buildUuid=${buildUuid || 'none'}`
+ );
return {
success: false,
error:
@@ -187,8 +188,10 @@ export class ToolOrchestrator {
});
}
- logger.warn(
- `Tool loop hit iteration limit: ${iteration}/${protection.maxIterations}, totalToolCalls=${totalToolCalls}`
+ getLogger().warn(
+ `AI: iteration limit reached iteration=${iteration} maxIterations=${
+ protection.maxIterations
+ } totalToolCalls=${totalToolCalls} buildUuid=${buildUuid || 'none'}`
);
return {
success: false,
diff --git a/src/server/services/ai/orchestration/safety.ts b/src/server/services/ai/orchestration/safety.ts
index a70a3a1..ab9efd0 100644
--- a/src/server/services/ai/orchestration/safety.ts
+++ b/src/server/services/ai/orchestration/safety.ts
@@ -17,7 +17,7 @@
import JsonSchema from 'jsonschema';
import { Tool, ToolResult, ToolSafetyLevel } from '../types/tool';
import { StreamCallbacks } from '../types/stream';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
export class ToolSafetyManager {
private requireConfirmation: boolean;
@@ -35,13 +35,13 @@ export class ToolSafetyManager {
signal?: AbortSignal,
buildUuid?: string
): Promise {
- const logger = buildUuid
- ? rootLogger.child({ component: 'AIAgentSafetyManager', buildUuid })
- : rootLogger.child({ component: 'AIAgentSafetyManager' });
-
const validation = this.validateArgs(tool.parameters, args);
if (!validation.valid) {
- logger.warn(`Tool ${tool.name} failed validation:`, validation.errors);
+ getLogger().warn(
+ `AI: validation failed tool=${tool.name} errors=${validation.errors.join(', ')} buildUuid=${
+ buildUuid || 'none'
+ }`
+ );
return {
success: false,
error: {
@@ -57,7 +57,7 @@ export class ToolSafetyManager {
if (confirmDetails) {
if (!callbacks.onToolConfirmation) {
- logger.error(`Tool ${tool.name} requires confirmation but no confirmation callback provided`);
+ getLogger().error(`AI: confirmation callback missing tool=${tool.name} buildUuid=${buildUuid || 'none'}`);
return {
success: false,
error: {
@@ -91,7 +91,7 @@ export class ToolSafetyManager {
return result;
} catch (error: any) {
if (error.message === 'Tool execution timeout') {
- logger.warn(`Tool ${tool.name} timed out after 30 seconds`);
+ getLogger().warn(`AI: tool timeout tool=${tool.name} timeout=30s buildUuid=${buildUuid || 'none'}`);
return {
success: false,
error: {
@@ -103,7 +103,9 @@ export class ToolSafetyManager {
};
}
- logger.error(`Tool ${tool.name} execution error:`, error);
+ getLogger().error(
+ `AI: tool execution failed tool=${tool.name} error=${error?.message} buildUuid=${buildUuid || 'none'}`
+ );
return {
success: false,
error: {
@@ -150,13 +152,11 @@ export class ToolSafetyManager {
private logToolExecution(name: string, args: Record, result: ToolResult, buildUuid?: string): void {
if (!result.success && !result.error?.recoverable) {
- const logger = buildUuid
- ? rootLogger.child({ component: 'AIAgentSafetyManager', buildUuid })
- : rootLogger.child({ component: 'AIAgentSafetyManager' });
- logger.error(`Tool ${name} failed with non-recoverable error: ${result.error?.message}`, {
- errorCode: result.error?.code,
- recoverable: result.error?.recoverable,
- });
+ getLogger().error(
+ `AI: non-recoverable tool error tool=${name} error=${result.error?.message} errorCode=${
+ result.error?.code
+ } buildUuid=${buildUuid || 'none'}`
+ );
}
}
}
diff --git a/src/server/services/ai/providers/gemini.ts b/src/server/services/ai/providers/gemini.ts
index 6ce3af5..ac86844 100644
--- a/src/server/services/ai/providers/gemini.ts
+++ b/src/server/services/ai/providers/gemini.ts
@@ -18,9 +18,7 @@ import { GoogleGenerativeAI, SchemaType } from '@google/generative-ai';
import { BaseLLMProvider } from './base';
import { ModelInfo, CompletionOptions, StreamChunk, Message } from '../types/provider';
import { Tool, ToolCall } from '../types/tool';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ component: 'GeminiProvider' });
+import { getLogger } from 'server/lib/logger';
export class GeminiProvider extends BaseLLMProvider {
name = 'gemini';
@@ -83,9 +81,12 @@ export class GeminiProvider extends BaseLLMProvider {
}
try {
JSON.parse(responseContent);
- } catch (e) {
- logger.warn(
- `Tool response is not valid JSON, sanitizing: ${responseContent.substring(0, 100)}...`
+ } catch (e: any) {
+ getLogger().warn(
+ `GeminiProvider: tool response not valid JSON, sanitizing preview=${responseContent.substring(
+ 0,
+ 100
+ )}`
);
responseContent = JSON.stringify({ content: responseContent });
}
@@ -105,8 +106,8 @@ export class GeminiProvider extends BaseLLMProvider {
}
return messages;
}
- } catch (e) {
- logger.warn(`Failed to parse tool results, treating as text: ${e.message}`);
+ } catch (e: any) {
+ getLogger().warn(`GeminiProvider: failed to parse tool results error=${e.message}`);
}
}
return [
@@ -140,10 +141,8 @@ export class GeminiProvider extends BaseLLMProvider {
lastCandidate = candidate;
if (candidate.finishReason === 'STOP' && (!candidate.content?.parts || candidate.content.parts.length === 0)) {
- logger.error(
- `Gemini returned STOP with no content. Safety ratings: ${JSON.stringify(
- candidate.safetyRatings
- )}, full candidate: ${JSON.stringify(candidate)}`
+ getLogger().error(
+ `GeminiProvider: returned STOP with no content safetyRatings=${JSON.stringify(candidate.safetyRatings)}`
);
}
@@ -167,22 +166,22 @@ export class GeminiProvider extends BaseLLMProvider {
const response = await result.response;
if (accumulatedText.length === 0 && functionCalls.length === 0) {
- let responseText = 'N/A';
+ let _responseText = 'N/A';
try {
- responseText = (response as any).text();
- } catch (e) {
- responseText = `Error getting text: ${e.message}`;
+ _responseText = (response as any).text();
+ } catch (e: any) {
+ _responseText = `Error getting text: ${e.message}`;
}
- logger.error(
- `Gemini returned empty response. Last candidate: ${JSON.stringify(
- lastCandidate
- )}, promptFeedback: ${JSON.stringify((response as any).promptFeedback)}, response.text: ${responseText}`
- );
- logger.error(
- `Full response object keys: ${Object.keys(response)}, candidates: ${JSON.stringify(
- (response as any).candidates
+ getLogger().error(
+ `GeminiProvider: empty response finishReason=${lastCandidate?.finishReason} promptFeedback=${JSON.stringify(
+ (response as any).promptFeedback
)}`
);
+ getLogger().error(
+ `GeminiProvider: debug info responseKeys=${Object.keys(response).join(',')} candidatesCount=${
+ (response as any).candidates?.length || 0
+ }`
+ );
throw new Error(
`Gemini returned an empty response. This may be due to: ` +
diff --git a/src/server/services/ai/service.ts b/src/server/services/ai/service.ts
index d167957..afd6cbc 100644
--- a/src/server/services/ai/service.ts
+++ b/src/server/services/ai/service.ts
@@ -39,7 +39,7 @@ import {
GitHubClient,
} from './tools';
import { DebugContext, DebugMessage } from '../types/aiAgent';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
export interface AIAgentConfig {
provider: ProviderType;
@@ -101,7 +101,6 @@ export class AIAgentCore {
signal: AbortSignal
): Promise {
const startTime = Date.now();
- const logger = rootLogger.child({ component: 'AIAgentCore', buildUuid: context.buildUuid });
try {
if (context.lifecycleContext.pullRequest.branch) {
@@ -121,14 +120,18 @@ export class AIAgentCore {
}));
if (await this.conversationManager.shouldCompress(messages)) {
- logger.info(`Compressing conversation history from ${messages.length} messages`);
+ getLogger().info(
+ `AIAgentCore: compressing conversation fromMessageCount=${messages.length} buildUuid=${context.buildUuid}`
+ );
const state = await this.conversationManager.compress(messages, this.provider, context.buildUuid);
messages.splice(0, messages.length - 1);
messages.unshift({
role: 'user',
content: this.conversationManager.buildPromptFromState(state),
});
- logger.info(`Conversation compressed to ${messages.length} messages`);
+ getLogger().info(
+ `AIAgentCore: conversation compressed toMessageCount=${messages.length} buildUuid=${context.buildUuid}`
+ );
}
const conversationHistoryForBuilder: DebugMessage[] = messages.map((m) => ({
@@ -172,10 +175,12 @@ export class AIAgentCore {
const duration = Date.now() - startTime;
- logger.info(
- `Query processing ${result.success ? 'completed' : 'failed'}: iterations=${
+ getLogger().info(
+ `AIAgentCore: query processing ${result.success ? 'completed' : 'failed'} iterations=${
result.metrics.iterations
- } toolCalls=${result.metrics.toolCalls} duration=${duration}ms isJson=${finalResult.isJson}`
+ } toolCalls=${result.metrics.toolCalls} duration=${duration}ms isJson=${finalResult.isJson} buildUuid=${
+ context.buildUuid
+ }`
);
return {
@@ -186,7 +191,9 @@ export class AIAgentCore {
} catch (error: any) {
const duration = Date.now() - startTime;
- logger.error(`Query processing error after ${duration}ms:`, error);
+ getLogger().error(
+ `AIAgentCore: query processing error duration=${duration}ms error=${error?.message} buildUuid=${context.buildUuid}`
+ );
throw error;
}
diff --git a/src/server/services/ai/streaming/jsonBuffer.ts b/src/server/services/ai/streaming/jsonBuffer.ts
index de1d2df..71bfa93 100644
--- a/src/server/services/ai/streaming/jsonBuffer.ts
+++ b/src/server/services/ai/streaming/jsonBuffer.ts
@@ -14,9 +14,7 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ component: 'JSONBuffer' });
+import { getLogger } from 'server/lib/logger';
export class JSONBuffer {
private buffer: string = '';
@@ -46,8 +44,8 @@ export class JSONBuffer {
try {
return JSON.parse(this.buffer);
- } catch (error) {
- logger.error({ error, bufferLength: this.buffer.length }, 'Failed to parse JSON buffer');
+ } catch (error: any) {
+ getLogger().error({ error }, `JSONBuffer: parse failed bufferLength=${this.buffer.length}`);
return null;
}
}
diff --git a/src/server/services/ai/streaming/responseHandler.ts b/src/server/services/ai/streaming/responseHandler.ts
index d172c32..3499dbe 100644
--- a/src/server/services/ai/streaming/responseHandler.ts
+++ b/src/server/services/ai/streaming/responseHandler.ts
@@ -16,27 +16,23 @@
import { StreamCallbacks } from '../types/stream';
import { JSONBuffer } from './jsonBuffer';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ component: 'AIAgentResponseHandler' });
+import { getLogger } from 'server/lib/logger';
export class ResponseHandler {
private jsonBuffer: JSONBuffer;
private isJsonResponse: boolean = false;
private textBuffer: string = '';
- private logger: typeof logger;
+ private buildUuid?: string;
constructor(private callbacks: StreamCallbacks, buildUuid?: string) {
this.jsonBuffer = new JSONBuffer();
- this.logger = buildUuid
- ? rootLogger.child({ component: 'AIAgentResponseHandler', buildUuid })
- : rootLogger.child({ component: 'AIAgentResponseHandler' });
+ this.buildUuid = buildUuid;
}
handleChunk(text: string): void {
if (!this.isJsonResponse && this.isJsonStart(text)) {
this.isJsonResponse = true;
- this.logger.info('Detected JSON response start, switching to JSON buffering mode');
+ getLogger().info(`AI: JSON response detected buildUuid=${this.buildUuid || 'none'}`);
this.callbacks.onThinking('Generating structured report...');
this.jsonBuffer.append(text);
return;
@@ -46,13 +42,13 @@ export class ResponseHandler {
this.jsonBuffer.append(text);
if (this.jsonBuffer.isComplete()) {
- this.logger.info('JSON response complete, parsing structured output');
+ getLogger().info(`AI: JSON response complete buildUuid=${this.buildUuid || 'none'}`);
const parsed = this.jsonBuffer.parse();
if (parsed) {
- this.logger.info(`Parsed structured output of type: ${parsed.type}`);
+ getLogger().info(`AI: structured output parsed type=${parsed.type} buildUuid=${this.buildUuid || 'none'}`);
this.callbacks.onStructuredOutput(parsed);
} else {
- this.logger.warn('Failed to parse completed JSON buffer');
+ getLogger().warn(`AI: JSON parse failed buildUuid=${this.buildUuid || 'none'}`);
}
}
return;
diff --git a/src/server/services/aiAgent.ts b/src/server/services/aiAgent.ts
index 0cb0ac6..bbd5244 100644
--- a/src/server/services/aiAgent.ts
+++ b/src/server/services/aiAgent.ts
@@ -31,9 +31,7 @@ import {
PatchK8sResourceTool,
GetIssueCommentTool,
} from './ai/tools';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({ component: 'AIAgentService' });
+import { getLogger } from 'server/lib/logger';
export default class AIAgentService extends BaseService {
private service: AIAgentCore | null = null;
@@ -239,10 +237,7 @@ Respond with ONLY the word INVESTIGATE or FIX, nothing else.`;
return 'investigate';
}
} catch (error: any) {
- logger.error(
- { error, errorMessage: error?.message, errorStack: error?.stack },
- 'Failed to classify user intent, defaulting to investigate'
- );
+ getLogger().error({ error }, 'AI: classifyUserIntent failed');
return 'investigate';
}
}
diff --git a/src/server/services/build.ts b/src/server/services/build.ts
index d748aae..50c5d65 100644
--- a/src/server/services/build.ts
+++ b/src/server/services/build.ts
@@ -30,7 +30,7 @@ import BaseService from './_service';
import _ from 'lodash';
import { QUEUE_NAMES } from 'shared/config';
import { LifecycleError } from 'server/lib/errors';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, extractContextForQueue, LogStage, updateLogContext } from 'server/lib/logger';
import { ParsingError, YamlConfigParser } from 'server/lib/yamlConfigParser';
import { ValidationError, YamlConfigValidator } from 'server/lib/yamlConfigValidator';
@@ -45,10 +45,6 @@ import GlobalConfigService from './globalConfig';
import { paginate, PaginationMetadata, PaginationParams } from 'server/lib/paginate';
import { getYamlFileContentFromBranch } from 'server/lib/github';
-const logger = rootLogger.child({
- filename: 'services/build.ts',
-});
-
const tracer = Tracer.getInstance();
tracer.initialize('build-service');
export interface IngressConfiguration {
@@ -85,14 +81,14 @@ export default class BuildService extends BaseService {
// Enqueue a deletion job
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][cleanupBuilds][buidIdError] No build ID found for this build!`);
+ getLogger().error('Build: id missing for=cleanup');
}
- logger.info(`[BUILD ${build?.uuid}] Queuing build for deletion`);
- await this.db.services.BuildService.deleteQueue.add('delete', { buildId });
+ getLogger().info('Build: queuing action=delete');
+ await this.db.services.BuildService.deleteQueue.add('delete', { buildId, ...extractContextForQueue() });
}
}
} catch (e) {
- logger.error(`[BUILD ${build.uuid}] Can't cleanup build: ${e}`);
+ getLogger().error({ error: e }, 'Build: cleanup failed');
}
}
}
@@ -256,7 +252,7 @@ export default class BuildService extends BaseService {
(deploy.service.type === DeployTypes.DOCKER || deploy.service.type === DeployTypes.GITHUB)
)
.map((deploy) => {
- logger.debug(`${deploy.uuid}: active = ${deploy.active}`);
+ getLogger().debug(`Deploy active status: deployUuid=${deploy.uuid} active=${deploy.active}`);
return this.ingressConfigurationForDeploy(deploy);
})
)
@@ -385,7 +381,7 @@ export default class BuildService extends BaseService {
const environments = await this.getEnvironmentsToBuild(environmentId, repositoryId);
if (!environments.length) {
- logger.debug('No matching environments');
+ getLogger().debug('Build: no matching environments');
return;
}
@@ -404,7 +400,7 @@ export default class BuildService extends BaseService {
});
await Promise.all(promises);
} catch (err) {
- logger.fatal(`Failed to create and deploy build due to fatal error: ${err}`);
+ getLogger().fatal({ error: err }, 'Build: create and deploy failed');
}
}
@@ -417,17 +413,15 @@ export default class BuildService extends BaseService {
await this.db.services.Webhook.upsertWebhooksWithYaml(build, build.pullRequest);
} catch (error) {
if (error instanceof ParsingError) {
- logger.error(`[BUILD ${build.uuid}] Invalid Lifecycle Config File: ${error}`);
+ getLogger().error({ error }, 'Config: parsing failed');
throw error;
} else if (error instanceof ValidationError) {
- logger.error(`[BUILD ${build.uuid}] Invalid Lifecycle Config File: ${error}`);
+ getLogger().error({ error }, 'Config: validation failed');
throw error;
} else {
- // Temporary warps around the new implementation so it won't F up production if i did something stupid.
- // This code has no use in production yet but will start collecting data to validate if implementation works or not.
- logger.warn(`[BUILD ${build.uuid}] No worry. Nothing is bombed. Can ignore this error: ${error}`);
+ getLogger().warn({ error }, 'Config: import warning');
}
}
}
@@ -440,6 +434,10 @@ export default class BuildService extends BaseService {
try {
const build = await this.findOrCreateBuild(environment, options, lifecycleConfig);
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
+
// After a build is susccessfully created or retrieved,
// we need to create or update the deployables to be used for build and deploy.
if (build && options != null) {
@@ -459,8 +457,8 @@ export default class BuildService extends BaseService {
}
if (options.repositoryId && options.repositoryBranchName) {
- logger.debug(
- `[BUILD ${build.uuid}] Setting up default build services for repositoryID:${options.repositoryId} branch:${options.repositoryBranchName}`
+ getLogger().debug(
+ `Setting up default build services: repositoryId=${options.repositoryId} branch=${options.repositoryBranchName}`
);
await this.setupDefaultBuildServiceOverrides(
@@ -492,7 +490,7 @@ export default class BuildService extends BaseService {
throw new Error('Missing build or deployment options from environment.');
}
} catch (error) {
- logger.fatal(`Failed to create build and deploys due to fatal error: ${error}`);
+ getLogger().fatal({ error }, 'Build: create deploys failed');
}
}
@@ -508,6 +506,10 @@ export default class BuildService extends BaseService {
const runUUID = nanoid();
/* We now own the build for as long as we see this UUID */
const uuid = build?.uuid;
+
+ if (uuid) {
+ updateLogContext({ buildUuid: uuid });
+ }
const pullRequest = build?.pullRequest;
const fullName = pullRequest?.fullName;
const branchName = pullRequest?.branchName;
@@ -522,10 +524,10 @@ export default class BuildService extends BaseService {
if (!latestCommit) {
latestCommit = await github.getSHAForBranch(branchName, owner, name);
}
- const deploys = await this.db.services.Deploy.findOrCreateDeploys(environment, build);
+ const deploys = await this.db.services.Deploy.findOrCreateDeploys(environment, build, githubRepositoryId);
build?.$setRelated('deploys', deploys);
await build?.$fetchGraph('pullRequest');
- await new BuildEnvironmentVariables(this.db).resolve(build);
+ await new BuildEnvironmentVariables(this.db).resolve(build, githubRepositoryId);
await this.markConfigurationsAsBuilt(build);
await this.updateStatusAndComment(build, BuildStatus.BUILDING, runUUID, true, true);
const pullRequest = build?.pullRequest;
@@ -537,8 +539,7 @@ export default class BuildService extends BaseService {
dependencyGraph,
});
} catch (error) {
- // do nothing
- logger.warn(`Unable to generate dependecy graph for ${build.uuid}`, error);
+ getLogger().warn({ error }, 'Graph: generation failed');
}
// Build Docker Images & Deploy CLI Based Infra At the Same Time
@@ -546,7 +547,7 @@ export default class BuildService extends BaseService {
this.buildImages(build, githubRepositoryId),
this.deployCLIServices(build, githubRepositoryId),
]);
- logger.debug(`[BUILD ${uuid}] Build results: buildImages=${results[0]}, deployCLIServices=${results[1]}`);
+ getLogger().debug(`Build results: buildImages=${results[0]} deployCLIServices=${results[1]}`);
const success = _.every(results);
/* Verify that all deploys are successfully built that are active */
if (success) {
@@ -567,15 +568,13 @@ export default class BuildService extends BaseService {
}
}
} else {
- // If it's in an error state, then update the build to an error state,
- // update the activity feed, and return.
- logger.warn(
- `[BUILD ${uuid}][resolveAndDeployBuild] Build is in an errored state. Not commencing with rollout for ${fullName}/${branchName}:${latestCommit}`
+ getLogger().warn(
+ `Build: errored skipping=rollout fullName=${fullName} branchName=${branchName} latestCommit=${latestCommit}`
);
await this.updateStatusAndComment(build, BuildStatus.ERROR, runUUID, true, true);
}
} catch (error) {
- logger.child({ error }).error(`[BUILD ${uuid}][resolveAndDeployBuild][ERROR] Failed to deploy build: ${error}`);
+ getLogger().error({ error }, 'Build: deploy failed');
await this.updateStatusAndComment(build, BuildStatus.ERROR, runUUID, true, true, error);
}
@@ -620,7 +619,7 @@ export default class BuildService extends BaseService {
githubDeployments,
namespace: `env-${uuid}`,
}));
- logger.info(`[BUILD ${build.uuid}] Created build for pull request branch: ${options.repositoryBranchName}`);
+ getLogger().info(`Build: created branch=${options.repositoryBranchName}`);
return build;
}
@@ -661,13 +660,11 @@ export default class BuildService extends BaseService {
): Promise {
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][createBuildServiceOverride][buidIdError] No build ID found for this build!`);
+ getLogger().error('Build: id missing for=createBuildServiceOverride');
}
const serviceId = service?.id;
if (!serviceId) {
- logger.error(
- `[BUILD ${build?.uuid}][createBuildServiceOverride][serviceIdError] No service ID found for this service!`
- );
+ getLogger().error('Service: id missing for=createBuildServiceOverride');
}
const buildServiceOverride =
(await this.db.models.BuildServiceOverride.findOne({
@@ -689,13 +686,17 @@ export default class BuildService extends BaseService {
await build.reload();
await build?.$fetchGraph('[services, deploys.[service, build]]');
- logger.debug(`[DELETE ${build?.uuid}] Triggering cleanup`);
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
+
+ getLogger().debug('Build: triggering cleanup');
await this.updateStatusAndComment(build, BuildStatus.TEARING_DOWN, build.runUUID, true, true).catch((error) => {
- logger.warn(`[BUILD: ${build.uuid}] Failed to update status to ${BuildStatus.TEARING_DOWN}: ${error}`);
+ getLogger().warn({ error }, `Build: status update failed status=${BuildStatus.TEARING_DOWN}`);
});
await Promise.all([k8s.deleteBuild(build), cli.deleteBuild(build), uninstallHelmReleases(build)]).catch(
- (error) => logger.child({ build, error }).error(`[DELETE ${build?.uuid}] Failed to cleanup build`)
+ (error) => getLogger().error({ error }, 'Build: resource cleanup failed')
);
await Promise.all(
@@ -705,6 +706,7 @@ export default class BuildService extends BaseService {
await this.db.services.GithubService.githubDeploymentQueue.add('deployment', {
deployId: deploy.id,
action: 'delete',
+ ...extractContextForQueue(),
});
})
);
@@ -712,15 +714,14 @@ export default class BuildService extends BaseService {
await k8s.deleteNamespace(build.namespace);
await this.db.services.Ingress.ingressCleanupQueue.add('cleanup', {
buildId: build.id,
+ ...extractContextForQueue(),
});
- logger.info(`[DELETE ${build?.uuid}] Deleted build`);
+ getLogger().info('Build: deleted');
await this.updateStatusAndComment(build, BuildStatus.TORN_DOWN, build.runUUID, true, true).catch((error) => {
- logger.warn(`[BUILD: ${build.uuid}] Failed to update status to ${BuildStatus.TORN_DOWN}: ${error}`);
+ getLogger().warn({ error }, `Build: status update failed status=${BuildStatus.TORN_DOWN}`);
});
} catch (e) {
- logger.error(
- `[DELETE ${build.uuid}] Error deleting build: ${e instanceof LifecycleError ? e.getMessage() : e}`
- );
+ getLogger().error({ error: e instanceof LifecycleError ? e.getMessage() : e }, 'Build: delete failed');
}
}
}
@@ -742,54 +743,54 @@ export default class BuildService extends BaseService {
updateStatus: boolean,
error: Error = null
) {
- try {
- await build.reload();
- await build?.$fetchGraph('[deploys.[service, deployable], pullRequest.[repository]]');
+ return withLogContext({ buildUuid: build.uuid }, async () => {
+ try {
+ await build.reload();
+ await build?.$fetchGraph('[deploys.[service, deployable], pullRequest.[repository]]');
- const { deploys, pullRequest } = build;
- const { repository } = pullRequest;
+ const { deploys, pullRequest } = build;
+ const { repository } = pullRequest;
- if (build.runUUID !== runUUID) {
- return;
- } else {
- await build.$query().patch({
- status,
- });
+ if (build.runUUID !== runUUID) {
+ return;
+ } else {
+ await build.$query().patch({
+ status,
+ });
- // add dashboard links to build database
- let dashboardLinks = constructBuildLinks(build.uuid);
- const hasFastly = determineIfFastlyIsUsed(deploys);
- if (hasFastly) {
- try {
- const fastlyDashboardUrl = await this.fastly.getServiceDashboardUrl(build.uuid, 'fastly');
- if (fastlyDashboardUrl) {
- dashboardLinks = insertBuildLink(dashboardLinks, 'Fastly Dashboard', fastlyDashboardUrl.href);
+ // add dashboard links to build database
+ let dashboardLinks = constructBuildLinks(build.uuid);
+ const hasFastly = determineIfFastlyIsUsed(deploys);
+ if (hasFastly) {
+ try {
+ const fastlyDashboardUrl = await this.fastly.getServiceDashboardUrl(build.uuid, 'fastly');
+ if (fastlyDashboardUrl) {
+ dashboardLinks = insertBuildLink(dashboardLinks, 'Fastly Dashboard', fastlyDashboardUrl.href);
+ }
+ } catch (err) {
+ getLogger().error({ error: err }, 'Fastly: dashboard URL fetch failed');
}
- } catch (err) {
- logger.error(`[BUILD ${build.uuid}] Unable to get Fastly dashboard URL: ${err}`);
}
+ await build.$query().patch({ dashboardLinks });
+
+ await this.db.services.ActivityStream.updatePullRequestActivityStream(
+ build,
+ deploys,
+ pullRequest,
+ repository,
+ updateMissionControl,
+ updateStatus,
+ error
+ ).catch((e) => {
+ getLogger().error({ error: e }, 'ActivityStream: update failed');
+ });
}
- await build.$query().patch({ dashboardLinks });
+ } finally {
+ getLogger().debug(`Build status changed: status=${build.status}`);
- await this.db.services.ActivityStream.updatePullRequestActivityStream(
- build,
- deploys,
- pullRequest,
- repository,
- updateMissionControl,
- updateStatus,
- error
- ).catch((e) => {
- logger.error(`[BUILD ${build.uuid}] Unable to update pull request activity stream: ${e}`);
- });
+ await this.db.services.Webhook.webhookQueue.add('webhook', { buildId: build.id, ...extractContextForQueue() });
}
- } finally {
- // Even S**T happen, we still try to fire the LC webhooks no matter what
- // Pull webhooks for this environment, and run them
- logger.debug(`[BUILD ${build.uuid}] Build status changed to ${build.status}.`);
-
- await this.db.services.Webhook.webhookQueue.add('webhook', { buildId: build.id });
- }
+ });
}
async markConfigurationsAsBuilt(build: Build) {
@@ -813,9 +814,9 @@ export default class BuildService extends BaseService {
await deploy.$query().patch({ status: DeployStatus.BUILT });
}
const configUUIDs = configDeploys.map((deploy) => deploy?.uuid).join(',');
- logger.info(`[BUILD ${build.uuid}] Updated configuration type deploy ${configUUIDs} as built`);
+ getLogger().info(`Build: config deploys marked built uuids=${configUUIDs}`);
} catch (error) {
- logger.error(`[BUILD ${build.uuid}] Failed to update configuration type deploy as built: ${error}`);
+ getLogger().error({ error }, 'Config: deploy update failed');
}
}
@@ -828,7 +829,7 @@ export default class BuildService extends BaseService {
});
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][deployCLIServices][buidIdError] No build ID found for this build!`);
+ getLogger().error('Build: id missing for=deployCLIServices');
}
const deploys = await Deploy.query()
.where({ buildId, ...(githubRepositoryId ? { githubRepositoryId } : {}) })
@@ -842,17 +843,14 @@ export default class BuildService extends BaseService {
.filter((d) => d.active && CLIDeployTypes.has(d.deployable.type))
.map(async (deploy) => {
if (!deploy) {
- logger.debug(
- `[BUILD ${build?.uuid}][deployCLIServices] This deploy is undefined. Deploys: %j`,
- deploys
- );
+ getLogger().debug(`Deploy is undefined in deployCLIServices: deploysLength=${deploys.length}`);
return false;
}
try {
const result = await this.db.services.Deploy.deployCLI(deploy);
return result;
} catch (err) {
- logger.error(`[BUILD ${build?.uuid}][DEPLOY ${deploy?.uuid}][deployCLIServices] Error: ${err}`);
+ getLogger().error({ error: err }, `CLI: deploy failed uuid=${deploy?.uuid}`);
return false;
}
})
@@ -865,25 +863,21 @@ export default class BuildService extends BaseService {
.filter((d) => d.active && CLIDeployTypes.has(d.service.type))
.map(async (deploy) => {
if (deploy === undefined) {
- logger.debug(
- "Somehow deploy is undefined here.... That shouldn't be possible? Build deploy length is %s",
- deploys.length
- );
+ getLogger().debug(`Deploy is undefined in deployCLIServices: deploysLength=${deploys.length}`);
}
const result = await this.db.services.Deploy.deployCLI(deploy).catch((error) => {
- logger.error(`[${build.uuid} Build Failure: CLI Failed => ${error}`);
+ getLogger().error({ error }, 'CLI: deploy failed');
return false;
});
- if (!result)
- logger.info(`[BUILD ${build?.uuid}][${deploy.uuid}][deployCLIServices] CLI deploy unsuccessful`);
+ if (!result) getLogger().info(`CLI: deploy unsuccessful uuid=${deploy.uuid}`);
return result;
})
)
);
}
} catch (error) {
- logger.error(`[${build.uuid} Build Failure: CLI Failed => ${error}`);
+ getLogger().error({ error }, 'CLI: build failed');
return false;
}
}
@@ -896,7 +890,7 @@ export default class BuildService extends BaseService {
async buildImages(build: Build, githubRepositoryId = null): Promise {
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][buildImages][buidIdError] No build ID found for this build!`);
+ getLogger().error('Build: id missing for=buildImages');
}
const deploys = await Deploy.query()
@@ -919,36 +913,31 @@ export default class BuildService extends BaseService {
d.deployable.type === DeployTypes.HELM)
);
});
- logger.debug(
- `[BUILD ${build.uuid}] Processing ${deploysToBuild.length} deploys for build: ${deploysToBuild
+ getLogger().debug(
+ `Processing deploys for build: count=${deploysToBuild.length} deployUuids=${deploysToBuild
.map((d) => d.uuid)
- .join(', ')}`
+ .join(',')}`
);
const results = await Promise.all(
deploysToBuild.map(async (deploy, index) => {
if (deploy === undefined) {
- logger.debug(
- "Somehow deploy deploy is undefined here.... That shouldn't be possible? Build deploy length is %s",
- build.deploys.length
- );
+ getLogger().debug(`Deploy is undefined in buildImages: deploysLength=${build.deploys.length}`);
}
await deploy.$query().patchAndFetch({
deployPipelineId: null,
deployOutput: null,
});
const result = await this.db.services.Deploy.buildImage(deploy, build.enableFullYaml, index);
- logger.debug(`[BUILD ${build.uuid}] Deploy ${deploy.uuid} buildImage completed with result: ${result}`);
+ getLogger().debug(`buildImage completed: deployUuid=${deploy.uuid} result=${result}`);
return result;
})
);
const finalResult = _.every(results);
- logger.debug(
- `[BUILD ${build.uuid}] Build results for each deploy: ${results.join(', ')}, final: ${finalResult}`
- );
+ getLogger().debug(`Build results: results=${results.join(',')} final=${finalResult}`);
return finalResult;
} catch (error) {
- logger.error(`[${build.uuid}] Uncaught Docker Build Error: ${error}`);
+ getLogger().error({ error }, 'Docker: build error');
return false;
}
} else {
@@ -956,25 +945,24 @@ export default class BuildService extends BaseService {
const results = await Promise.all(
deploys
.filter((d) => {
- logger.debug(`[${d.uuid}] Check for service type for docker builds: %j`, d.service);
+ getLogger().debug(
+ `Check service type for docker builds: deployUuid=${d.uuid} serviceType=${d.service?.type}`
+ );
return d.active && (d.service.type === DeployTypes.DOCKER || d.service.type === DeployTypes.GITHUB);
})
.map(async (deploy, index) => {
if (deploy === undefined) {
- logger.debug(
- "Somehow deploy deploy is undefined here.... That shouldn't be possible? Build deploy length is %s",
- build.deploys.length
- );
+ getLogger().debug(`Deploy is undefined in buildImages: deploysLength=${build.deploys.length}`);
}
const result = await this.db.services.Deploy.buildImage(deploy, build.enableFullYaml, index);
- logger.debug(`[BUILD ${build.uuid}] Deploy ${deploy.uuid} buildImage completed with result: ${result}`);
- if (!result) logger.info(`[BUILD ${build?.uuid}][${deploy.uuid}][buildImages] build image unsuccessful`);
+ getLogger().debug(`buildImage completed: deployUuid=${deploy.uuid} result=${result}`);
+ if (!result) getLogger().info(`Build: image unsuccessful deployUuid=${deploy.uuid}`);
return result;
})
);
return _.every(results);
} catch (error) {
- logger.error(`[${build.uuid}] Uncaught Docker Build Error: ${error}`);
+ getLogger().error({ error }, 'Docker: build error');
return false;
}
}
@@ -1056,6 +1044,7 @@ export default class BuildService extends BaseService {
// Queue ingress creation after all deployments
await this.db.services.Ingress.ingressManifestQueue.add('manifest', {
buildId,
+ ...extractContextForQueue(),
});
// Legacy manifest generation for backwards compatibility
@@ -1078,19 +1067,17 @@ export default class BuildService extends BaseService {
await build.$query().patch({ manifest: legacyManifest });
}
}
- await this.updateDeploysImageDetails(build);
+ await this.updateDeploysImageDetails(build, githubRepositoryId);
return true;
} catch (e) {
- logger.warn(`[BUILD ${build.uuid}] Some problem when deploying services to Kubernetes cluster: ${e}`);
+ getLogger().warn({ error: e }, 'K8s: deploy failed');
throw e;
}
} else {
try {
const buildId = build?.id;
if (!buildId) {
- logger.error(
- `[BUILD ${build?.uuid}][generateAndApplyManifests][buidIdError] No build ID found for this build!`
- );
+ getLogger().error('Build: id missing for=generateAndApplyManifests');
}
const { serviceAccount } = await GlobalConfigService.getInstance().getAllConfigs();
@@ -1120,6 +1107,7 @@ export default class BuildService extends BaseService {
/* Generate the nginx manifests for this new build */
await this.db.services.Ingress.ingressManifestQueue.add('manifest', {
buildId,
+ ...extractContextForQueue(),
});
const isReady = await k8s.waitForPodReady(build);
@@ -1138,12 +1126,12 @@ export default class BuildService extends BaseService {
)
)
);
- await this.updateDeploysImageDetails(build);
+ await this.updateDeploysImageDetails(build, githubRepositoryId);
}
return true;
} catch (e) {
- logger.warn(`[BUILD ${build.uuid}] Some problem when deploying services to Kubernetes cluster: ${e}`);
+ getLogger().warn({ error: e }, 'K8s: deploy failed');
return false;
}
}
@@ -1167,12 +1155,15 @@ export default class BuildService extends BaseService {
return environments;
}
- private async updateDeploysImageDetails(build: Build) {
+ private async updateDeploysImageDetails(build: Build, githubRepositoryId?: number) {
await build?.$fetchGraph('deploys');
+ const deploys = githubRepositoryId
+ ? build.deploys.filter((d) => d.githubRepositoryId === githubRepositoryId)
+ : build.deploys;
await Promise.all(
- build.deploys.map((deploy) => deploy.$query().patch({ isRunningLatest: true, runningImage: deploy?.dockerImage }))
+ deploys.map((deploy) => deploy.$query().patch({ isRunningLatest: true, runningImage: deploy?.dockerImage }))
);
- logger.debug(`[BUILD ${build.uuid}] Updated deploys with running image and latest status`);
+ getLogger().debug('Deploy: updated running image and status');
}
/**
@@ -1216,15 +1207,28 @@ export default class BuildService extends BaseService {
* @param job the BullMQ job with the buildId
*/
processDeleteQueue = async (job) => {
- try {
- const buildId = job.data.buildId;
- const build = await this.db.models.Build.query().findOne({
- id: buildId,
- });
- await this.db.services.BuildService.deleteBuild(build);
- } catch (error) {
- logger.error(`Error processing delete queue for build ${job.data.buildId}:`, error);
- }
+ const { buildId, buildUuid, sender, correlationId, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => {
+ try {
+ const build = await this.db.models.Build.query().findOne({
+ id: buildId,
+ });
+
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
+
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).info('Build: deleting');
+ await this.db.services.BuildService.deleteBuild(build);
+ getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info('Build: deleted');
+ } catch (error) {
+ getLogger({ stage: LogStage.CLEANUP_FAILED }).error(
+ { error },
+ `Queue: delete processing failed buildId=${buildId}`
+ );
+ }
+ });
};
/**
@@ -1232,36 +1236,43 @@ export default class BuildService extends BaseService {
* @param job the BullMQ job with the buildID
*/
processBuildQueue = async (job) => {
- // No retry behavior - catch errors and log them
- const buildId = job.data.buildId;
- const githubRepositoryId = job?.data?.githubRepositoryId;
- let build;
- try {
- build = await this.db.models.Build.query().findOne({
- id: buildId,
- });
+ const { buildId, githubRepositoryId, sender, correlationId, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, sender, _ddTraceContext }, async () => {
+ let build;
+ try {
+ build = await this.db.models.Build.query().findOne({
+ id: buildId,
+ });
- await build?.$fetchGraph('[pullRequest, environment]');
- await build.pullRequest.$fetchGraph('[repository]');
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
- await this.importYamlConfigFile(build?.environment, build);
- const deploys = await this.db.services.Deploy.findOrCreateDeploys(build?.environment, build);
+ getLogger({ stage: LogStage.BUILD_STARTING }).info('Build: started');
- build.$setRelated('deploys', deploys);
- await build?.$fetchGraph('deploys.[service, deployable]');
+ await build?.$fetchGraph('[pullRequest, environment]');
+ await build.pullRequest.$fetchGraph('[repository]');
- await this.db.services.BuildService.resolveAndDeployBuild(
- build,
- build?.pullRequest?.deployOnUpdate,
- githubRepositoryId
- );
- } catch (error) {
- if (error instanceof ParsingError || error instanceof ValidationError) {
- this.updateStatusAndComment(build, BuildStatus.CONFIG_ERROR, build?.runUUID, true, true, error);
- } else {
- logger.fatal(`[BUILD ${build?.uuid}] Uncaught exception: ${error}`);
+ if (!githubRepositoryId) {
+ await this.importYamlConfigFile(build?.environment, build);
+ }
+
+ await this.db.services.BuildService.resolveAndDeployBuild(
+ build,
+ build?.pullRequest?.deployOnUpdate,
+ githubRepositoryId
+ );
+
+ getLogger({ stage: LogStage.BUILD_COMPLETE }).info('Build: completed');
+ } catch (error) {
+ if (error instanceof ParsingError || error instanceof ValidationError) {
+ this.updateStatusAndComment(build, BuildStatus.CONFIG_ERROR, build?.runUUID, true, true, error);
+ } else {
+ getLogger({ stage: LogStage.BUILD_FAILED }).fatal({ error }, 'Build: uncaught exception');
+ }
}
- }
+ });
};
/**
@@ -1271,30 +1282,43 @@ export default class BuildService extends BaseService {
* @param done the Bull callback to invoke when we're done
*/
processResolveAndDeployBuildQueue = async (job) => {
- let jobId;
- let buildId: number;
- try {
- jobId = job?.data?.buildId;
- const githubRepositoryId = job?.data?.githubRepositoryId;
- if (!jobId) throw new Error('jobId is required but undefined');
- const build = await this.db.models.Build.query().findOne({
- id: jobId,
- });
+ const { sender, correlationId, _ddTraceContext } = job.data;
- await build?.$fetchGraph('[pullRequest, environment]');
- await build.pullRequest.$fetchGraph('[repository]');
- buildId = build?.id;
- if (!buildId) throw new Error('buildId is required but undefined');
+ return withLogContext({ correlationId, sender, _ddTraceContext }, async () => {
+ let jobId;
+ let buildId: number;
+ try {
+ jobId = job?.data?.buildId;
+ const githubRepositoryId = job?.data?.githubRepositoryId;
+ if (!jobId) throw new Error('jobId is required but undefined');
+ const build = await this.db.models.Build.query().findOne({
+ id: jobId,
+ });
- if (!build.pullRequest.deployOnUpdate) {
- logger.info(`[BUILD ${build.uuid}] Pull request does not have deployOnUpdate enabled. Skipping build.`);
- return;
+ await build?.$fetchGraph('[pullRequest, environment]');
+ await build.pullRequest.$fetchGraph('[repository]');
+ buildId = build?.id;
+ if (!buildId) throw new Error('buildId is required but undefined');
+
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
+
+ getLogger({ stage: LogStage.BUILD_QUEUED }).info('Build: processing');
+
+ if (!build.pullRequest.deployOnUpdate) {
+ getLogger().info('Deploy: skipping reason=deployOnUpdateDisabled');
+ return;
+ }
+ // Enqueue a standard resolve build
+ await this.db.services.BuildService.buildQueue.add('build', {
+ buildId,
+ githubRepositoryId,
+ ...extractContextForQueue(),
+ });
+ } catch (error) {
+ getLogger().error({ error }, `Queue: processing failed buildId=${buildId} jobId=${jobId}`);
}
- // Enqueue a standard resolve build
- await this.db.services.BuildService.buildQueue.add('build', { buildId, githubRepositoryId });
- } catch (error) {
- const text = `[BUILD ${buildId}][processResolveAndDeployBuildQueue] error processing buildId with the jobId, ${jobId}`;
- logger.child({ error }).error(text);
- }
+ });
};
}
diff --git a/src/server/services/codefresh.ts b/src/server/services/codefresh.ts
index 176024e..52d5533 100644
--- a/src/server/services/codefresh.ts
+++ b/src/server/services/codefresh.ts
@@ -17,15 +17,13 @@
import BaseService from './_service';
import * as YamlService from 'server/models/yaml';
import { triggerPipeline } from 'server/lib/codefresh';
-import rootLogger from 'server/lib/logger';
-
-const logger = rootLogger.child({
- filename: 'services/codefresh.ts',
-});
+import { getLogger, updateLogContext } from 'server/lib/logger';
export default class CodefreshService extends BaseService {
async triggerYamlConfigWebhookPipeline(webhook: YamlService.Webhook, data: Record<string, any>): Promise<string> {
let buildId: string;
+ const buildUuid = data?.buildUUID;
+ updateLogContext({ buildUuid });
if (
webhook.state !== undefined &&
webhook.type !== undefined &&
@@ -34,11 +32,11 @@ export default class CodefreshService extends BaseService {
) {
buildId = await triggerPipeline(webhook.pipelineId, webhook.trigger, data);
} else {
- logger
- .child({ webhook })
- .error(
- `[WEBHOOK ${webhook.name ?? ''} ${webhook.pipelineId}/${webhook.trigger}] Invalid webhook configuration.`
- );
+ getLogger({ webhook }).error(
+ `Invalid webhook configuration: name=${webhook.name ?? ''} pipelineId=${webhook.pipelineId} trigger=${
+ webhook.trigger
+ }`
+ );
}
return buildId;
}
diff --git a/src/server/services/deploy.ts b/src/server/services/deploy.ts
index 17ffca5..419b4ff 100644
--- a/src/server/services/deploy.ts
+++ b/src/server/services/deploy.ts
@@ -17,7 +17,7 @@
import BaseService from './_service';
import { Environment, Build, Service, Deploy, Deployable } from 'server/models';
import * as codefresh from 'server/lib/codefresh';
-import rootLogger from 'server/lib/logger';
+import { getLogger, withLogContext } from 'server/lib/logger';
import hash from 'object-hash';
import { DeployStatus, DeployTypes } from 'shared/constants';
import * as cli from 'server/lib/cli';
@@ -38,10 +38,6 @@ import { buildWithNative } from 'server/lib/nativeBuild';
import { constructEcrTag } from 'server/lib/codefresh/utils';
import { ChartType, determineChartType } from 'server/lib/nativeHelm';
-const logger = rootLogger.child({
- filename: 'services/deploy.ts',
-});
-
export interface DeployOptions {
ownerId?: number;
repositoryId?: string;
@@ -66,8 +62,9 @@ export default class DeployService extends BaseService {
* Creates all of the relevant deploys for a build, based on the provided environment, if they do not already exist.
* @param environment the environment to use as a the template for these deploys
* @param build the build these deploys will be associated with
+ * @param githubRepositoryId optional filter to only update SHA for deploys from this repo
*/
- async findOrCreateDeploys(environment: Environment, build: Build): Promise<Deploy[]> {
+ async findOrCreateDeploys(environment: Environment, build: Build, githubRepositoryId?: number): Promise<Deploy[]> {
await build?.$fetchGraph('[deployables.[repository]]');
const { deployables } = build;
@@ -76,41 +73,48 @@ export default class DeployService extends BaseService {
//
// With full yaml enable. Creating deploys from deployables instead of services. This will include YAML only config.
//
+ const { kedaScaleToZero: defaultKedaScaleToZero } = await GlobalConfigService.getInstance().getAllConfigs();
+
+ const buildId = build?.id;
+ if (!buildId) {
+ getLogger().error('Deploy: build id missing for=findOrCreateDeploys');
+ return [];
+ }
+
+ const existingDeploys = await this.db.models.Deploy.query().where({ buildId }).withGraphFetched('deployable');
+ const existingDeployMap = new Map(existingDeploys.map((d) => [d.deployableId, d]));
+
await Promise.all(
deployables.map(async (deployable) => {
const uuid = `${deployable.name}-${build?.uuid}`;
- const buildId = build?.id;
- if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`);
- return;
- }
+ const patchFields: Objection.PartialModelObject<Deploy> = {};
+ const isTargetRepo = !githubRepositoryId || deployable.repositoryId === githubRepositoryId;
- let deploy = await this.db.models.Deploy.findOne({
- deployableId: deployable.id,
- buildId,
- }).catch((error) => {
- logger.warn(`[BUILD ${build?.uuid}] [Service ${deployable.id}] ${error}`);
- return null;
- });
+ let deploy = existingDeployMap.get(deployable.id) ?? null;
+ if (!deploy) {
+ deploy = await this.db.models.Deploy.findOne({
+ deployableId: deployable.id,
+ buildId,
+ }).catch((error) => {
+ getLogger().warn({ error, serviceId: deployable.id }, 'Deploy: find failed');
+ return null;
+ });
+ if (deploy) {
+ getLogger().warn(`Deploy: fallback find succeeded deployableId=${deployable.id}`);
+ }
+ }
if (deploy != null) {
- await deploy.$fetchGraph('deployable');
-
- // If deploy is already exists (re-deployment)
- await deploy.$query().patch({
- deployableId: deployable?.id ?? null,
- publicUrl: this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable),
- internalHostname: uuid,
- uuid,
- branchName: deployable.commentBranchName ?? deployable.branchName,
- tag: deployable.defaultTag,
- });
- } else {
- const buildId = build?.id;
- if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`);
+ if (!isTargetRepo) {
+ return;
}
- // Create deploy object if this is new deployment
+ patchFields.deployableId = deployable?.id ?? null;
+ patchFields.publicUrl = this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable);
+ patchFields.internalHostname = uuid;
+ patchFields.uuid = uuid;
+ patchFields.branchName = deployable.commentBranchName ?? deployable.branchName;
+ patchFields.tag = deployable.defaultTag;
+ } else {
deploy = await this.db.models.Deploy.create({
buildId,
serviceId: deployable.serviceId,
@@ -121,46 +125,34 @@ export default class DeployService extends BaseService {
active: deployable.active,
});
- await deploy.$fetchGraph('deployable');
-
- await deploy.$query().patch({
- branchName: deployable.branchName,
- tag: deployable.defaultTag,
- publicUrl: this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable),
- });
+ patchFields.branchName = deployable.branchName;
+ patchFields.tag = deployable.defaultTag;
+ patchFields.publicUrl = this.db.services.Deploy.hostForDeployableDeploy(deploy, deployable);
deploy.$setRelated('deployable', deployable);
deploy.$setRelated('build', build);
}
- // only set sha for deploys where needed
- if ([DeployTypes.HELM, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes(deployable.type)) {
+ if (isTargetRepo && [DeployTypes.HELM, DeployTypes.GITHUB, DeployTypes.CODEFRESH].includes(deployable.type)) {
try {
const sha = await getShaForDeploy(deploy);
- await deploy.$query().patch({
- sha,
- });
+ patchFields.sha = sha;
} catch (error) {
- logger.debug(`[DEPLOY ${deploy.uuid}] Unable to get SHA, continuing: ${error}`);
+ getLogger().debug({ error }, 'Deploy: SHA fetch failed continuing=true');
}
}
- const { kedaScaleToZero: defaultKedaScaleToZero } = await GlobalConfigService.getInstance().getAllConfigs();
-
- const kedaScaleToZero =
+ patchFields.kedaScaleToZero =
deployable?.kedaScaleToZero?.type === 'http' && defaultKedaScaleToZero?.enabled
- ? {
- ...defaultKedaScaleToZero,
- ...deployable.kedaScaleToZero,
- }
+ ? { ...defaultKedaScaleToZero, ...deployable.kedaScaleToZero }
: null;
- await deploy.$query().patch({ kedaScaleToZero });
+ await deploy.$query().patch(patchFields);
})
).catch((error) => {
- logger.error(`[BUILD ${build?.uuid}] Failed to create deploys from deployables: ${error}`);
+ getLogger().error({ error }, 'Deploy: create from deployables failed');
});
- logger.info(`[BUILD ${build?.uuid}] Deploys created(or exists already) for deployables with YAML config`);
+ getLogger().info('Deploy: initialized');
} else {
const serviceInitFunc = async (service: Service, active: boolean): Promise<Deploy[]> => {
const newDeploys: Deploy[] = [];
@@ -188,9 +180,7 @@ export default class DeployService extends BaseService {
);
})
);
- logger.info(
- `[BUILD ${build?.uuid}] Created ${newDeploys.length} deploys from services table for non-YAML config`
- );
+ getLogger().info(`Deploy: created count=${newDeploys.length}`);
return newDeploys;
};
@@ -199,20 +189,21 @@ export default class DeployService extends BaseService {
environment.defaultServices.map((service) => serviceInitFunc(service, true)),
environment.optionalServices.map((service) => serviceInitFunc(service, false)),
]).catch((error) => {
- logger.error(`[BUILD ${build?.uuid}] Something is wrong when trying to create/update deploys: ${error}`);
+ getLogger().error({ error }, 'Deploy: create/update failed');
});
}
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`);
+ getLogger().error('Deploy: build id missing for=findOrCreateDeploys');
}
await this.db.models.Deploy.query().where({ buildId });
await build?.$fetchGraph('deploys');
if (build?.deployables?.length !== build?.deploys?.length) {
- logger.warn(
- `[BUILD ${build?.uuid} (${buildId})] No worry. Nothing critical yet: Deployables count (${build.deployables.length}) mismatch with Deploys count (${build.deploys.length}).`
+ getLogger().warn(
+ { buildId, deployablesCount: build.deployables.length, deploysCount: build.deploys.length },
+ 'Deployables count mismatch with Deploys count'
);
}
@@ -231,18 +222,18 @@ export default class DeployService extends BaseService {
const uuid = `${service.name}-${build?.uuid}`;
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`);
+ getLogger().error('Deploy: build id missing for=findOrCreateDeploy');
}
const serviceId = service?.id;
if (!serviceId) {
- logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][serviceIdError] No service ID found for this service!`);
+ getLogger().error('Deploy: service id missing for=findOrCreateDeploy');
}
// Deployable should be find at this point; otherwise, something is very wrong.
const deployable: Deployable = await this.db.models.Deployable.query()
.findOne({ buildId, serviceId })
.catch((error) => {
- logger.error(`[BUILD ${build.uuid}] [Service ${serviceId}] ${error}`);
+ getLogger().error({ error, serviceId }, 'Deployable: find failed');
return null;
});
@@ -250,7 +241,7 @@ export default class DeployService extends BaseService {
serviceId,
buildId,
}).catch((error) => {
- logger.warn(`[BUILD ${build?.uuid}] [Service ${serviceId}] ${error}`);
+ getLogger().warn({ error, serviceId }, 'Deploy: find failed');
return null;
});
if (deploy != null) {
@@ -265,13 +256,11 @@ export default class DeployService extends BaseService {
} else {
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][findOrCreateDeploy][buidIdError] No build ID found for this build!`);
+ getLogger().error('Deploy: build id missing for=findOrCreateDeploy');
}
const serviceId = service?.id;
if (!serviceId) {
- logger.error(
- `[BUILD ${build?.uuid}][findOrCreateDeploy][serviceIdError] No service ID found for this service!`
- );
+ getLogger().error('Deploy: service id missing for=findOrCreateDeploy');
}
// Create deploy object if this is new deployment
deploy = await this.db.models.Deploy.create({
@@ -286,7 +275,7 @@ export default class DeployService extends BaseService {
await build?.$fetchGraph('[buildServiceOverrides]');
const override = build.buildServiceOverrides.find((bso) => bso.serviceId === serviceId);
- logger.debug(`[BUILD ${build.uuid}] Override: ${override}`);
+ getLogger().debug({ override: override ? JSON.stringify(override) : null }, 'Service override found');
/* Default to the service branch name */
let resolvedBranchName = service.branchName;
/* If the deploy already has a branch name set, use that */
@@ -375,201 +364,184 @@ export default class DeployService extends BaseService {
}
return null;
} catch (error) {
- logger.debug(`Error checking for existing Aurora database: ${error}`);
+ getLogger().debug({ error }, 'Aurora: check failed');
return null;
}
}
async deployAurora(deploy: Deploy): Promise<boolean> {
- try {
- // For now, we're just going to shell out and run the deploy
- await deploy.reload();
- await deploy.$fetchGraph('[build, deployable]');
+ return withLogContext(
+ { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name },
+ async () => {
+ try {
+ await deploy.reload();
+ await deploy.$fetchGraph('[build, deployable]');
- if (!deploy.deployable) {
- logger.error(`[DEPLOY ${deploy?.uuid}] Missing deployable for Aurora restore`);
- return false;
- }
+ if (!deploy.deployable) {
+ getLogger().error('Aurora: deployable missing for=restore');
+ return false;
+ }
- /**
- * For now, only run the CLI deploy step one time.
- * Check for both BUILT and READY status because:
- * - deployAurora sets status to BUILT after successful creation
- * - DeploymentManager.deployManifests then changes it to READY after Kubernetes manifest deployment
- * Both statuses indicate the Aurora database already exists and should not be recreated
- */
- if ((deploy.status === DeployStatus.BUILT || deploy.status === DeployStatus.READY) && deploy.cname) {
- logger.info(`[DEPLOY ${deploy?.uuid}] Aurora restore already built (status: ${deploy.status})`);
- return true;
- }
+ if ((deploy.status === DeployStatus.BUILT || deploy.status === DeployStatus.READY) && deploy.cname) {
+ getLogger().info('Aurora: skipped reason=alreadyBuilt');
+ return true;
+ }
- // Check if database already exists in AWS before attempting to create
- // This handles both: status is BUILT/READY but cname missing, OR first-time deploy
- const existingDbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name);
- if (existingDbEndpoint) {
- logger.info(
- `[DEPLOY ${deploy?.uuid}] Aurora database already exists with endpoint ${existingDbEndpoint}, skipping creation`
- );
- await deploy.$query().patch({
- cname: existingDbEndpoint,
- status: DeployStatus.BUILT,
- });
- return true;
- }
+ const existingDbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name);
+ if (existingDbEndpoint) {
+ getLogger().info('Aurora: skipped reason=exists');
+ await deploy.$query().patch({
+ cname: existingDbEndpoint,
+ status: DeployStatus.BUILT,
+ });
+ return true;
+ }
- const uuid = nanoid();
- await deploy.$query().patch({
- status: DeployStatus.BUILDING,
- buildLogs: uuid,
- runUUID: nanoid(),
- });
- logger.info(`[DEPLOY ${deploy?.uuid}] Restoring Aurora cluster for ${deploy?.uuid}`);
- await cli.cliDeploy(deploy);
+ const uuid = nanoid();
+ await deploy.$query().patch({
+ status: DeployStatus.BUILDING,
+ buildLogs: uuid,
+ runUUID: nanoid(),
+ });
+ getLogger().info('Aurora: restoring');
+ await cli.cliDeploy(deploy);
- // After creation, find the database endpoint
- const dbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name);
- if (dbEndpoint) {
- await deploy.$query().patch({
- cname: dbEndpoint,
- });
- }
+ const dbEndpoint = await this.findExistingAuroraDatabase(deploy.build.uuid, deploy.deployable.name);
+ if (dbEndpoint) {
+ await deploy.$query().patch({
+ cname: dbEndpoint,
+ });
+ }
- await deploy.reload();
- if (deploy.buildLogs === uuid) {
- await deploy.$query().patch({
- status: DeployStatus.BUILT,
- });
+ await deploy.reload();
+ if (deploy.buildLogs === uuid) {
+ await deploy.$query().patch({
+ status: DeployStatus.BUILT,
+ });
+ }
+ getLogger().info('Aurora: restored');
+ return true;
+ } catch (e) {
+ getLogger().error({ error: e }, 'Aurora: cluster restore failed');
+ await deploy.$query().patch({
+ status: DeployStatus.ERROR,
+ });
+ return false;
+ }
}
- logger.info(`[DEPLOY ${deploy?.uuid}] Restored Aurora cluster for ${deploy?.uuid}`);
- return true;
- } catch (e) {
- logger.error(`[DEPLOY ${deploy?.uuid}] Aurora cluster restore for ${deploy?.uuid} failed with error: ${e}`);
- await deploy.$query().patch({
- status: DeployStatus.ERROR,
- });
- return false;
- }
+ );
}
async deployCodefresh(deploy: Deploy): Promise<boolean> {
- let result: boolean = false;
-
- // We'll use either a tag specified in the UI when creating a manual build
- // or the default tag specified on the service
- const runUUID = nanoid();
- await deploy.$query().patch({
- runUUID,
- });
+ return withLogContext(
+ { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name },
+ async () => {
+ let result: boolean = false;
- // For now, we're just going to shell out and run the deploy
- await deploy.reload();
- await deploy.$fetchGraph('[service.[repository], deployable.[repository], build]');
- const { build, service, deployable } = deploy;
- const { repository } = build.enableFullYaml ? deployable : service;
- const repo = repository?.fullName;
- const [owner, name] = repo?.split('/') || [];
- const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name).catch((error) => {
- logger.warn(
- `[BUILD ${build.uuid}] ${owner}/${name}/${deploy.branchName} Something could be wrong when retrieving commit SHA for ${deploy.uuid} from github: ${error}`
- );
- });
+ const runUUID = nanoid();
+ await deploy.$query().patch({
+ runUUID,
+ });
- if (!fullSha) {
- logger.warn(
- `[BUILD ${build.uuid}] ${owner}/${name}/${deploy.branchName} Commit SHA for ${deploy.uuid} cannot be falsy. Check the owner, etc.`
- );
+ await deploy.reload();
+ await deploy.$fetchGraph('[service.[repository], deployable.[repository], build]');
+ const { build, service, deployable } = deploy;
+ const { repository } = build.enableFullYaml ? deployable : service;
+ const repo = repository?.fullName;
+ const [owner, name] = repo?.split('/') || [];
+ const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name).catch((error) => {
+ getLogger().warn(
+ { error, owner, name, branch: deploy.branchName },
+ 'Failed to retrieve commit SHA from github'
+ );
+ });
- result = false;
- } else {
- const shortSha = fullSha.substring(0, 7);
- const envSha = hash(merge(deploy.env || {}, build.commentRuntimeEnv));
- const buildSha = `${shortSha}-${envSha}`;
-
- // If the SHA's are the same, nothing need to do and considered as done.
- if (deploy?.sha === buildSha) {
- // Make sure we're in a clean state
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT, sha: buildSha }, runUUID).catch(
- (error) => {
- logger.warn(`[BUILD ${build.uuid}] Failed to update activity feed: ${error}`);
- }
- );
- logger.info(`[BUILD ${deploy?.uuid}] Marked codefresh deploy ${deploy?.uuid} as built since no changes`);
- result = true;
- } else {
- let buildLogs: string;
- let codefreshBuildId: string;
- try {
- await deploy.$query().patch({
- buildLogs: null,
- buildPipelineId: null,
- buildOutput: null,
- deployPipelineId: null,
- deployOutput: null,
- });
+ if (!fullSha) {
+ getLogger().warn({ owner, name, branch: deploy.branchName }, 'Git: SHA missing');
- codefreshBuildId = await cli.codefreshDeploy(deploy, build, service, deployable).catch((error) => {
- logger.error(`[BUILD ${build.uuid}] Failed to receive codefresh build id for ${deploy.uuid}: ${error}`);
- return null;
- });
- logger.info(`[DEPLOY ${deploy?.uuid}] Triggered codefresh build for ${deploy?.uuid}`);
- if (codefreshBuildId != null) {
- buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`;
-
- await this.patchAndUpdateActivityFeed(
- deploy,
- {
- buildLogs,
- status: DeployStatus.BUILDING,
- buildPipelineId: codefreshBuildId,
- statusMessage: 'CI build triggered...',
- },
- runUUID
- ).catch((error) => {
- logger.warn(`[BUILD ${build.uuid}] Failed to update activity feed: ${error}`);
- });
- logger
- .child({ url: buildLogs })
- .info(`[DEPLOY ${deploy?.uuid}] Wait for codefresh build to complete for ${deploy?.uuid}`);
- await cli.waitForCodefresh(codefreshBuildId);
- const buildOutput = await getLogs(codefreshBuildId);
- logger
- .child({ url: buildLogs })
- .info(`[DEPLOY ${deploy?.uuid}] Codefresh build completed for ${deploy?.uuid}`);
- await this.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.BUILT,
- sha: buildSha,
- buildOutput,
- statusMessage: 'CI build completed',
- },
- runUUID
- ).catch((error) => {
- logger.warn(`[BUILD ${build.uuid}] Failed to update activity feed: ${error}`);
- });
+ result = false;
+ } else {
+ const shortSha = fullSha.substring(0, 7);
+ const envSha = hash(merge(deploy.env || {}, build.commentRuntimeEnv));
+ const buildSha = `${shortSha}-${envSha}`;
+
+ if (deploy?.sha === buildSha) {
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT, sha: buildSha }, runUUID).catch(
+ (error) => {
+ getLogger().warn({ error }, 'ActivityFeed: update failed');
+ }
+ );
+ getLogger().info('Codefresh: skipped reason=noChanges status=built');
result = true;
+ } else {
+ let buildLogs: string;
+ let codefreshBuildId: string;
+ try {
+ await deploy.$query().patch({
+ buildLogs: null,
+ buildPipelineId: null,
+ buildOutput: null,
+ deployPipelineId: null,
+ deployOutput: null,
+ });
+
+ codefreshBuildId = await cli.codefreshDeploy(deploy, build, service, deployable).catch((error) => {
+ getLogger().error({ error }, 'Codefresh: build id missing');
+ return null;
+ });
+ getLogger().info('Codefresh: triggered');
+ if (codefreshBuildId != null) {
+ buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`;
+
+ await this.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ buildLogs,
+ status: DeployStatus.BUILDING,
+ buildPipelineId: codefreshBuildId,
+ statusMessage: 'CI build triggered...',
+ },
+ runUUID
+ ).catch((error) => {
+ getLogger().warn({ error }, 'ActivityFeed: update failed');
+ });
+ getLogger().info(`Codefresh: waiting url=${buildLogs}`);
+ await cli.waitForCodefresh(codefreshBuildId);
+ const buildOutput = await getLogs(codefreshBuildId);
+ getLogger().info('Codefresh: completed');
+ await this.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.BUILT,
+ sha: buildSha,
+ buildOutput,
+ statusMessage: 'CI build completed',
+ },
+ runUUID
+ ).catch((error) => {
+ getLogger().warn({ error }, 'ActivityFeed: update failed');
+ });
+ result = true;
+ }
+ } catch (error) {
+ getLogger().error({ error, url: buildLogs }, 'Codefresh: build failed');
+ await this.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.ERROR,
+ sha: buildSha,
+ statusMessage: 'CI build failed',
+ },
+ runUUID
+ );
+ result = false;
+ }
}
- } catch (error) {
- // Error'd while waiting for the pipeline to finish. This is usually due to an actual
- // pipeline failure or a pipeline getting terminated.
- logger
- .child({ url: buildLogs })
- .error(`[BUILD ${build?.uuid}] Codefresh build failed for ${deploy?.uuid}: ${error}`);
- await this.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.ERROR,
- sha: buildSha,
- statusMessage: 'CI build failed',
- },
- runUUID
- );
- result = false;
}
- }
- }
- return result;
+ return result;
+ }
+ );
}
async deployCLI(deploy: Deploy): Promise<boolean> {
@@ -593,266 +565,272 @@ export default class DeployService extends BaseService {
* @param deploy the deploy to build an image for
*/
async buildImage(deploy: Deploy, enableFullYaml: boolean, index: number): Promise {
- try {
- // We'll use either a tag specified in the UI when creating a manual build
- // or the default tag specified on the service
- const runUUID = deploy.runUUID ?? nanoid();
- await deploy.$query().patch({
- runUUID,
- });
-
- await deploy.$fetchGraph('[service, build.[environment], deployable]');
- const { service, build, deployable } = deploy;
- const uuid = build?.uuid;
- const uuidText = uuid ? `[DEPLOY ${uuid}][buildImage]:` : '[DEPLOY][buildImage]:';
-
- if (!enableFullYaml) {
- await service.$fetchGraph('repository');
- let config: YamlService.LifecycleConfig;
- const isClassicModeOnly = build?.environment?.classicModeOnly ?? false;
- if (!isClassicModeOnly) {
- config = await YamlService.fetchLifecycleConfigByRepository(service.repository, deploy.branchName);
- }
-
- // Docker types are already built - next
- if (service.type === DeployTypes.DOCKER) {
- await this.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.BUILT,
- dockerImage: `${service.dockerImage}:${deploy.tag}`,
- },
- runUUID
- );
- return true;
- } else if (service.type === DeployTypes.GITHUB) {
- if (deploy.branchName === null) {
- // This means we're using an external host, rather than building from source.
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID);
- } else {
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID);
-
- await build?.$fetchGraph('pullRequest.[repository]');
- const pullRequest = build?.pullRequest;
- const author = pullRequest?.githubLogin;
- const enabledFeatures = build?.enabledFeatures || [];
- const repository = service?.repository;
- const repo = repository?.fullName;
- const [owner, name] = repo?.split('/') || [];
- const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name);
-
- let repositoryName: string = service.repository.fullName;
- let branchName: string = deploy.branchName;
- let dockerfilePath: string = service.dockerfilePath || './Dockerfile';
- let initDockerfilePath: string = service.initDockerfilePath;
-
- let githubService: YamlService.GithubService;
- // TODO This should be updated!
- if (config != null && config.version === '0.0.3-alpha-1') {
- const yamlService: YamlService.Service = YamlService.getDeployingServicesByName(config, service.name);
- if (yamlService != null) {
- githubService = yamlService as YamlService.GithubService;
-
- repositoryName = githubService.github.repository;
- branchName = githubService.github.branchName;
- dockerfilePath = githubService.github.docker.app.dockerfilePath;
-
- if (githubService.github.docker.init != null) {
- initDockerfilePath = githubService.github.docker.init.dockerfilePath;
- }
- }
- }
+ return withLogContext(
+ { deployUuid: deploy.uuid, serviceName: deploy.deployable?.name || deploy.service?.name },
+ async () => {
+ try {
+ const runUUID = deploy.runUUID ?? nanoid();
+ await deploy.$query().patch({
+ runUUID,
+ });
- // Verify we actually have a SHA from github before proceeding
- if (!fullSha) {
- // We were unable to retrieve this branch/repo combo
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID);
- return false;
- }
+ await deploy.$fetchGraph('[service, build.[environment], deployable]');
+ const { service, build, deployable } = deploy;
+ const uuid = build?.uuid;
- const shortSha = fullSha.substring(0, 7);
-
- logger.debug(`${uuidText} Building docker image ${service.name} ${deploy.branchName}`);
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILDING, sha: fullSha }, runUUID);
- /**
- * @note { svc: index } ensures the hash for each image is unique per service
- */
- const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv, { svc: index });
- const envVarsHash = hash(envVariables);
- const buildPipelineName = deployable?.dockerBuildPipelineName;
- const tag = generateDeployTag({ sha: shortSha, envVarsHash });
- const initTag = generateDeployTag({ prefix: 'lfc-init', sha: shortSha, envVarsHash });
- let ecrRepo = deployable?.ecr;
-
- const { lifecycleDefaults, app_setup } = await GlobalConfigService.getInstance().getAllConfigs();
- const { ecrDomain, ecrRegistry: registry } = lifecycleDefaults;
-
- const serviceName = deploy.build?.enableFullYaml ? deployable?.name : deploy.service?.name;
- ecrRepo = constructEcrRepoPath(deployable?.ecr, serviceName, ecrDomain);
-
- const tagsExist =
- (await codefresh.tagExists({ tag, ecrRepo, uuid })) &&
- (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid })));
-
- logger.debug(`${uuidText} Tags exist check for ${deploy.uuid}: ${tagsExist}`);
- const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG';
- if (!ecrDomain || !registry) {
- logger.child({ lifecycleDefaults }).error(`[BUILD ${deploy.uuid}] Missing ECR config to build image`);
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID);
- return false;
+ if (!enableFullYaml) {
+ await service.$fetchGraph('repository');
+ let config: YamlService.LifecycleConfig;
+ const isClassicModeOnly = build?.environment?.classicModeOnly ?? false;
+ if (!isClassicModeOnly) {
+ config = await YamlService.fetchLifecycleConfigByRepository(service.repository, deploy.branchName);
}
- if (!tagsExist) {
- await deploy.$query().patchAndFetch({
- buildOutput: null,
- buildLogs: null,
- buildPipelineId: null,
- });
- const codefreshBuildId = await codefresh.buildImage({
- ecrRepo,
- envVars: envVariables,
- dockerfilePath,
- gitOrg,
- tag,
- revision: fullSha,
- repo: repositoryName,
- branch: branchName,
- initDockerfilePath,
- cacheFrom: deploy.dockerImage,
- afterBuildPipelineId: service.afterBuildPipelineId,
- detatchAfterBuildPipeline: service.detatchAfterBuildPipeline,
- runtimeName: service.runtimeName,
- buildPipelineName,
+ // Docker types are already built - next
+ if (service.type === DeployTypes.DOCKER) {
+ await this.patchAndUpdateActivityFeed(
deploy,
- uuid,
- initTag,
- author,
- enabledFeatures,
- ecrDomain,
- deployCluster: lifecycleDefaults.deployCluster,
- });
- const buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`;
- await this.patchAndUpdateActivityFeed(deploy, { buildLogs }, runUUID);
- const buildSuccess = await codefresh.waitForImage(codefreshBuildId);
- if (buildSuccess) {
- await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain });
- return true;
+ {
+ status: DeployStatus.BUILT,
+ dockerImage: `${service.dockerImage}:${deploy.tag}`,
+ },
+ runUUID
+ );
+ return true;
+ } else if (service.type === DeployTypes.GITHUB) {
+ if (deploy.branchName === null) {
+ // This means we're using an external host, rather than building from source.
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID);
} else {
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID);
- return false;
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID);
+
+ await build?.$fetchGraph('pullRequest.[repository]');
+ const pullRequest = build?.pullRequest;
+ const author = pullRequest?.githubLogin;
+ const enabledFeatures = build?.enabledFeatures || [];
+ const repository = service?.repository;
+ const repo = repository?.fullName;
+ const [owner, name] = repo?.split('/') || [];
+ const fullSha = await github.getSHAForBranch(deploy.branchName, owner, name);
+
+ let repositoryName: string = service.repository.fullName;
+ let branchName: string = deploy.branchName;
+ let dockerfilePath: string = service.dockerfilePath || './Dockerfile';
+ let initDockerfilePath: string = service.initDockerfilePath;
+
+ let githubService: YamlService.GithubService;
+ // TODO This should be updated!
+ if (config != null && config.version === '0.0.3-alpha-1') {
+ const yamlService: YamlService.Service = YamlService.getDeployingServicesByName(config, service.name);
+ if (yamlService != null) {
+ githubService = yamlService as YamlService.GithubService;
+
+ repositoryName = githubService.github.repository;
+ branchName = githubService.github.branchName;
+ dockerfilePath = githubService.github.docker.app.dockerfilePath;
+
+ if (githubService.github.docker.init != null) {
+ initDockerfilePath = githubService.github.docker.init.dockerfilePath;
+ }
+ }
+ }
+
+ // Verify we actually have a SHA from github before proceeding
+ if (!fullSha) {
+ // We were unable to retrieve this branch/repo combo
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID);
+ return false;
+ }
+
+ const shortSha = fullSha.substring(0, 7);
+
+ getLogger().debug(
+ { serviceName: service.name, branchName: deploy.branchName },
+ 'Building docker image'
+ );
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILDING, sha: fullSha }, runUUID);
+ /**
+ * @note { svc: index } ensures the hash for each image is unique per service
+ */
+ const envVariables = merge(deploy.env || {}, deploy.build.commentRuntimeEnv, { svc: index });
+ const envVarsHash = hash(envVariables);
+ const buildPipelineName = deployable?.dockerBuildPipelineName;
+ const tag = generateDeployTag({ sha: shortSha, envVarsHash });
+ const initTag = generateDeployTag({ prefix: 'lfc-init', sha: shortSha, envVarsHash });
+ let ecrRepo = deployable?.ecr;
+
+ const { lifecycleDefaults, app_setup } = await GlobalConfigService.getInstance().getAllConfigs();
+ const { ecrDomain, ecrRegistry: registry } = lifecycleDefaults;
+
+ const serviceName = deploy.build?.enableFullYaml ? deployable?.name : deploy.service?.name;
+ ecrRepo = constructEcrRepoPath(deployable?.ecr, serviceName, ecrDomain);
+
+ const tagsExist =
+ (await codefresh.tagExists({ tag, ecrRepo, uuid })) &&
+ (!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid })));
+
+ getLogger().debug({ tagsExist }, 'Build: tags exist check');
+ const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG';
+ if (!ecrDomain || !registry) {
+ getLogger().error({ lifecycleDefaults }, 'ECR: config missing for build');
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID);
+ return false;
+ }
+ if (!tagsExist) {
+ await deploy.$query().patchAndFetch({
+ buildOutput: null,
+ buildLogs: null,
+ buildPipelineId: null,
+ });
+
+ const codefreshBuildId = await codefresh.buildImage({
+ ecrRepo,
+ envVars: envVariables,
+ dockerfilePath,
+ gitOrg,
+ tag,
+ revision: fullSha,
+ repo: repositoryName,
+ branch: branchName,
+ initDockerfilePath,
+ cacheFrom: deploy.dockerImage,
+ afterBuildPipelineId: service.afterBuildPipelineId,
+ detatchAfterBuildPipeline: service.detatchAfterBuildPipeline,
+ runtimeName: service.runtimeName,
+ buildPipelineName,
+ deploy,
+ uuid,
+ initTag,
+ author,
+ enabledFeatures,
+ ecrDomain,
+ deployCluster: lifecycleDefaults.deployCluster,
+ });
+ const buildLogs = `https://g.codefresh.io/build/${codefreshBuildId}`;
+ await this.patchAndUpdateActivityFeed(deploy, { buildLogs }, runUUID);
+ const buildSuccess = await codefresh.waitForImage(codefreshBuildId);
+ if (buildSuccess) {
+ await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain });
+ return true;
+ } else {
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID);
+ return false;
+ }
+ } else {
+ await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain });
+ await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID);
+ return true;
+ }
}
} else {
- await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain });
- await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID);
- return true;
+ getLogger().debug({ type: service.type }, 'Build: type not recognized');
+ return false;
}
- }
- } else {
- logger.debug(`${uuidText} Build type not recognized: ${service.type} for deploy.`);
- return false;
- }
- return true;
- } else {
- switch (deployable.type) {
- case DeployTypes.GITHUB:
- return this.buildImageForHelmAndGithub(deploy, runUUID);
- case DeployTypes.DOCKER:
- await this.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.BUILT,
- dockerImage: `${deployable.dockerImage}:${deploy.tag}`,
- },
- runUUID
- );
- logger.info(`[${deploy?.uuid}] Marked ${deploy.uuid} as BUILT since its a public docker image`);
return true;
- case DeployTypes.HELM: {
- try {
- const chartType = await determineChartType(deploy);
-
- if (chartType !== ChartType.PUBLIC) {
+ } else {
+ switch (deployable.type) {
+ case DeployTypes.GITHUB:
return this.buildImageForHelmAndGithub(deploy, runUUID);
- }
-
- let fullSha = null;
-
- await deploy.$fetchGraph('deployable.repository');
- if (deploy.deployable?.repository) {
+ case DeployTypes.DOCKER:
+ await this.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.BUILT,
+ dockerImage: `${deployable.dockerImage}:${deploy.tag}`,
+ },
+ runUUID
+ );
+ getLogger().info('Image: skipped reason=public status=built');
+ return true;
+ case DeployTypes.HELM: {
try {
- fullSha = await github.getShaForDeploy(deploy);
- } catch (shaError) {
- logger.debug(
- `[${deploy?.uuid}] Could not get SHA for PUBLIC helm chart, continuing without it: ${shaError.message}`
+ const chartType = await determineChartType(deploy);
+
+ if (chartType !== ChartType.PUBLIC) {
+ return this.buildImageForHelmAndGithub(deploy, runUUID);
+ }
+
+ let fullSha = null;
+
+ await deploy.$fetchGraph('deployable.repository');
+ if (deploy.deployable?.repository) {
+ try {
+ fullSha = await github.getShaForDeploy(deploy);
+ } catch (shaError) {
+ getLogger().debug(
+ { error: shaError },
+ 'Could not get SHA for PUBLIC helm chart, continuing without it'
+ );
+ }
+ }
+
+ await this.patchAndUpdateActivityFeed(
+ deploy,
+ {
+ status: DeployStatus.BUILT,
+ statusMessage: 'Helm chart does not need to be built',
+ ...(fullSha && { sha: fullSha }),
+ },
+ runUUID
);
+ return true;
+ } catch (error) {
+ getLogger().warn({ error }, 'Helm: deployment processing failed');
+ return false;
}
}
-
- await this.patchAndUpdateActivityFeed(
- deploy,
- {
- status: DeployStatus.BUILT,
- statusMessage: 'Helm chart does not need to be built',
- ...(fullSha && { sha: fullSha }),
- },
- runUUID
- );
- return true;
- } catch (error) {
- logger.child({ error }).warn(`[${deploy?.uuid}] Error processing Helm deployment: ${error.message}`);
- return false;
+ default:
+ getLogger().debug({ type: deployable.type }, 'Build: type not recognized');
+ return false;
}
}
- default:
- logger.debug(`[${deploy.uuid}] Build type not recognized: ${deployable.type} for deploy.`);
- return false;
+ } catch (e) {
+ getLogger().error({ error: e }, 'Docker: build error');
+ return false;
}
}
- } catch (e) {
- logger.error(`[${deploy.uuid}] Uncaught error building docker image: ${e}`);
- return false;
- }
+ );
}
public async patchAndUpdateActivityFeed(
deploy: Deploy,
params: Objection.PartialModelObject,
- runUUID: string
+ runUUID: string,
+ targetGithubRepositoryId?: number
) {
let build: Build;
try {
const id = deploy?.id;
await this.db.models.Deploy.query().where({ id, runUUID }).patch(params);
if (deploy.runUUID !== runUUID) {
- logger.debug(
- `[DEPLOY ${deploy.uuid}] runUUID mismatch: deploy.runUUID=${deploy.runUUID}, provided runUUID=${runUUID}`
- );
+ getLogger().debug({ deployRunUUID: deploy.runUUID, providedRunUUID: runUUID }, 'runUUID mismatch');
return;
}
- await deploy.$fetchGraph('build.[deploys.[service, deployable], pullRequest.[repository]]');
+
+ await deploy.$fetchGraph('build.pullRequest');
build = deploy?.build;
const pullRequest = build?.pullRequest;
await this.db.services.ActivityStream.updatePullRequestActivityStream(
build,
- build?.deploys,
+ [],
pullRequest,
- pullRequest?.repository,
+ null,
true,
true,
null,
- false
+ true,
+ targetGithubRepositoryId
);
} catch (error) {
- logger.child({ error }).warn(`[BUILD ${build?.uuid}] Failed to update the activity feeds`);
+ getLogger().warn({ error }, 'ActivityFeed: update failed');
}
}
private async patchDeployWithTag({ tag, deploy, initTag, ecrDomain }) {
await deploy.$fetchGraph('[build, service, deployable]');
const { build, deployable, service } = deploy;
- const uuid = build?.uuid;
- const uuidText = uuid ? `[DEPLOY ${uuid}][patchDeployWithTag]:` : '[DEPLOY][patchDeployWithTag]:';
+ const _uuid = build?.uuid;
let ecrRepo = deployable?.ecr as string;
const serviceName = build?.enableFullYaml ? deployable?.name : service?.name;
@@ -868,7 +846,7 @@ export default class DeployService extends BaseService {
initDockerImage,
})
.catch((error) => {
- logger.warn(`${uuidText} ${error}`);
+ getLogger().warn({ error }, 'Deploy: tag patch failed');
});
}
@@ -906,11 +884,10 @@ export default class DeployService extends BaseService {
async buildImageForHelmAndGithub(deploy: Deploy, runUUID: string) {
const { build, deployable } = deploy;
const uuid = build?.uuid;
- const uuidText = `[BUILD ${deploy?.uuid}]:`;
if (deploy.branchName === null) {
// This means we're using an external host, rather than building from source.
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.READY }, runUUID);
- logger.info(`${uuidText} [${deploy?.uuid}] Deploy is marked ready for external Host`);
+ getLogger().info('Deploy: ready reason=externalHost');
} else {
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.CLONING }, runUUID);
@@ -935,9 +912,7 @@ export default class DeployService extends BaseService {
// Verify we actually have a SHA from github before proceeding
if (!fullSha) {
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID);
- logger.error(
- `${uuidText} Failed to retrieve SHA for ${owner}/${name}/${deploy.branchName} to build ${deploy.uuid}`
- );
+ getLogger().error({ owner, name, branch: deploy.branchName }, 'Git: SHA fetch failed');
return false;
}
@@ -961,7 +936,7 @@ export default class DeployService extends BaseService {
const gitOrg = (app_setup?.org && app_setup.org.trim()) || 'REPLACE_ME_ORG';
if (!ecrDomain || !registry) {
- logger.child({ lifecycleDefaults }).error(`[BUILD ${deploy.uuid}] Missing ECR config to build image`);
+ getLogger().error({ lifecycleDefaults }, 'ECR: config missing for build');
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.ERROR }, runUUID);
return false;
}
@@ -970,11 +945,11 @@ export default class DeployService extends BaseService {
(await codefresh.tagExists({ tag, ecrRepo, uuid })) &&
(!initDockerfilePath || (await codefresh.tagExists({ tag: initTag, ecrRepo, uuid })));
- logger.debug(`${uuidText} Tags exist check for ${deploy.uuid}: ${tagsExist}`);
+ getLogger().debug({ tagsExist }, 'Build: tags exist check');
// Check for and skip duplicates
if (!tagsExist) {
- logger.info(`${uuidText} Building image`);
+ getLogger().info('Image: building');
// if this deploy has any env vars that depend on other builds, we need to wait for those builds to finish
// and update the env vars in this deploy before we can build the image
@@ -1012,7 +987,7 @@ export default class DeployService extends BaseService {
};
if (['buildkit', 'kaniko'].includes(deployable.builder?.engine)) {
- logger.info(`${uuidText} Building image with native build (${deployable.builder.engine})`);
+ getLogger().info(`Image: building engine=${deployable.builder.engine}`);
const nativeOptions = {
...buildOptions,
@@ -1048,7 +1023,7 @@ export default class DeployService extends BaseService {
}
}
- logger.info(`${uuidText} Building image with Codefresh`);
+ getLogger().info('Image: building engine=codefresh');
const buildPipelineId = await codefresh.buildImage(buildOptions);
const buildLogs = `https://g.codefresh.io/build/${buildPipelineId}`;
@@ -1060,15 +1035,15 @@ export default class DeployService extends BaseService {
if (buildSuccess) {
await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain });
- logger.child({ url: buildLogs }).info(`${uuidText} Image built successfully`);
+ getLogger().info('Image: built');
return true;
} else {
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILD_FAILED }, runUUID);
- logger.child({ url: buildLogs }).warn(`${uuidText} Error building image for ${deploy?.uuid}`);
+ getLogger().warn({ url: buildLogs }, 'Build: image failed');
return false;
}
} else {
- logger.info(`${uuidText} Image already exist for ${deploy?.uuid}`);
+ getLogger().info('Image: skipped reason=exists');
await this.patchDeployWithTag({ tag, initTag, deploy, ecrDomain });
await this.patchAndUpdateActivityFeed(deploy, { status: DeployStatus.BUILT }, runUUID);
return true;
@@ -1084,13 +1059,13 @@ export default class DeployService extends BaseService {
const servicesToWaitFor = extractEnvVarsWithBuildDependencies(deploy.deployable.env);
for (const [serviceName, patternsInfo] of Object.entries(servicesToWaitFor)) {
- const awaitingService = deploy.uuid;
+ const _awaitingService = deploy.uuid;
const waitingForService = `${serviceName}-${build.uuid}`;
const dependentDeploy = deploys.find((d) => d.uuid === waitingForService);
if (dependentDeploy.uuid === waitingForService) {
- logger.info(`[BUILD ${awaitingService}]: ${awaitingService} is waiting for ${waitingForService} to complete`);
+ getLogger().info(`Build: waiting service=${waitingForService}`);
await this.patchAndUpdateActivityFeed(
deploy,
@@ -1130,9 +1105,7 @@ export default class DeployService extends BaseService {
// about the output of that build, we can just pass an empty string as the pattern
if (!item.pattern || item.pattern.trim() === '') {
extractedValues[item.envKey] = '';
- logger.info(
- `[BUILD ${awaitingDeploy?.uuid}]: Empty pattern for key "${item.envKey}". Assuming build dependecy`
- );
+ getLogger().info(`Build: dependency envKey=${item.envKey} pattern=empty`);
return;
}
@@ -1141,17 +1114,18 @@ export default class DeployService extends BaseService {
if (match && match[0]) {
extractedValues[item.envKey] = match[0];
- logger.debug(
- `[BUILD ${awaitingDeploy?.uuid}]: Successfully extracted value: "${match[0]}" for key: "${item.envKey}" using pattern "${item.pattern}"`
+ getLogger().debug(
+ { value: match[0], envKey: item.envKey, pattern: item.pattern },
+ 'Successfully extracted value'
);
} else {
- logger.info(
- `[BUILD ${awaitingDeploy?.uuid}]: No match found for pattern "${item.pattern}" in ${serviceName} build pipeline with id: ${pipelineId}. Value of ${item.envKey} will be empty`
+ getLogger().info(
+ `Build: noMatch pattern=${item.pattern} service=${serviceName} pipelineId=${pipelineId} envKey=${item.envKey}`
);
}
});
} catch (error) {
- logger.error(`Error processing pipeline ${pipelineId} for service ${serviceName}:`, error);
+ getLogger().error({ error, pipelineId, serviceName }, 'Pipeline: processing failed');
throw error;
}
}
diff --git a/src/server/services/deployable.ts b/src/server/services/deployable.ts
index eb8f746..8248e9e 100644
--- a/src/server/services/deployable.ts
+++ b/src/server/services/deployable.ts
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import BaseService from './_service';
import { Environment, Repository, Service, PullRequest, Build, Deploy } from 'server/models';
import Deployable from 'server/models/Deployable';
@@ -24,10 +24,6 @@ import { CAPACITY_TYPE, DeployTypes } from 'shared/constants';
import { Builder, Helm, KedaScaleToZero } from 'server/models/yaml';
import GlobalConfigService from './globalConfig';
-const logger = rootLogger.child({
- filename: 'services/deployable.ts',
-});
-
export interface DeployableAttributes {
appShort?: string;
ecr?: string;
@@ -211,11 +207,11 @@ export default class DeployableService extends BaseService {
attributes.serviceDisksYaml = JSON.stringify(yamlServiceDisks);
}
} catch (error) {
- logger
- .child({ service, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem generating deployable attributes from the database configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).error('Deployable: generate attributes from DB failed');
throw error;
}
@@ -383,11 +379,11 @@ export default class DeployableService extends BaseService {
};
}
} catch (error) {
- logger
- .child({ service, deployment })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem generating deployable attributes from the yaml configuration. Error: ${error}`
- );
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).error('Deployable: generate attributes from YAML failed');
throw error;
}
@@ -419,11 +415,11 @@ export default class DeployableService extends BaseService {
mergedAttributes = { ...yamlAttributes };
}
} catch (error) {
- logger
- .child({ dbAttributes, yamlAttributes, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem merging deployable attributes from the database with the yaml configuration. ${error}`
- );
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).error('Deployable: merge attributes failed');
throw error;
}
@@ -475,11 +471,11 @@ export default class DeployableService extends BaseService {
}
}
} catch (error) {
- logger
- .child({ service, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem overwriting the deployable object configuration with the yaml configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).error('Deployable: overwrite config with YAML failed');
throw error;
}
}
@@ -514,8 +510,8 @@ export default class DeployableService extends BaseService {
const dependencies: Service[] = await this.db.models.Service.query().where('dependsOnServiceId', service.id);
- logger.debug(
- `[BUILD ${buildUUID}] ${service.name} has ${dependencies.length} database dependency(dependsOnServiceId).`
+ getLogger({ buildUUID, service: service.name }).debug(
+ `Service has ${dependencies.length} database dependency(dependsOnServiceId)`
);
await Promise.all(
@@ -539,11 +535,11 @@ export default class DeployableService extends BaseService {
})
);
} catch (error) {
- logger
- .child({ service, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem creating or updating the deployable attributes from the database configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).error('Deployable: upsert attributes from DB failed');
throw error;
}
}
@@ -570,9 +566,11 @@ export default class DeployableService extends BaseService {
const dbService: Service = await Service.query()
.findOne({ name: service.name })
.catch((error) => {
- logger
- .child({ error })
- .debug(`[BUILD ${buildUUID}] Not really an error. Just no db config for this yaml based service`);
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).debug('No database config for this yaml based service');
return null;
});
@@ -592,8 +590,8 @@ export default class DeployableService extends BaseService {
await build?.pullRequest?.$fetchGraph('[repository]');
repository = build?.pullRequest?.repository;
if (!repository) {
- logger.error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] Unable to find ${repoName} from Lifecycle database. Please verify the repository name and make sure Lifecycle Github app is installed on repository.`
+ getLogger({ buildUUID, service: service.name }).error(
+ `Unable to find ${repoName} from Lifecycle database. Verify repository name and ensure Lifecycle Github app is installed`
);
}
}
@@ -641,11 +639,11 @@ export default class DeployableService extends BaseService {
);
}
} catch (error) {
- logger
- .child({ deployableServices, service, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${service.name}] There was a problem creating or updating the deployable attributes from the yaml configuration when using a services yaml configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: service.name,
+ error,
+ }).error('Deployable: upsert attributes from YAML failed');
throw error;
}
}
@@ -714,11 +712,11 @@ export default class DeployableService extends BaseService {
build
);
} catch (error) {
- logger
- .child({ dbEnvService, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${dbEnvService.name}] There was a problem during attribution while using the database configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: dbEnvService.name,
+ error,
+ }).error('Deployable: attribution failed source=db');
throw error;
}
})
@@ -734,11 +732,11 @@ export default class DeployableService extends BaseService {
await attribution(environment.optionalServices, false);
}
} catch (error) {
- logger
- .child({ environment, error })
- .error(
- `[BUILD ${buildUUID}] [ENVIRONMENT ${environment.name}] There was a problem creating or update the deployable object from the database configuration.`
- );
+ getLogger({
+ buildUUID,
+ environment: environment.name,
+ error,
+ }).error('Deployable: upsert from DB config failed');
throw error;
}
}
@@ -777,7 +775,7 @@ export default class DeployableService extends BaseService {
id: yamlEnvService.serviceId,
})
.catch((error) => {
- logger.child({ error }).warn(`[BUILD ${buildUUID}] error`);
+ getLogger({ buildUUID, error }).warn('Query: failed');
return null;
});
@@ -797,18 +795,16 @@ export default class DeployableService extends BaseService {
build
);
} else {
- logger.error(`[BUILD ${buildUUID}] [yamlEnvService ${yamlEnvService}]`);
- logger.error(`[BUILD ${buildUUID}] [service ${service}]`);
- logger.error(
- `[BUILD ${buildUUID}] Service ID (${yamlEnvService.serviceId}) cannot be find in the database configuration.`
+ getLogger({ buildUUID, serviceId: yamlEnvService.serviceId }).error(
+ 'Service ID cannot be found in the database configuration'
);
}
} catch (error) {
- logger
- .child({ yamlEnvService, error })
- .error(
- `[BUILD ${buildUUID}] [SERVICE ${yamlEnvService.name}] There was a problem creating or updating the deployable object from the yaml configuration when using a services ID.`
- );
+ getLogger({
+ buildUUID,
+ service: yamlEnvService.name,
+ error,
+ }).error('Deployable: create/update from yaml failed source=serviceId');
throw error;
}
} else {
@@ -885,32 +881,30 @@ export default class DeployableService extends BaseService {
build
);
} else {
- logger.warn(
- `[BUILD ${buildUUID}] Service Name (${yamlEnvService.name}) cannot be find in the yaml configuration. Is it referenced via the Lifecycle database?`
+ getLogger({ buildUUID, service: yamlEnvService.name }).warn(
+ 'Service cannot be found in yaml configuration. Is it referenced via the Lifecycle database?'
);
}
} else {
- logger
- .child({ repository, deploy })
- .warn(
- `[BUILD ${buildUUID}][DEPLOY ${deploy?.uuid}] Unable to locate YAML config file from ${repository?.fullName}:${branchName}. Is this a database service?`
- );
+ getLogger({ buildUUID, deployUUID: deploy?.uuid, repository: repository?.fullName }).warn(
+ `Unable to locate YAML config file from ${repository?.fullName}:${branchName}. Is this a database service?`
+ );
}
} catch (error) {
- logger
- .child({ error, yamlEnvService })
- .error(
- `[BUILD ${buildUUID}] There was a problem creating or updating the deployable object from the yaml configuration when using a services yaml configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: yamlEnvService.name,
+ error,
+ }).error('Deployable: create/update from yaml failed');
throw error;
}
}
} catch (error) {
- logger
- .child({ error, yamlEnvService })
- .error(
- `[BUILD ${buildUUID}] There was a problem creating or updating the deployable object from the yaml configuration.`
- );
+ getLogger({
+ buildUUID,
+ service: yamlEnvService.name,
+ error,
+ }).error('Deployable: create/update from yaml failed');
throw error;
}
})
@@ -969,14 +963,10 @@ export default class DeployableService extends BaseService {
}
}
} else {
- logger.warn(`[BUILD ${buildUUID}] Missing PR branch name.`);
+ getLogger({ buildUUID }).warn('PR: branch name missing');
}
} catch (error) {
- logger
- .child({ error })
- .error(
- `[BUILD ${buildUUID}] There was a problem creating or updating the deployable object from the yaml configuration.`
- );
+ getLogger({ buildUUID, error }).error('Deployable: create/update from yaml failed');
throw error;
}
}
@@ -1028,15 +1018,17 @@ export default class DeployableService extends BaseService {
Array.from(deployableServices.values())
);
} else {
- logger.fatal(`[BUILD ${buildUUID}] Pull Request cannot be undefined`);
+ getLogger({ buildUUID }).fatal('Pull Request cannot be undefined');
}
} catch (error) {
- logger
- .child({ environment, error })
- .error(`[BUILD ${buildUUID}] [ENVIRONMENT ${environment.name}] There was a problem upserting the deployables.`);
+ getLogger({
+ buildUUID,
+ environment: environment.name,
+ error,
+ }).error('Deployable: upsert failed');
throw error;
}
- logger.info(`[BUILD ${buildUUID}] Created/Updated ${deployables.length} deployables`);
+ getLogger({ buildUUID }).info(`Deployable: upserted count=${deployables.length}`);
return deployables;
}
@@ -1063,9 +1055,11 @@ export default class DeployableService extends BaseService {
.where('buildId', buildId)
.first()
.catch((error) => {
- logger
- .child({ error })
- .error(`[BUILD ${buildUUID}] [SERVICE ${deployableAttr.name}] Unable to search deployable`);
+ getLogger({
+ buildUUID,
+ service: deployableAttr.name,
+ error,
+ }).error('Deployable: search failed');
return undefined;
});
@@ -1074,15 +1068,19 @@ export default class DeployableService extends BaseService {
.$query()
.patch(deployableAttr as object)
.catch((error) => {
- logger
- .child({ error })
- .error(`[BUILD ${buildUUID}] [SERVICE ${deployableAttr.name}] Unable to patch deployable`);
+ getLogger({
+ buildUUID,
+ service: deployableAttr.name,
+ error,
+ }).error('Deployable: patch failed');
});
} else {
deployable = await this.db.models.Deployable.create(deployableAttr as object).catch((error) => {
- logger
- .child({ error })
- .error(`[BUILD ${buildUUID}] [SERVICE ${deployableAttr.name}] Unable to create new deployable`);
+ getLogger({
+ buildUUID,
+ service: deployableAttr.name,
+ error,
+ }).error('Deployable: create failed');
return undefined;
});
}
diff --git a/src/server/services/environment.ts b/src/server/services/environment.ts
index 7c8f9ab..98c9b75 100644
--- a/src/server/services/environment.ts
+++ b/src/server/services/environment.ts
@@ -14,14 +14,10 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import Environment from 'server/models/Environment';
import Service from './_service';
-const logger = rootLogger.child({
- filename: 'services/repository.ts',
-});
-
export default class EnvironmentService extends Service {
/**
* Retrieve a Lifecycle environment. If it doesn't exist, create a new record.
@@ -48,9 +44,7 @@ export default class EnvironmentService extends Service {
autoDeploy,
}));
} catch (error) {
- logger.fatal(
- `[Environment ${envName}] [UUID ${uuid != null ?? '???'}] Unable to find or create environment: ${error}`
- );
+ getLogger({ environment: envName, uuid, error }).fatal('Unable to find or create environment');
throw error;
}
diff --git a/src/server/services/github.ts b/src/server/services/github.ts
index 1f29d92..ae275fb 100644
--- a/src/server/services/github.ts
+++ b/src/server/services/github.ts
@@ -17,7 +17,7 @@
import { parse as fParse } from 'flatted';
import _ from 'lodash';
import Service from './_service';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, extractContextForQueue, LogStage } from 'server/lib/logger';
import { IssueCommentEvent, PullRequestEvent, PushEvent } from '@octokit/webhooks-types';
import {
GithubPullRequestActions,
@@ -35,10 +35,6 @@ import { createOrUpdateGithubDeployment, deleteGithubDeploymentAndEnvironment }
import { enableKillSwitch, isStaging, hasDeployLabel } from 'server/lib/utils';
import { redisClient } from 'server/lib/dependencies';
-const logger = rootLogger.child({
- filename: 'services/github.ts',
-});
-
export default class GithubService extends Service {
// Handle the pull request webhook mapping the entrance with webhook body
async handlePullRequestHook({
@@ -60,7 +56,7 @@ export default class GithubService extends Service {
labels,
},
}: PullRequestEvent) {
- logger.info(`[GITHUB ${fullName}/${branch}] Pull request ${action}`);
+ getLogger({}).info(`PR: ${action} repo=${fullName} branch=${branch}`);
const isOpened = [GithubPullRequestActions.OPENED, GithubPullRequestActions.REOPENED].includes(
action as GithubPullRequestActions
);
@@ -79,16 +75,7 @@ export default class GithubService extends Service {
isJSON: true,
})) as LifecycleYamlConfigOptions;
} catch (error) {
- logger
- .child({
- action,
- status,
- branch,
- branchSha,
- fullName,
- error,
- })
- .warn(`[GITHUB ${fullName}/${branch}][handlePullRequestHook] Unable to fetch lifecycle config`);
+ getLogger({}).warn({ error }, `Config: fetch failed repo=${fullName}/${branch}`);
}
}
repository = await this.db.services.Repository.findRepository(ownerId, repositoryId, installationId);
@@ -153,6 +140,7 @@ export default class GithubService extends Service {
action: 'enable',
waitForComment: true,
labels: labels.map((l) => l.name),
+ ...extractContextForQueue(),
});
}
} else if (isClosed) {
@@ -160,7 +148,7 @@ export default class GithubService extends Service {
pullRequestId,
});
if (!build) {
- logger.warn(`[GITHUB ${fullName}/${branch}] No build found for closed pull request. Skipping deletion`);
+ getLogger({}).warn(`Build: not found for closed PR repo=${fullName}/${branch}`);
return;
}
await this.db.services.BuildService.deleteBuild(build);
@@ -170,20 +158,11 @@ export default class GithubService extends Service {
action: 'disable',
waitForComment: false,
labels: labels.map((l) => l.name),
+ ...extractContextForQueue(),
});
}
} catch (error) {
- logger
- .child({
- action,
- status,
- pullRequest,
- environment,
- repository,
- error,
- build,
- })
- .fatal(`[GITHUB ${fullName}/${branch}] Unable to handle Github pull request event: ${error}`);
+ getLogger().fatal({ error }, `Github: PR event handling failed repo=${fullName} branch=${branch}`);
}
}
@@ -202,16 +181,14 @@ export default class GithubService extends Service {
if (!pullRequest || isBot) return;
await pullRequest.$fetchGraph('[build, repository]');
- logger.info(`[GITHUB ${pullRequest.build?.uuid}] Pull request comment edited by ${commentCreatorUsername}`);
- await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body);
+ const buildUuid = pullRequest.build?.uuid;
+
+ return withLogContext({ buildUuid }, async () => {
+ getLogger().info(`PR: edited by=${commentCreatorUsername}`);
+ await this.db.services.ActivityStream.updateBuildsAndDeploysFromCommentEdit(pullRequest, body);
+ });
} catch (error) {
- logger
- .child({
- error,
- pullRequest,
- commentCreatorUsername,
- })
- .error(`Unable to handle Github Issue Comment event: ${error}`);
+ getLogger().error({ error }, `GitHub: issue comment handling failed`);
}
};
@@ -220,7 +197,7 @@ export default class GithubService extends Service {
action,
pull_request: { id: githubPullRequestId, labels, state: status },
} = body;
- let pullRequest: PullRequest, build: Build, repository: Repository;
+ let pullRequest: PullRequest, build: Build, _repository: Repository;
try {
// this is a hacky way to force deploy by adding a label
const labelNames = labels.map(({ name }) => name.toLowerCase()) || [];
@@ -238,7 +215,7 @@ export default class GithubService extends Service {
await pullRequest.$fetchGraph('[build, repository]');
build = pullRequest?.build;
- repository = pullRequest?.repository;
+ _repository = pullRequest?.repository;
await this.patchPullRequest({
pullRequest,
labels,
@@ -246,11 +223,7 @@ export default class GithubService extends Service {
status,
autoDeploy: false,
});
- logger.info(
- `[BUILD ${build?.uuid}] Patched pull request with labels(${action}) ${
- labels.length ? `: ${labels.map(({ name }) => name).join(', ')}` : ''
- }`
- );
+ getLogger().info(`Label: ${action} labels=[${labels.map(({ name }) => name).join(',')}]`);
if (pullRequest.deployOnUpdate === false) {
// when pullRequest.deployOnUpdate is false, it means that there is no `lifecycle-deploy!` label
@@ -260,22 +233,14 @@ export default class GithubService extends Service {
const buildId = build?.id;
if (!buildId) {
- logger
- .child({ build })
- .error(`[BUILD ${build?.uuid}][handleLabelWebhook][buidIdError] No build ID found for this pull request!`);
+ getLogger().error(`Build: id not found for=handleLabelWebhook`);
}
await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
buildId,
+ ...extractContextForQueue(),
});
} catch (error) {
- logger
- .child({
- build,
- pullRequest,
- repository,
- error,
- })
- .error(`[BUILD ${build?.uuid}][handleLabelWebhook] Error processing label webhook`);
+ getLogger().error({ error }, `Label: webhook processing failed`);
}
};
@@ -284,7 +249,7 @@ export default class GithubService extends Service {
const branchName = ref.split('refs/heads/')[1];
if (!branchName) return;
const hasVoidCommit = [previousCommit, latestCommit].some((commit) => this.isVoidCommit(commit));
- logger.debug(`[GITHUB] Push event repo ${repoName}, branch ${branchName}`);
+ getLogger({}).debug(`Push event repo=${repoName} branch=${branchName}`);
const models = this.db.models;
try {
@@ -331,7 +296,7 @@ export default class GithubService extends Service {
for (const build of buildsToDeploy) {
const buildId = build?.id;
if (!buildId) {
- logger.error(`[BUILD ${build?.uuid}][handlePushWebhook][buidIdError] No build ID found for this build!`);
+ getLogger().error(`Build: id not found for=handlePushWebhook`);
}
// Only check for failed deploys on PR environments, not static environments
let hasFailedDeploys = false;
@@ -344,23 +309,24 @@ export default class GithubService extends Service {
hasFailedDeploys = failedDeploys.length > 0;
if (hasFailedDeploys) {
- logger.info(
- `[BUILD ${build?.uuid}] Detected ${failedDeploys.length} failed deploy(s). Triggering full redeploy for push on repo: ${repoName} branch: ${branchName}`
+ getLogger().info(
+ `Push: redeploying reason=failedDeploys count=${failedDeploys.length} repo=${repoName} branch=${branchName}`
);
}
}
if (!hasFailedDeploys) {
- logger.info(`[BUILD ${build?.uuid}] Deploying build for push on repo: ${repoName} branch: ${branchName}`);
+ getLogger().info(`Push: deploying repo=${repoName} branch=${branchName}`);
}
await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
buildId,
...(hasFailedDeploys ? {} : { githubRepositoryId }),
+ ...extractContextForQueue(),
});
}
} catch (error) {
- logger.error(`[GITHUB] Error processing push webhook: ${error}`);
+ getLogger({}).error({ error }, `Push: webhook processing failed`);
}
};
@@ -397,13 +363,15 @@ export default class GithubService extends Service {
if (!build) return;
- logger.info(`[BUILD ${build?.uuid}] Redeploying static env for push on branch`);
+ getLogger().info(`Push: redeploying reason=staticEnv`);
await this.db.services.BuildService.resolveAndDeployBuildQueue.add('resolve-deploy', {
buildId: build?.id,
+ ...extractContextForQueue(),
});
} catch (error) {
- logger.error(
- `[GITHUB] Error processing push webhook for static env for branch: ${branchName} at repository id: ${githubRepositoryId}.\n Error: ${error}`
+ getLogger({}).error(
+ { error },
+ `Push: static env webhook failed branch=${branchName} repositoryId=${githubRepositoryId}`
);
}
};
@@ -412,7 +380,7 @@ export default class GithubService extends Service {
const { body } = req;
const type = req.headers['x-github-event'];
- logger.debug(`***** Incoming Github Webhook: ${type} *****`);
+ getLogger({}).debug(`Incoming Github Webhook type=${type}`);
const isVerified = github.verifyWebhookSignature(req);
if (!isVerified) {
@@ -424,28 +392,28 @@ export default class GithubService extends Service {
try {
const labelNames = body.pull_request.labels.map(({ name }) => name.toLowerCase()) || [];
if (isStaging() && !labelNames.includes(FallbackLabels.DEPLOY_STG)) {
- logger.debug(`[GITHUB] STAGING RUN DETECTED - Skipping processing of this event`);
+ getLogger({}).debug(`Staging run detected, skipping processing of this event`);
return;
}
const hasLabelChange = [GithubWebhookTypes.LABELED, GithubWebhookTypes.UNLABELED].includes(body.action);
if (hasLabelChange) return await this.handleLabelWebhook(body);
else return await this.handlePullRequestHook(body);
} catch (e) {
- logger.error(`There is problem when handling PULL_REQUEST event: ${e}`);
+ getLogger({}).error({ error: e }, `GitHub: PULL_REQUEST event handling failed`);
throw e;
}
case GithubWebhookTypes.PUSH:
try {
return await this.handlePushWebhook(body);
} catch (e) {
- logger.error(`There is problem when handling PUSH event: ${e}`);
+ getLogger({}).error({ error: e }, `GitHub: PUSH event handling failed`);
throw e;
}
case GithubWebhookTypes.ISSUE_COMMENT:
try {
return await this.handleIssueCommentWebhook(body);
} catch (e) {
- logger.error(`There is problem when handling ISSUE_COMMENT event: ${e}`);
+ getLogger({}).error({ error: e }, `GitHub: ISSUE_COMMENT event handling failed`);
throw e;
}
default:
@@ -462,11 +430,16 @@ export default class GithubService extends Service {
});
processWebhooks = async (job) => {
- try {
- await this.db.services.GithubService.dispatchWebhook(fParse(job.data.message));
- } catch (error) {
- logger.error(`Error processing webhook:`, error);
- }
+ const { correlationId, sender, message, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, sender, _ddTraceContext }, async () => {
+ try {
+ getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).debug('Webhook: processing');
+ await this.db.services.GithubService.dispatchWebhook(fParse(message));
+ } catch (error) {
+ getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).fatal({ error }, 'Error processing webhook');
+ }
+ });
};
githubDeploymentQueue = this.queueManager.registerQueue(QUEUE_NAMES.GITHUB_DEPLOYMENT, {
@@ -478,26 +451,34 @@ export default class GithubService extends Service {
});
processGithubDeployment = async (job) => {
- // This queue has 3 attempts configured, so errors will cause retries
- const { deployId, action } = job.data;
- const text = `[DEPLOYMENT ${deployId}][processGithubDeployment] ${action}`;
- const deploy = await this.db.models.Deploy.query().findById(deployId);
- try {
- switch (action) {
- case 'create': {
- await createOrUpdateGithubDeployment(deploy);
- break;
- }
- case 'delete': {
- await deleteGithubDeploymentAndEnvironment(deploy);
- break;
+ const { deployId, action, sender, correlationId, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, sender, _ddTraceContext, deployUuid: String(deployId) }, async () => {
+ const deploy = await this.db.models.Deploy.query().findById(deployId);
+ try {
+ getLogger({ stage: LogStage.DEPLOY_STARTING }).debug(`GitHub deployment: ${action}`);
+
+ switch (action) {
+ case 'create': {
+ await createOrUpdateGithubDeployment(deploy);
+ break;
+ }
+ case 'delete': {
+ await deleteGithubDeploymentAndEnvironment(deploy);
+ break;
+ }
+ default:
+ throw new Error(`Unknown action: ${action}`);
}
- default:
- throw new Error(`Unknown action: ${action}`);
+
+ getLogger({ stage: LogStage.DEPLOY_COMPLETE }).debug(`GitHub deployment: ${action} completed`);
+ } catch (error) {
+ getLogger({ stage: LogStage.DEPLOY_FAILED }).warn(
+ { error },
+ `Error processing GitHub deployment job=${job?.id} action=${action}`
+ );
}
- } catch (error) {
- logger.child({ error }).warn(`${text} Error processing job ${job?.id} with action ${action}`);
- }
+ });
};
private patchPullRequest = async ({ pullRequest, labels, action, status, autoDeploy = false }) => {
@@ -523,15 +504,7 @@ export default class GithubService extends Service {
labels: JSON.stringify(labelNames),
});
} catch (error) {
- logger
- .child({
- error,
- pullRequest,
- labels,
- action,
- status,
- })
- .error(`[BUILD][patchPullRequest] Error patching pull request for ${pullRequest?.fullName}/${branch}`);
+ getLogger().error({ error }, `PR: patch failed repo=${pullRequest?.fullName}/${branch}`);
}
};
diff --git a/src/server/services/globalConfig.ts b/src/server/services/globalConfig.ts
index 5140bd2..18c780c 100644
--- a/src/server/services/globalConfig.ts
+++ b/src/server/services/globalConfig.ts
@@ -15,23 +15,23 @@
*/
import { createAppAuth } from '@octokit/auth-app';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import BaseService from './_service';
import { GlobalConfig, LabelsConfig } from './types/globalConfig';
import { GITHUB_APP_INSTALLATION_ID, APP_AUTH, APP_ENV, QUEUE_NAMES } from 'shared/config';
import { Metrics } from 'server/lib/metrics';
import { redisClient } from 'server/lib/dependencies';
-const logger = rootLogger.child({
- filename: 'services/globalConfig.ts',
-});
-
const REDIS_CACHE_KEY = 'global_config';
const GITHUB_CACHED_CLIENT_TOKEN = 'github_cached_client_token';
export default class GlobalConfigService extends BaseService {
private static instance: GlobalConfigService;
+ private memoryCache: GlobalConfig | null = null;
+ private memoryCacheExpiry: number = 0;
+ private static MEMORY_CACHE_TTL_MS = 30000; // 30 seconds
+
static getInstance(): GlobalConfigService {
if (!this.instance) {
this.instance = new GlobalConfigService();
@@ -39,6 +39,11 @@ export default class GlobalConfigService extends BaseService {
return this.instance;
}
+ clearMemoryCache(): void {
+ this.memoryCache = null;
+ this.memoryCacheExpiry = 0;
+ }
+
protected cacheRefreshQueue = this.queueManager.registerQueue(QUEUE_NAMES.GLOBAL_CONFIG_CACHE_REFRESH, {
connection: redisClient.getConnection(),
});
@@ -60,29 +65,46 @@ export default class GlobalConfigService extends BaseService {
}
/**
- * Get all global configs. First, it will try to retrieve them from the cache.
- * If they are not available if cache is empty, it will fetch them from the DB, cache them, and then return them.
+ * Get all global configs. Uses a three-tier caching strategy:
+ * 1. In-memory cache (30 second TTL) - fastest, eliminates Redis calls
+ * 2. Redis cache - shared across pods
+ * 3. Database - source of truth
* @returns A map of all config keys values.
**/
async getAllConfigs(refreshCache: boolean = false): Promise {
+ const now = Date.now();
+
+ if (!refreshCache && this.memoryCache && now < this.memoryCacheExpiry) {
+ return this.memoryCache;
+ }
+
const cachedConfigs = await this.redis.hgetall(REDIS_CACHE_KEY);
if (Object.keys(cachedConfigs).length === 0 || refreshCache) {
- logger.debug('Cache miss for all configs, fetching from DB');
+ getLogger().debug('Cache miss for all configs, fetching from DB');
const configsFromDb = await this.getAllConfigsFromDb();
- // to delete keys removed from database
- // this is not a common scenario that happens with global config table, but just to be safe
const keysFromDb = new Set(Object.keys(configsFromDb));
const keysToRemove = Object.keys(cachedConfigs).filter((key) => !keysFromDb.has(key));
if (keysToRemove.length > 0) {
await this.redis.hdel(REDIS_CACHE_KEY, ...keysToRemove);
- logger.debug(`Deleted stale keys from cache: ${keysToRemove.join(', ')}`);
+ getLogger().debug(`Deleted stale keys from cache: keys=${keysToRemove.join(', ')}`);
}
await this.redis.hmset(REDIS_CACHE_KEY, configsFromDb);
- return this.deserialize(configsFromDb);
+ const result = this.deserialize(configsFromDb);
+
+ this.memoryCache = result;
+ this.memoryCacheExpiry = now + GlobalConfigService.MEMORY_CACHE_TTL_MS;
+
+ return result;
}
- return this.deserialize(cachedConfigs);
+
+ const result = this.deserialize(cachedConfigs);
+
+ this.memoryCache = result;
+ this.memoryCacheExpiry = now + GlobalConfigService.MEMORY_CACHE_TTL_MS;
+
+ return result;
}
/**
@@ -120,7 +142,7 @@ export default class GlobalConfigService extends BaseService {
if (!labels) throw new Error('Labels configuration not found in global config');
return labels;
} catch (error) {
- logger.error('Error retrieving labels configuration, using fallback defaults', error);
+ getLogger().error({ error }, 'Config: labels fetch failed using=defaults');
// Return fallback defaults on error
return {
deploy: ['lifecycle-deploy!'],
@@ -139,7 +161,7 @@ export default class GlobalConfigService extends BaseService {
try {
deserializedConfigs[key as keyof GlobalConfig] = JSON.parse(value as string);
} catch (e) {
- logger.error(`Error deserializing config for key ${key}: ${e.message}`);
+ getLogger().error({ error: e }, `Config: deserialize failed key=${key}`);
}
}
return deserializedConfigs as GlobalConfig;
@@ -172,7 +194,7 @@ export default class GlobalConfigService extends BaseService {
try {
await this.getGithubClientToken(true);
} catch (error) {
- logger.child({ error }).error(`Error refreshing GlobalConfig cache during boot: ${error}`);
+ getLogger().error({ error }, 'Config: cache refresh failed during=boot');
}
}
@@ -189,14 +211,19 @@ export default class GlobalConfigService extends BaseService {
);
}
- processCacheRefresh = async () => {
- try {
- await this.getAllConfigs(true);
- await this.getGithubClientToken(true);
- logger.debug('GlobalConfig and Github cache refreshed successfully.');
- } catch (error) {
- logger.child({ error }).error('Error refreshing GlobalConfig cache');
- }
+ processCacheRefresh = async (job) => {
+ const { correlationId } = job?.data || {};
+
+ return withLogContext({ correlationId: correlationId || `cache-refresh-${Date.now()}` }, async () => {
+ try {
+ getLogger({ stage: LogStage.CONFIG_REFRESH }).info('Config: refreshing type=global_config,github_token');
+ await this.getAllConfigs(true);
+ await this.getGithubClientToken(true);
+ getLogger({ stage: LogStage.CONFIG_REFRESH }).debug('GlobalConfig and Github cache refreshed successfully');
+ } catch (error) {
+ getLogger({ stage: LogStage.CONFIG_FAILED }).error({ error }, 'Config: cache refresh failed');
+ }
+ });
};
/**
@@ -209,9 +236,9 @@ export default class GlobalConfigService extends BaseService {
async setConfig(key: string, value: any): Promise {
try {
await this.db.knex('global_config').insert({ key, config: value }).onConflict('key').merge();
- logger.info(`Set global config value for key: ${key}`);
+ getLogger().info(`Config: set key=${key}`);
} catch (err: any) {
- logger.child({ err }).error(`Error setting global config value for key: ${key}`);
+ getLogger().error({ error: err }, `Config: set failed key=${key}`);
throw err;
}
}
diff --git a/src/server/services/ingress.ts b/src/server/services/ingress.ts
index f6ab993..de74e5a 100644
--- a/src/server/services/ingress.ts
+++ b/src/server/services/ingress.ts
@@ -15,7 +15,7 @@
*/
/* eslint-disable no-unused-vars */
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import BaseService from './_service';
import fs from 'fs';
import { TMP_PATH, QUEUE_NAMES } from 'shared/config';
@@ -27,10 +27,6 @@ import GlobalConfigService from './globalConfig';
const MANIFEST_PATH = `${TMP_PATH}/ingress`;
-const logger = rootLogger.child({
- filename: 'services/ingress.ts',
-});
-
export default class IngressService extends BaseService {
async updateIngressManifest(): Promise {
return true;
@@ -66,47 +62,57 @@ export default class IngressService extends BaseService {
* @param done the done callback
*/
ingressCleanupForBuild = async (job) => {
- // queue has retry attempts configured, so errors will cause retries
- const buildId = job.data.buildId;
- // For cleanup purpose, we want to include the ingresses for all the services (active or not) to cleanup just in case.
- const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, true);
- const namespace = await this.db.services.BuildService.getNamespace({ id: buildId });
- try {
- configurations.forEach(async (configuration) => {
- await shellPromise(`kubectl delete ingress ingress-${configuration.deployUUID} --namespace ${namespace}`).catch(
- (error) => {
- logger.warn(`[DEPLOY ${configuration.deployUUID}] ${error}`);
+ const { buildId, buildUuid, sender, correlationId, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => {
+ getLogger({ stage: LogStage.INGRESS_PROCESSING }).info('Ingress: cleaning up');
+
+ // For cleanup purpose, we want to include the ingresses for all the services (active or not) to cleanup just in case.
+ const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, true);
+ const namespace = await this.db.services.BuildService.getNamespace({ id: buildId });
+ try {
+ configurations.forEach(async (configuration) => {
+ await shellPromise(
+ `kubectl delete ingress ingress-${configuration.deployUUID} --namespace ${namespace}`
+ ).catch((error) => {
+ getLogger({ stage: LogStage.INGRESS_PROCESSING }).warn(`${error}`);
return null;
+ });
+ });
+ getLogger({ stage: LogStage.INGRESS_COMPLETE }).info('Ingress: cleaned up');
+ } catch (e) {
+ getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error: e }, 'Ingress: cleanup failed');
+ }
+ });
+ };
+
+ createOrUpdateIngressForBuild = async (job) => {
+ const { buildId, buildUuid, sender, correlationId, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, buildUuid, sender, _ddTraceContext }, async () => {
+ getLogger({ stage: LogStage.INGRESS_PROCESSING }).info('Ingress: creating');
+
+ // We just want to create/update ingress for active services only
+ const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, false);
+ const namespace = await this.db.services.BuildService.getNamespace({ id: buildId });
+ const { lifecycleDefaults, domainDefaults } = await GlobalConfigService.getInstance().getAllConfigs();
+ const manifests = configurations.map((configuration) => {
+ return yaml.dump(
+ this.generateNginxManifestForConfiguration({
+ configuration,
+ ingressClassName: lifecycleDefaults?.ingressClassName,
+ altHosts: domainDefaults?.altHttp || [],
+ }),
+ {
+ skipInvalid: true,
}
);
});
- } catch (e) {
- // It's ok if this fails.
- logger.warn(e);
- }
- };
+ manifests.forEach(async (manifest, idx) => {
+ await this.applyManifests(manifest, `${buildId}-${idx}-nginx`, namespace);
+ });
- createOrUpdateIngressForBuild = async (job) => {
- // queue has retry attempts configured, so errors will cause retries
- const buildId = job.data.buildId;
- // We just want to create/update ingress for active services only
- const configurations = await this.db.services.BuildService.configurationsForBuildId(buildId, false);
- const namespace = await this.db.services.BuildService.getNamespace({ id: buildId });
- const { lifecycleDefaults, domainDefaults } = await GlobalConfigService.getInstance().getAllConfigs();
- const manifests = configurations.map((configuration) => {
- return yaml.dump(
- this.generateNginxManifestForConfiguration({
- configuration,
- ingressClassName: lifecycleDefaults?.ingressClassName,
- altHosts: domainDefaults?.altHttp || [],
- }),
- {
- skipInvalid: true,
- }
- );
- });
- manifests.forEach(async (manifest, idx) => {
- await this.applyManifests(manifest, `${buildId}-${idx}-nginx`, namespace);
+ getLogger({ stage: LogStage.INGRESS_COMPLETE }).info('Ingress: created');
});
};
@@ -194,7 +200,7 @@ export default class IngressService extends BaseService {
await fs.promises.writeFile(localPath, manifest, 'utf8');
await shellPromise(`kubectl apply -f ${localPath} --namespace ${namespace}`);
} catch (error) {
- logger.warn(error);
+ getLogger({ stage: LogStage.INGRESS_FAILED }).warn({ error }, 'Ingress: manifest apply failed');
}
};
}
diff --git a/src/server/services/label.ts b/src/server/services/label.ts
index 7432d8d..212b45c 100644
--- a/src/server/services/label.ts
+++ b/src/server/services/label.ts
@@ -18,15 +18,11 @@ import Service from './_service';
import { Queue, Job } from 'bullmq';
import { QUEUE_NAMES } from 'shared/config';
import { redisClient } from 'server/lib/dependencies';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger';
import { waitForColumnValue } from 'shared/utils';
import { updatePullRequestLabels } from 'server/lib/github';
import { getDeployLabel } from 'server/lib/utils';
-const logger = rootLogger.child({
- filename: 'services/label.ts',
-});
-
interface LabelJob {
pullRequestId: number;
action: 'enable' | 'disable';
@@ -55,62 +51,86 @@ export default class LabelService extends Service {
* Process label queue jobs
*/
processLabelQueue = async (job: Job) => {
- const { pullRequestId, action, waitForComment, labels: currentLabels } = job.data;
+ const {
+ pullRequestId,
+ action,
+ waitForComment,
+ labels: currentLabels,
+ sender,
+ correlationId,
+ _ddTraceContext,
+ } = job.data as LabelJob & { sender?: string; correlationId?: string; _ddTraceContext?: Record };
- try {
- const pullRequest = await this.db.models.PullRequest.query()
- .findById(pullRequestId)
- .withGraphFetched('[repository, build]');
+ return withLogContext({ correlationId, sender, _ddTraceContext }, async () => {
+ try {
+ const pullRequest = await this.db.models.PullRequest.query()
+ .findById(pullRequestId)
+ .withGraphFetched('[repository, build]');
- if (!pullRequest) {
- throw new Error(`[BUILD unknown] Pull request with id ${pullRequestId} not found`);
- }
+ if (!pullRequest) {
+ throw new Error(`Pull request with id ${pullRequestId} not found`);
+ }
- const { repository, build } = pullRequest;
- const buildUuid = build?.uuid || 'unknown';
- if (!repository) {
- throw new Error(`[BUILD ${buildUuid}] Repository not found for pull request ${pullRequestId}`);
- }
+ const { repository, build } = pullRequest;
+ const buildUuid = build?.uuid || 'unknown';
+ updateLogContext({ buildUuid });
+ if (!repository) {
+ throw new Error(`Repository not found for pull request ${pullRequestId}`);
+ }
- if (waitForComment && !pullRequest.commentId) {
- logger.debug(`[BUILD ${buildUuid}] Waiting for comment_id to be set before updating labels`);
- // 60 attempts * 5 seconds = 5 minutes
- const updatedPullRequest = await waitForColumnValue(pullRequest, 'commentId', 60, 5000);
+ getLogger({ stage: LogStage.LABEL_PROCESSING }).info(
+ `Label: processing action=${action} pr=${pullRequest.pullRequestNumber}`
+ );
- if (!updatedPullRequest) {
- logger.warn(`[BUILD ${buildUuid}] Timeout waiting for comment_id while updating labels after 5 minutes`);
+ if (waitForComment && !pullRequest.commentId) {
+ getLogger({ stage: LogStage.LABEL_PROCESSING }).debug(
+ 'Waiting for comment_id to be set before updating labels'
+ );
+ // 60 attempts * 5 seconds = 5 minutes
+ const updatedPullRequest = await waitForColumnValue(pullRequest, 'commentId', 60, 5000);
+
+ if (!updatedPullRequest) {
+ getLogger({ stage: LogStage.LABEL_PROCESSING }).warn(
+ 'Timeout waiting for comment_id while updating labels after 5 minutes'
+ );
+ }
}
- }
- let updatedLabels: string[];
+ let updatedLabels: string[];
- const deployLabel = await getDeployLabel();
- if (action === 'enable') {
- if (!currentLabels.includes(deployLabel)) {
- updatedLabels = [...currentLabels, deployLabel];
+ const deployLabel = await getDeployLabel();
+ if (action === 'enable') {
+ if (!currentLabels.includes(deployLabel)) {
+ updatedLabels = [...currentLabels, deployLabel];
+ } else {
+ getLogger({ stage: LogStage.LABEL_COMPLETE }).debug(
+ `Deploy label "${deployLabel}" already exists on PR, skipping update`
+ );
+ return;
+ }
} else {
- logger.debug(`[BUILD ${buildUuid}] Deploy label "${deployLabel}" already exists on PR, skipping update`);
- return;
+ const labelsConfig = await this.db.services.GlobalConfig.getLabels();
+ const deployLabels = labelsConfig.deploy || [];
+ updatedLabels = currentLabels.filter((label) => !deployLabels.includes(label));
}
- } else {
- const labelsConfig = await this.db.services.GlobalConfig.getLabels();
- const deployLabels = labelsConfig.deploy || [];
- updatedLabels = currentLabels.filter((label) => !deployLabels.includes(label));
- }
- await updatePullRequestLabels({
- installationId: repository.githubInstallationId,
- pullRequestNumber: pullRequest.pullRequestNumber,
- fullName: pullRequest.fullName,
- labels: updatedLabels,
- });
+ await updatePullRequestLabels({
+ installationId: repository.githubInstallationId,
+ pullRequestNumber: pullRequest.pullRequestNumber,
+ fullName: pullRequest.fullName,
+ labels: updatedLabels,
+ });
- logger.info(
- `[BUILD ${buildUuid}] Successfully ${action === 'enable' ? 'added' : 'removed'} ${deployLabel} label`
- );
- } catch (error) {
- logger.error({ error }, `[PR ${pullRequestId}] Failed to process label job`);
- throw error;
- }
+ getLogger({ stage: LogStage.LABEL_COMPLETE }).info(
+ `Label: ${action === 'enable' ? 'added' : 'removed'} label=${deployLabel}`
+ );
+ } catch (error) {
+ getLogger({ stage: LogStage.LABEL_FAILED }).error(
+ { error },
+ `Failed to process label job for PR ${pullRequestId}`
+ );
+ throw error;
+ }
+ });
};
}
diff --git a/src/server/services/logStreaming.ts b/src/server/services/logStreaming.ts
index c22e753..a1b384d 100644
--- a/src/server/services/logStreaming.ts
+++ b/src/server/services/logStreaming.ts
@@ -14,15 +14,11 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { getK8sJobStatusAndPod } from 'server/lib/logStreamingHelper';
import BuildService from 'server/services/build';
import { LogStreamResponse, LogType } from './types/logStreaming';
-const logger = rootLogger.child({
- filename: __filename,
-});
-
export class LogStreamingService {
private buildService: BuildService;
@@ -46,9 +42,7 @@ export class LogStreamingService {
const namespace = `env-${uuid}`;
const logType: LogType = (explicitType as LogType) || this.detectLogType(jobName);
- logger.info(
- `uuid=${uuid} name=${serviceName} jobName=${jobName} logType=${logType} message="Processing log request"`
- );
+ getLogger().info(`LogStreaming: processing log request name=${serviceName} jobName=${jobName} logType=${logType}`);
// 3. Fetch K8s Data
const podInfo = await getK8sJobStatusAndPod(jobName, namespace);
diff --git a/src/server/services/override.ts b/src/server/services/override.ts
index e99875e..96f5860 100644
--- a/src/server/services/override.ts
+++ b/src/server/services/override.ts
@@ -15,15 +15,11 @@
*/
import BaseService from './_service';
-import rootLogger from 'server/lib/logger';
+import { getLogger, updateLogContext } from 'server/lib/logger';
import { Build } from 'server/models';
import * as k8s from 'server/lib/kubernetes';
import DeployService from './deploy';
-const logger = rootLogger.child({
- filename: 'services/override.ts',
-});
-
interface ValidationResult {
valid: boolean;
error?: string;
@@ -60,7 +56,7 @@ export default class OverrideService extends BaseService {
return { valid: false, error: 'UUID is not available' };
}
} catch (error) {
- logger.error('Error checking UUID uniqueness:', error);
+ getLogger().error({ error }, 'UUID: uniqueness check failed');
return { valid: false, error: 'Unable to validate UUID' };
}
@@ -77,7 +73,8 @@ export default class OverrideService extends BaseService {
const oldUuid = build.uuid;
const oldNamespace = build.namespace;
- logger.info(`[BUILD ${oldUuid}] Updating UUID to '${newUuid}'`);
+ updateLogContext({ buildUuid: oldUuid, newUuid });
+ getLogger().info(`Override: updating newUuid=${newUuid}`);
try {
return await this.db.models.Build.transact(async (trx) => {
@@ -110,13 +107,10 @@ export default class OverrideService extends BaseService {
const updatedBuild = await this.db.models.Build.query(trx).findById(build.id);
- // Delete the old namespace for cleanup (non-blocking, outside transaction)
k8s.deleteNamespace(oldNamespace).catch((error) => {
- logger.warn(`[BUILD ${oldUuid}] Failed to delete old namespace ${oldNamespace}:`, error);
+ getLogger().warn({ error }, `Namespace: delete failed name=${oldNamespace}`);
});
- logger.info(
- `[BUILD ${newUuid}] Successfully updated UUID from '${oldUuid}' to '${newUuid}', updated ${deploys.length} deploys`
- );
+ getLogger().info(`Override: updated oldUuid=${oldUuid} newUuid=${newUuid} deploysUpdated=${deploys.length}`);
return {
build: updatedBuild,
@@ -124,7 +118,7 @@ export default class OverrideService extends BaseService {
};
});
} catch (error) {
- logger.error(`[BUILD ${oldUuid}] Failed to update UUID to '${newUuid}': ${error}`, error);
+ getLogger().error({ error }, `UUID: update failed newUuid=${newUuid}`);
throw error;
}
}
diff --git a/src/server/services/pullRequest.ts b/src/server/services/pullRequest.ts
index 766b928..1037e87 100644
--- a/src/server/services/pullRequest.ts
+++ b/src/server/services/pullRequest.ts
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage } from 'server/lib/logger';
import { PullRequest, Repository } from 'server/models';
import BaseService from './_service';
import { UniqueViolationError } from 'objection';
@@ -34,10 +34,6 @@ export interface PullRequestOptions {
branch: string;
}
-const logger = rootLogger.child({
- filename: 'services/pullRequest.ts',
-});
-
export default class PullRequestService extends BaseService {
/**
* Get Pull Request Model. If it doesn't exist in the database, create a new one.
@@ -66,22 +62,19 @@ export default class PullRequestService extends BaseService {
});
} catch (error) {
if (error instanceof UniqueViolationError) {
- logger.info(
- `[REPO]${fullName} [PR#]${pullRequestNumber} Pull request already exists, fetching existing record`
- );
+ getLogger({ fullName, pullRequestNumber }).debug('PR: exists, fetching');
pullRequest = await this.db.models.PullRequest.findOne({
repositoryId: repository.id,
githubPullRequestId,
});
if (!pullRequest) {
- // should never happen, but just in case
throw new Error(
`Failed to find pull request after unique violation for repo ${repository.id}, PR ${githubPullRequestId}`
);
}
} else {
- logger.error(`[REPO]${fullName} [PR#]${pullRequestNumber} Failed to create pull request: ${error}`);
+ getLogger({ fullName, pullRequestNumber }).error({ error }, 'PR: create failed');
throw error;
}
}
@@ -124,7 +117,10 @@ export default class PullRequestService extends BaseService {
);
return hasLabel;
} catch (e) {
- logger.error(`[REPO]${pullRequest.fullName} [PR NUM]${pullRequest.pullRequestNumber}: ${e}`);
+ getLogger({ fullName: pullRequest.fullName, pullRequestNumber: pullRequest.pullRequestNumber }).error(
+ { error: e },
+ 'Failed to check lifecycle enabled for pull request'
+ );
return true;
}
}
@@ -146,7 +142,10 @@ export default class PullRequestService extends BaseService {
const hasState = response.data.state === state;
return hasLabels && hasState;
} catch (e) {
- logger.error(`[REPO]${name} [PR ID]${githubPullRequestId}: ${e}`);
+ getLogger({ fullName: name, githubPullRequestId }).error(
+ { error: e },
+ 'Failed to check pull request labels and state'
+ );
return true;
}
}
@@ -161,12 +160,18 @@ export default class PullRequestService extends BaseService {
});
// eslint-disable-next-line no-unused-vars
- processCleanupClosedPRs = async (_job) => {
- try {
- await this.db.services.BuildService.cleanupBuilds();
- } catch (error) {
- logger.error(`Error processing cleanup closed PRs:`, error);
- }
+ processCleanupClosedPRs = async (job) => {
+ const { correlationId } = job.data || {};
+
+ return withLogContext({ correlationId: correlationId || `cleanup-${Date.now()}` }, async () => {
+ try {
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).info('Cleanup: processing closed PRs');
+ await this.db.services.BuildService.cleanupBuilds();
+ getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info('Cleanup: closed PRs completed');
+ } catch (error) {
+ getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'Cleanup: closed PRs processing failed');
+ }
+ });
};
/**
@@ -183,7 +188,10 @@ export default class PullRequestService extends BaseService {
const response = await github
.getPullRequestByRepositoryFullName(pullRequest.repository.fullName, pullRequest.pullRequestNumber)
.catch((error) => {
- logger.error(`${error}`);
+ getLogger({
+ fullName: pullRequest.repository.fullName,
+ pullRequestNumber: pullRequest.pullRequestNumber,
+ }).error({ error }, 'Failed to get pull request by repository full name');
return null;
});
diff --git a/src/server/services/repository.ts b/src/server/services/repository.ts
index 0699b16..cdd65c4 100644
--- a/src/server/services/repository.ts
+++ b/src/server/services/repository.ts
@@ -14,14 +14,10 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { Repository } from 'server/models';
import BaseService from './_service';
-const logger = rootLogger.child({
- filename: 'services/repository.ts',
-});
-
export default class RepositoryService extends BaseService {
/**
* Retrieve a Lifecycle Github Repository model. If it doesn't exist, create a new record.
@@ -59,7 +55,7 @@ export default class RepositoryService extends BaseService {
defaultEnvId,
}));
} catch (error) {
- logger.error(error);
+      getLogger({ githubRepositoryId }).error({ error }, 'Repository: find or create failed');
throw error;
}
@@ -86,7 +82,7 @@ export default class RepositoryService extends BaseService {
ownerId,
});
} catch (error) {
- logger.error(error);
+      getLogger({ githubRepositoryId }).error({ error }, 'Repository: find failed');
throw error;
}
diff --git a/src/server/services/service.ts b/src/server/services/service.ts
index a8550c6..081d6dd 100644
--- a/src/server/services/service.ts
+++ b/src/server/services/service.ts
@@ -14,17 +14,13 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import { Environment, Repository } from 'server/models';
import ServiceModel from 'server/models/Service';
import { CAPACITY_TYPE, DeployTypes } from 'shared/constants';
import BaseService from './_service';
import GlobalConfigService from './globalConfig';
-const logger = rootLogger.child({
- filename: 'services/service.ts',
-});
-
export default class ServiceService extends BaseService {
async findOrCreateDefaultService(environment: Environment, repository: Repository): Promise {
let services: ServiceModel[] = [];
@@ -32,8 +28,8 @@ export default class ServiceService extends BaseService {
try {
await environment.$fetchGraph('[defaultServices]');
if (environment.defaultServices != null && environment.defaultServices.length > 0) {
- logger.debug(
- `[${environment.name}] There is/are ${environment.defaultServices.length} default dependency service(s) in the database.`
+ getLogger({ environment: environment.name }).debug(
+ `Found ${environment.defaultServices.length} default dependency service(s) in database`
);
services = environment.defaultServices;
} else {
@@ -82,7 +78,7 @@ export default class ServiceService extends BaseService {
}
}
} catch (error) {
- logger.error(error);
+      getLogger({ environment: environment.name }).error({ error }, 'Service: find or create failed');
throw error;
}
diff --git a/src/server/services/ttlCleanup.ts b/src/server/services/ttlCleanup.ts
index 046804c..3e0f0ae 100644
--- a/src/server/services/ttlCleanup.ts
+++ b/src/server/services/ttlCleanup.ts
@@ -18,7 +18,7 @@ import Service from './_service';
import { Queue, Job } from 'bullmq';
import { QUEUE_NAMES } from 'shared/config';
import { redisClient } from 'server/lib/dependencies';
-import rootLogger from 'server/lib/logger';
+import { withLogContext, updateLogContext, getLogger, LogStage } from 'server/lib/logger';
import * as k8s from '@kubernetes/client-node';
import { updatePullRequestLabels, createOrUpdatePullRequestComment, getPullRequestLabels } from 'server/lib/github';
import { getKeepLabel, getDisabledLabel, getDeployLabel } from 'server/lib/utils';
@@ -27,12 +27,9 @@ import Metrics from 'server/lib/metrics';
import { DEFAULT_TTL_INACTIVITY_DAYS, DEFAULT_TTL_CHECK_INTERVAL_MINUTES } from 'shared/constants';
import GlobalConfigService from './globalConfig';
-const logger = rootLogger.child({
- filename: 'services/ttlCleanup.ts',
-});
-
interface TTLCleanupJob {
dryRun?: boolean;
+ correlationId?: string;
}
interface StaleEnvironment {
@@ -62,79 +59,60 @@ export default class TTLCleanupService extends Service {
* Process TTL cleanup queue jobs
*/
processTTLCleanupQueue = async (job: Job) => {
- try {
- // Always read fresh config to handle runtime config changes
- const config = await this.getTTLConfig();
-
- if (!config.enabled) {
- logger.info('[TTL] TTL cleanup is disabled, skipping');
- return;
- }
-
- // Job data takes precedence (for manual API calls), fall back to config for scheduled jobs
- const dryRun = job.data.dryRun ?? config.dryRun;
- const source = job.data.dryRun !== undefined ? 'api-override' : 'config';
-
- logger.info('[TTL] Starting TTL cleanup job', {
- dryRun,
- source,
- jobDataDryRun: job.data.dryRun,
- configDryRun: config.dryRun,
- });
-
- const staleEnvironments = await this.findStaleEnvironments(config.inactivityDays, config.excludedRepositories);
+ const { correlationId } = job.data || {};
- logger.info(`[TTL] Found ${staleEnvironments.length} stale environments`, {
- inactivityDays: config.inactivityDays,
- dryRun,
- });
+ return withLogContext({ correlationId: correlationId || `ttl-cleanup-${Date.now()}` }, async () => {
+ try {
+ // Always read fresh config to handle runtime config changes
+ const config = await this.getTTLConfig();
- let successCount = 0;
- let errorCount = 0;
+ if (!config.enabled) {
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).debug('TTL: disabled, skipping');
+ return;
+ }
- for (const env of staleEnvironments) {
- try {
- if (dryRun) {
- const dbLabels = this.parseLabels(env.pullRequest.labels);
-
- logger.info(`[TTL ${env.buildUUID}] [DRY RUN] Would clean up environment (NO ACTION TAKEN)`, {
- namespace: env.namespace,
- prNumber: env.pullRequest.pullRequestNumber,
- fullName: env.pullRequest.fullName,
- daysExpired: env.daysExpired,
- currentLabelsFromGitHub: env.currentLabels,
- labelsInDatabase: dbLabels,
- labelDriftDetected: env.hadLabelDrift,
- });
- successCount++;
- } else {
- logger.info(`[TTL ${env.buildUUID}] Cleaning up stale environment`, {
- namespace: env.namespace,
- prNumber: env.pullRequest.pullRequestNumber,
- fullName: env.pullRequest.fullName,
- });
- await this.cleanupStaleEnvironment(env, config.inactivityDays, config.commentTemplate, dryRun);
- successCount++;
- }
- } catch (error) {
- errorCount++;
- logger.error(`[TTL ${env.buildUUID}] Failed to cleanup environment`, {
- namespace: env.namespace,
- error,
+ // Job data takes precedence (for manual API calls), fall back to config for scheduled jobs
+ const dryRun = job.data.dryRun ?? config.dryRun;
+
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).info(`TTL: starting cleanup dryRun=${dryRun}`);
+
+ const staleEnvironments = await this.findStaleEnvironments(config.inactivityDays, config.excludedRepositories);
+
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).info(
+ `TTL: found stale environments count=${staleEnvironments.length} inactivityDays=${config.inactivityDays}`
+ );
+
+ let successCount = 0;
+ let errorCount = 0;
+
+ for (const env of staleEnvironments) {
+ await withLogContext({ buildUuid: env.buildUUID }, async () => {
+ try {
+ if (dryRun) {
+ getLogger().info(
+ `TTL: dry run would cleanup namespace=${env.namespace} pr=${env.pullRequest.pullRequestNumber}`
+ );
+ successCount++;
+ } else {
+ getLogger().info(`TTL: cleaning namespace=${env.namespace} pr=${env.pullRequest.pullRequestNumber}`);
+ await this.cleanupStaleEnvironment(env, config.inactivityDays, config.commentTemplate, dryRun);
+ successCount++;
+ }
+ } catch (error) {
+ errorCount++;
+ getLogger().error({ error }, `TTL: cleanup failed namespace=${env.namespace}`);
+ }
});
}
- }
- logger.info('[TTL] TTL cleanup job completed', {
- totalFound: staleEnvironments.length,
- successCount,
- errorCount,
- dryRun,
- });
- } catch (error) {
- logger.error('[TTL] Error in TTL cleanup job', { error });
- throw error;
- }
+ getLogger({ stage: LogStage.CLEANUP_COMPLETE }).info(
+ `TTL: completed found=${staleEnvironments.length} success=${successCount} errors=${errorCount}`
+ );
+ } catch (error) {
+ getLogger({ stage: LogStage.CLEANUP_FAILED }).error({ error }, 'TTL: cleanup job failed');
+ throw error;
+ }
+ });
};
private parseLabels(labels: string | string[] | null): string[] {
@@ -180,7 +158,7 @@ export default class TTLCleanupService extends Service {
const namespaces = namespacesResponse.body.items;
- logger.info(`[TTL] Scanning ${namespaces.length} namespaces with TTL enabled`);
+ getLogger({ stage: LogStage.CLEANUP_STARTING }).info(`TTL: scanning namespaces count=${namespaces.length}`);
// Fetch dynamic labels once at the start
const keepLabel = await getKeepLabel();
@@ -197,7 +175,7 @@ export default class TTLCleanupService extends Service {
const expireAtUnix = labels['lfc/ttl-expireAtUnix'];
if (!expireAtUnix) {
- logger.debug(`[TTL] Namespace ${nsName} has no TTL expiration label, skipping`);
+ getLogger().debug(`Namespace ${nsName} has no TTL expiration label, skipping`);
continue;
}
@@ -209,51 +187,52 @@ export default class TTLCleanupService extends Service {
const daysExpired = Math.floor((now - expireTime) / (1000 * 60 * 60 * 24));
- const buildUUID = labels['lfc/uuid']; // Use lfc/uuid (intentional difference)
+ const buildUUID = labels['lfc/uuid'];
if (!buildUUID) {
- logger.warn(`[TTL] Namespace ${nsName} has no lfc/uuid label, skipping`);
+ getLogger().warn(`TTL: namespace missing uuid label namespace=${nsName}`);
continue;
}
- logger.debug(`[TTL ${buildUUID}] Namespace ${nsName} expired ${daysExpired} days ago`);
+ updateLogContext({ buildUuid: buildUUID });
+
+ getLogger().debug(`Namespace ${nsName} expired ${daysExpired} days ago`);
const build = await this.db.models.Build.query()
.findOne({ uuid: buildUUID })
.withGraphFetched('[pullRequest.repository]');
if (!build) {
- logger.warn(`[TTL ${buildUUID}] No build found for namespace ${nsName}, skipping`);
+ getLogger().warn(`TTL: build not found namespace=${nsName}`);
continue;
}
if (build.status === 'torn_down' || build.status === 'pending') {
- logger.debug(`[TTL ${buildUUID}] Build is already ${build.status}, skipping`);
+ getLogger().debug(`Build is already ${build.status}, skipping`);
continue;
}
if (build.isStatic) {
- logger.debug(`[TTL ${buildUUID}] Build is static environment, skipping`);
+ getLogger().debug(`Build is static environment, skipping`);
continue;
}
const pullRequest = build.pullRequest;
if (!pullRequest) {
- logger.warn(`[TTL ${buildUUID}] No pull request found, skipping`);
+ getLogger().warn('TTL: pull request not found');
continue;
}
if (pullRequest.status !== 'open') {
- logger.debug(`[TTL ${buildUUID}] PR is ${pullRequest.status}, skipping`);
+ getLogger().debug(`PR is ${pullRequest.status}, skipping`);
continue;
}
if (excludedRepositories.length > 0 && excludedRepositories.includes(pullRequest.fullName)) {
- logger.debug(`[TTL ${buildUUID}] Repository ${pullRequest.fullName} is excluded from TTL cleanup, skipping`);
+ getLogger().debug(`Repository ${pullRequest.fullName} is excluded from TTL cleanup, skipping`);
continue;
}
- // Fetch current labels from GitHub to avoid stale data due to webhook incidents
let currentLabels: string[];
try {
currentLabels = await getPullRequestLabels({
@@ -262,38 +241,30 @@ export default class TTLCleanupService extends Service {
fullName: pullRequest.fullName,
});
- logger.debug(
- `[TTL ${buildUUID}] Fetched ${currentLabels.length} labels from GitHub: ${currentLabels.join(', ')}`
- );
+ getLogger().debug(`Fetched ${currentLabels.length} labels from GitHub: ${currentLabels.join(', ')}`);
- // Sync labels back to DB if they differ (self-healing)
const dbLabels = this.parseLabels(pullRequest.labels);
if (JSON.stringify(currentLabels.sort()) !== JSON.stringify(dbLabels.sort())) {
- logger.info(`[TTL ${buildUUID}] Label drift detected, syncing to database`, {
- dbLabels,
- currentLabels,
- });
+ getLogger().debug('TTL: label drift detected, syncing to DB');
await pullRequest.$query().patch({
labels: JSON.stringify(currentLabels) as any,
});
}
} catch (error) {
- logger.warn(`[TTL ${buildUUID}] Failed to fetch labels from GitHub, falling back to DB: ${error}`);
- // Fallback to DB labels if GitHub API fails
+ getLogger().warn({ error }, 'TTL: GitHub labels fetch failed, using DB');
currentLabels = this.parseLabels(pullRequest.labels);
}
if (currentLabels.includes(keepLabel)) {
- logger.debug(`[TTL ${buildUUID}] Has ${keepLabel} label (verified from GitHub), skipping`);
+ getLogger().debug(`Has ${keepLabel} label (verified from GitHub), skipping`);
continue;
}
if (currentLabels.includes(disabledLabel)) {
- logger.debug(`[TTL ${buildUUID}] Already has ${disabledLabel} label (verified from GitHub), skipping`);
+ getLogger().debug(`Already has ${disabledLabel} label (verified from GitHub), skipping`);
continue;
}
- // Store current labels and drift status for dry-run reporting
const dbLabels = this.parseLabels(pullRequest.labels);
const hadLabelDrift = JSON.stringify(currentLabels.sort()) !== JSON.stringify(dbLabels.sort());
@@ -308,7 +279,10 @@ export default class TTLCleanupService extends Service {
});
}
} catch (error) {
- logger.error('[TTL] Error scanning K8s namespaces for stale environments', { error });
+ getLogger({ stage: LogStage.CLEANUP_FAILED }).error(
+ { error },
+ 'Error scanning K8s namespaces for stale environments'
+ );
throw error;
}
@@ -328,12 +302,9 @@ export default class TTLCleanupService extends Service {
const buildUuid = build.uuid;
const repository = pullRequest.repository;
- logger.info(`[TTL ${buildUuid}] Cleaning up stale environment`, {
- namespace,
- prNumber: pullRequest.pullRequestNumber,
- fullName: pullRequest.fullName,
- daysExpired: env.daysExpired,
- });
+ updateLogContext({ buildUuid });
+
+ getLogger().info(`TTL: cleaning namespace=${namespace} pr=${pullRequest.pullRequestNumber}`);
// Fetch dynamic labels at runtime
const deployLabel = await getDeployLabel();
@@ -351,9 +322,7 @@ export default class TTLCleanupService extends Service {
labels: updatedLabels,
});
- logger.info(`[TTL ${buildUuid}] Updated labels: removed ${deployLabel}, added ${disabledLabel}`, {
- prNumber: pullRequest.pullRequestNumber,
- });
+ getLogger().debug(`TTL: labels updated PR#${pullRequest.pullRequestNumber}`);
const commentMessage = await this.generateCleanupComment(inactivityDays, commentTemplate);
@@ -366,9 +335,7 @@ export default class TTLCleanupService extends Service {
etag: null,
});
- logger.info(`[TTL ${buildUuid}] Posted cleanup comment to PR`, {
- prNumber: pullRequest.pullRequestNumber,
- });
+ getLogger().debug(`TTL: cleanup comment posted PR#${pullRequest.pullRequestNumber}`);
await pullRequest.$query().patch({
labels: JSON.stringify(updatedLabels) as any,
@@ -378,11 +345,10 @@ export default class TTLCleanupService extends Service {
const metrics = new Metrics('ttl.cleanup', { repositoryName: pullRequest.fullName });
metrics.increment('total', { dry_run: dryRun.toString() });
} catch (error) {
- logger.error(`[TTL ${buildUuid}] Failed to cleanup stale environment`, {
- namespace,
- prNumber: pullRequest.pullRequestNumber,
- error,
- });
+ getLogger().error(
+ { error },
+ `Failed to cleanup stale environment: namespace=${namespace} prNumber=${pullRequest.pullRequestNumber}`
+ );
throw error;
}
}
@@ -420,7 +386,7 @@ export default class TTLCleanupService extends Service {
const config = await this.getTTLConfig();
if (!config.enabled) {
- logger.info('[TTL] TTL cleanup is disabled in global config');
+ getLogger().debug('TTL: disabled in config');
return;
}
@@ -436,8 +402,6 @@ export default class TTLCleanupService extends Service {
}
);
- logger.info(
- `[TTL] TTL cleanup job scheduled every ${config.checkIntervalMinutes} minutes (${config.inactivityDays} day TTL, dryRun: ${config.dryRun})`
- );
+ getLogger().info(`TTL: scheduled interval=${config.checkIntervalMinutes}min`);
}
}
diff --git a/src/server/services/webhook.ts b/src/server/services/webhook.ts
index caf55e3..80ab2d6 100644
--- a/src/server/services/webhook.ts
+++ b/src/server/services/webhook.ts
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-import rootLogger from 'server/lib/logger';
+import { withLogContext, getLogger, LogStage, updateLogContext } from 'server/lib/logger';
import BaseService from './_service';
import { Build, PullRequest } from 'server/models';
import * as YamlService from 'server/models/yaml';
@@ -28,10 +28,6 @@ import { redisClient } from 'server/lib/dependencies';
import { validateWebhook } from 'server/lib/webhook/webhookValidator';
import { executeDockerWebhook, executeCommandWebhook } from 'server/lib/webhook';
-const logger = rootLogger.child({
- filename: 'services/webhook.ts',
-});
-
export class WebhookError extends LifecycleError {
constructor(msg: string, uuid: string = null, service: string = null) {
super(uuid, service, msg);
@@ -52,6 +48,10 @@ export default class WebhookService extends BaseService {
throw new WebhookError('Pull Request and Build cannot be null when upserting webhooks');
}
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
+
await pullRequest.$fetchGraph('repository');
// if build is in classic mode, we should not proceed with yaml webhooks since db webhooks are not supported anymore
@@ -66,10 +66,10 @@ export default class WebhookService extends BaseService {
if (yamlConfig?.environment?.webhooks != null) {
webhooks = yamlConfig.environment.webhooks;
await build.$query().patch({ webhooksYaml: JSON.stringify(webhooks) });
- logger.child({ webhooks }).info(`[BUILD ${build.uuid}] Updated build with webhooks from config`);
+ getLogger().info(`Webhook: config updated webhooks=${JSON.stringify(webhooks)}`);
} else {
await build.$query().patch({ webhooksYaml: null });
- logger.info(`[BUILD ${build.uuid}] No webhooks found in config`);
+ getLogger().info('Webhook: config empty');
}
}
return webhooks;
@@ -80,11 +80,13 @@ export default class WebhookService extends BaseService {
* @param build the build for which we want to run webhooks against
*/
async runWebhooksForBuild(build: Build): Promise {
+ updateLogContext({ buildUuid: build.uuid });
+
// Check feature flag - if disabled, skip all webhooks
// Only skips if explicitly set to false. If undefined/missing, webhooks execute (default behavior)
const { features } = await this.db.services.GlobalConfig.getAllConfigs();
if (features?.webhooks === false) {
- logger.debug(`[BUILD ${build.uuid}] Webhooks feature flag is disabled. Skipping webhook execution.`);
+ getLogger().debug('Webhooks feature flag is disabled, skipping webhook execution');
return;
}
@@ -94,15 +96,13 @@ export default class WebhookService extends BaseService {
case BuildStatus.TORN_DOWN:
break;
default:
- logger.debug(`[BUILD ${build.uuid}] Skipping Lifecycle Webhooks execution for status: ${build.status}`);
+ getLogger().debug(`Skipping Lifecycle Webhooks execution for status: ${build.status}`);
return;
}
// if build is not full yaml and no webhooks defined in YAML config, we should not run webhooks (no more db webhook support)
if (!build.enableFullYaml && build.webhooksYaml == null) {
- logger.debug(
- `[BUILD ${build.uuid}] Skipping Lifecycle Webhooks(non yaml config build) execution for status: ${build.status}`
- );
+ getLogger().debug(`Skipping Lifecycle Webhooks (non yaml config build) execution for status: ${build.status}`);
return;
}
const webhooks: YamlService.Webhook[] = JSON.parse(build.webhooksYaml);
@@ -114,14 +114,19 @@ export default class WebhookService extends BaseService {
const configFileWebhooks: YamlService.Webhook[] = webhooks.filter((webhook) => webhook.state === build.status);
// if no webhooks defined in YAML config, we should not run webhooks
if (configFileWebhooks != null && configFileWebhooks.length < 1) {
- logger.info(`[BUILD ${build.uuid}] No webhooks found to be triggered for build status: ${build.status}`);
+ getLogger().info(`Webhook: skipped reason=noMatch status=${build.status}`);
return;
}
- logger.info(`[BUILD ${build.uuid}] Triggering for build status: ${build.status}`);
+ getLogger().info(`Webhook: triggering status=${build.status}`);
for (const webhook of configFileWebhooks) {
- logger.info(`[BUILD ${build.uuid}] Running webhook: ${webhook.name}`);
- await this.runYamlConfigFileWebhookForBuild(webhook, build);
+ await withLogContext({ webhookName: webhook.name, webhookType: webhook.type }, async () => {
+ getLogger().info(`Webhook: running name=${webhook.name}`);
+ await this.runYamlConfigFileWebhookForBuild(webhook, build);
+ });
}
+ getLogger({ stage: LogStage.WEBHOOK_COMPLETE }).info(
+ `Webhook: completed count=${configFileWebhooks.length} status=${build.status}`
+ );
}
/**
@@ -146,9 +151,7 @@ export default class WebhookService extends BaseService {
switch (webhook.type) {
case 'codefresh': {
const buildId: string = await this.db.services.Codefresh.triggerYamlConfigWebhookPipeline(webhook, data);
- logger
- .child({ url: `https://g.codefresh.io/build/${buildId}` })
- .info(`[BUILD ${build.uuid}] Webhook (${webhook.name}) triggered: ${buildId}`);
+ getLogger().info(`Webhook: triggered buildId=${buildId} url=https://g.codefresh.io/build/${buildId}`);
metadata = {
link: `https://g.codefresh.io/build/${buildId}`,
};
@@ -176,11 +179,11 @@ export default class WebhookService extends BaseService {
metadata: { status: 'starting' },
status: 'executing',
});
- logger.info(`[BUILD ${build.uuid}] Docker webhook (${webhook.name}) invoked`);
+ getLogger().info(`Webhook: invoking`);
// Execute webhook (this waits for completion)
const result = await executeDockerWebhook(webhook, build, data);
- logger.info(`[BUILD ${build.uuid}] Docker webhook (${webhook.name}) executed: ${result.jobName}`);
+ getLogger().info(`Webhook: executed jobName=${result.jobName}`);
// Update the invocation record with final status
await invocation.$query().patch({
@@ -206,11 +209,11 @@ export default class WebhookService extends BaseService {
metadata: { status: 'starting' },
status: 'executing',
});
- logger.info(`[BUILD ${build.uuid}] Command webhook (${webhook.name}) invoked`);
+ getLogger().info(`Webhook: invoking`);
// Execute webhook (this waits for completion)
const result = await executeCommandWebhook(webhook, build, data);
- logger.info(`[BUILD ${build.uuid}] Command webhook (${webhook.name}) executed: ${result.jobName}`);
+ getLogger().info(`Webhook: executed jobName=${result.jobName}`);
// Update the invocation record with final status
await invocation.$query().patch({
@@ -228,9 +231,9 @@ export default class WebhookService extends BaseService {
throw new Error(`Unsupported webhook type: ${webhook.type}`);
}
- logger.debug(`[BUILD ${build.uuid}] Webhook history added for runUUID: ${build.runUUID}`);
+ getLogger().debug(`Webhook: history added runUUID=${build.runUUID}`);
} catch (error) {
- logger.error(`[BUILD ${build.uuid}] Error invoking webhook: ${error}`);
+      getLogger().error({ error }, 'Webhook: invocation failed');
// Still create a failed invocation record
await this.db.models.WebhookInvocations.create({
@@ -259,14 +262,22 @@ export default class WebhookService extends BaseService {
});
processWebhookQueue = async (job) => {
- const buildId = job.data.buildId;
- const build = await this.db.models.Build.query().findOne({
- id: buildId,
+ const { buildId, sender, correlationId, _ddTraceContext } = job.data;
+
+ return withLogContext({ correlationId, sender, _ddTraceContext }, async () => {
+ const build = await this.db.models.Build.query().findOne({
+ id: buildId,
+ });
+
+ if (build?.uuid) {
+ updateLogContext({ buildUuid: build.uuid });
+ }
+
+ try {
+ await this.db.services.Webhook.runWebhooksForBuild(build);
+ } catch (e) {
+        getLogger({ stage: LogStage.WEBHOOK_PROCESSING }).error({ error: e }, 'Webhook: queue processing failed');
+ }
});
- try {
- await this.db.services.Webhook.runWebhooksForBuild(build);
- } catch (e) {
- logger.error(`[BUILD ${build.uuid}] Failed to invoke the webhook: ${e}`);
- }
};
}
diff --git a/src/shared/utils.ts b/src/shared/utils.ts
index 9decd06..4221162 100644
--- a/src/shared/utils.ts
+++ b/src/shared/utils.ts
@@ -21,13 +21,9 @@ import { Deploy } from 'server/models';
import Fastly from 'server/lib/fastly';
import { Link, FeatureFlags } from 'shared/types';
import { DD_URL, DD_LOG_URL } from 'shared/constants';
-import rootLogger from 'server/lib/logger';
+import { getLogger } from 'server/lib/logger';
import Model from 'server/models/_Model';
-const logger = rootLogger.child({
- filename: 'src/shared/utils.ts',
-});
-
/**
* determineIfFastlyIsUsed
* @description determines if fastly is used in a given deploy
@@ -152,7 +148,7 @@ export const constructFastlyBuildLink = async (
const { href: url = '' } = (await fastlyFn(fastlyBuildId, fastlyServiceType)) || {};
return url ? { name: 'Fastly Dashboard', url } : {};
} catch (err) {
- logger.error(`constructFastlyBuildLink: there was an error constructing the fastly build link: ${err}`);
+ getLogger().error({ error: err }, 'Fastly: build link construction failed');
return {};
}
};
diff --git a/sysops/tilt/ngrok-keycloak.yaml b/sysops/tilt/ngrok-keycloak.yaml
index 11d5dff..aced647 100644
--- a/sysops/tilt/ngrok-keycloak.yaml
+++ b/sysops/tilt/ngrok-keycloak.yaml
@@ -29,15 +29,14 @@ spec:
spec:
containers:
- name: ngrok
- image: ngrok/ngrok:latest
+ image: wernight/ngrok
command: ['ngrok']
args:
- 'http'
+ - '--authtoken=$(NGROK_AUTHTOKEN)'
- '--hostname=$(NGROK_KEYCLOAK_DOMAIN)'
- '--log=stdout'
- - '--log-level=debug'
- 'lifecycle-keycloak:8080' # point at the Keycloak Service's name & port
-
envFrom:
- secretRef:
name: ngrok-secret
diff --git a/sysops/tilt/ngrok.yaml b/sysops/tilt/ngrok.yaml
index b8fc02c..40c46c6 100644
--- a/sysops/tilt/ngrok.yaml
+++ b/sysops/tilt/ngrok.yaml
@@ -29,15 +29,14 @@ spec:
spec:
containers:
- name: ngrok
- image: ngrok/ngrok:latest
+ image: wernight/ngrok
command: ['ngrok']
args:
- 'http'
+ - '--authtoken=$(NGROK_AUTHTOKEN)'
- '--hostname=$(NGROK_LIFECYCLE_DOMAIN)'
- '--log=stdout'
- - '--log-level=debug'
- 'lifecycle-web:80' # point at the K8s Service's name & port
-
envFrom:
- secretRef:
name: ngrok-secret
diff --git a/tsconfig.json b/tsconfig.json
index f3ead8c..9ee33a3 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -28,9 +28,15 @@
"jsx": "preserve",
"incremental": true,
"experimentalDecorators": true,
- "emitDecoratorMetadata": true
+ "emitDecoratorMetadata": true,
+ "plugins": [
+ {
+ "name": "next"
+ }
+ ],
+ "strictNullChecks": true
},
- "include": ["src/**/*", "scripts/**/*", "package.json"],
+ "include": ["src/**/*", "scripts/**/*", "package.json", ".next/types/**/*.ts"],
"exclude": ["node_modules", "**/node_modules/*"],
"ts-node": {
"compilerOptions": {
diff --git a/ws-server.ts b/ws-server.ts
index e269767..ff64f01 100644
--- a/ws-server.ts
+++ b/ws-server.ts
@@ -31,7 +31,7 @@ import { createServer, IncomingMessage, ServerResponse } from 'http';
import { parse } from 'url';
import next from 'next';
import { WebSocketServer, WebSocket } from 'ws';
-import rootLogger from './src/server/lib/logger';
+import { rootLogger } from './src/server/lib/logger';
import { streamK8sLogs, AbortHandle } from './src/server/lib/k8sStreamer';
const dev = process.env.NODE_ENV !== 'production';