Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions .github/TEMPLATES/secret-mapping-opencrvs-deps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,12 +33,15 @@ kibana-users-secret:
- KIBANA_USERNAME
- KIBANA_PASSWORD

# Traefik static SSL certificate
# Backward compatible with the existing implementation.
# See: https://documentation.opencrvs.org/v1.8/setup/3.-installation/3.3-set-up-a-server-hosted-environment/3.3.5-setup-dns-a-records/4.3.2.3-static-tls-certificates
traefik-cert:
type: tls
namespace: traefik
data:
- TRAEFIK_CERT: cert
- TRAEFIK_KEY: key
- SSL_CRT: cert
- SSL_KEY: key

# If backup is configured then workflow will use GitHub secrets for current environment
# If restore is configured then workflow will fetch secrets from source environment (usually production)
Expand Down
8 changes: 8 additions & 0 deletions .github/workflows/deploy-opencrvs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ on:
type: string
countryconfig-image-tag:
type: string
data-seed-enabled:
type: boolean
environment:
type: string
workflow_dispatch:
Expand All @@ -19,6 +21,11 @@ on:
description: "Tag of the countryconfig image"
required: true
default: "v1.9.6"
data-seed-enabled:
description: "Enable data seeding during deployment"
required: false
default: true
type: boolean
environment:
description: "Target environment"
required: true
Expand Down Expand Up @@ -140,6 +147,7 @@ jobs:
--set countryconfig.image.tag="$COUNTRYCONFIG_IMAGE_TAG" \
--set countryconfig.image.name="$COUNTRYCONFIG_IMAGE_NAME" \
--set data_seed.env.ACTIVATE_USERS="${{ vars.ACTIVATE_USERS || 'false' }}" \
--set data_seed.enabled="${{ inputs.data-seed-enabled }}" \
--set hostname=${{ vars.DOMAIN }} 2>&1 ; STATUS=$?;
kill $STERN_PID 2>/dev/null || true
exit $STATUS
Expand Down
72 changes: 72 additions & 0 deletions infrastructure/environments/swarm-to-k8s.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import * as path from 'path';
import kleur from 'kleur'
import { error, info, log, success, warn } from './logger'
import { updateWorkflowEnvironments } from './update-workflows';
import { generateInventory, copyChartsValues, extractAndModifyUsers, extractWorkerNodes, extractBackupNode, dockerManagerFirst, readYamlFile } from './templates'



// Migration entry point: reads an old Swarm Ansible inventory and produces
// the equivalent Kubernetes artifacts (inventory file, Helm chart values,
// and refreshed GitHub workflow environment lists).
(async () => {
  log(kleur.bold(
    "------------------------------------------------\n" +
    "OpenCRVS Infrastructure migration script: \n" +
    "Migrating Swarm configurations to Kubernetes\n" +
    "------------------------------------------------\n"
  ));
  // Configuration comes from environment variables:
  //   ENVIRONMENT_TYPE   - optional, defaults to 'production'
  //   ENVIRONMENT        - name of the environment to migrate (mandatory)
  //   OLD_INVENTORY_PATH - directory holding the old Swarm inventories (mandatory)
  const environment_type = process.env.ENVIRONMENT_TYPE || 'production';
  const environment = process.env.ENVIRONMENT || '';
  if (!environment) {
    error('\n', 'Environment variable ENVIRONMENT is not set. Exiting.');
    process.exit(1);
  }
  // backup / jumpbox hosts are intentionally excluded from automated
  // migration (handled manually, see migration notes).
  if (["backup", "jumpbox"].includes(environment)) {
    info(` > ${environment} environment will not be migrated, see migration notes`)
    process.exit(0);
  }
  log(kleur.bold().underline('Migration properties:'));
  log(` ✓ Environment: ${environment}`)

  const old_inventory_path = process.env.OLD_INVENTORY_PATH || '';
  if (!old_inventory_path) {
    error('\n', 'Environment variable OLD_INVENTORY_PATH is not set. Exiting.');
    log('\n', 'Old inventory path is required to read existing Swarm configurations.');
    process.exit(1);
  }
  // The old inventory is expected at <OLD_INVENTORY_PATH>/<environment>.yml
  const ansible_inventory = path.join(old_inventory_path, environment + '.yml');
  const data = readYamlFile(ansible_inventory) as any;
  log(` ✓ Loaded old inventory file: ${ansible_inventory}`);

  // Derive the new cluster topology from the old Swarm inventory groups.
  const master = dockerManagerFirst(data) || ''
  log(` ✓ Kubernetes API Host (Docker Manager): ${master}`);
  const users = extractAndModifyUsers(data);
  const worker_nodes = extractWorkerNodes(data);
  log(` ✓ Worker nodes: ${worker_nodes.join(', ')}`);
  const backup_host = extractBackupNode(data);
  log(` ✓ Backup host: ${backup_host}`);

  // Write the new Kubernetes inventory file for this environment.
  generateInventory(
    environment,
    {
      worker_nodes: worker_nodes,
      users: users,
      backup_host: backup_host,
      kube_api_host: master
    }
  )

  // Render the Helm chart values for this environment from templates.
  // Flags are string-valued ("true"/"false") because they are substituted
  // into YAML templates.
  copyChartsValues(
    environment,
    {
      env: environment,
      environment_type: environment_type,
      // FIXME: In general that should be environment_type,
      // Hardcode like this blocks us from being generic:
      // https://github.com/opencrvs/opencrvs-core/issues/11171
      is_qa_env: environment !== 'production' ? "true" : "false",
      backup_enabled: environment === 'production' ? "true" : "false",
      restore_enabled: environment === 'staging' ? "true" : "false",
      restore_environment_name: environment === 'staging' ? "production" : ""
    }
  )
  // Finally refresh the environment option lists in the GitHub workflows.
  await updateWorkflowEnvironments();
})();
137 changes: 66 additions & 71 deletions infrastructure/environments/templates.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,59 @@
import fs from "fs";
import path from "path";
import { log } from './logger'
/**
* Replace placeholders in file content.
* Customize the replacements map to your needs.
*/
function replacePlaceholders(content: string, replacements: Record<string, string>): string {
let updated = content;
for (const [key, value] of Object.entries(replacements)) {
const regex = new RegExp(`\\{\\{${key}\\}\\}`, "g"); // matches ${KEY}
let clear_value = String(value).replace(/[\x00-\x1F\x7F]/g, ""); // remove control characters
updated = updated.replace(regex, clear_value);
import { log, success, warn } from './logger'
import * as yaml from 'js-yaml';
import Handlebars from 'handlebars';

// Register a Handlebars helper that offsets a numeric index by 2.
// NOTE(review): presumably maps a zero-based worker index to a data-role
// label number (the inventory template reserves role `data1` for the first
// worker) — confirm against the templates that invoke it.
Handlebars.registerHelper('data_label_idx', function (value) {
  // Explicit radix 10: avoids surprises with leading-zero or hex-like strings.
  return parseInt(value, 10) + 2;
});

/**
 * Read a YAML document from disk and return its parsed form.
 * The result shape depends entirely on the file contents.
 */
export function readYamlFile(filePath: any): any {
  return yaml.load(fs.readFileSync(filePath, "utf8"));
}


/**
 * Extract the `users` list from the old Swarm Ansible inventory
 * (`all.vars.users`).
 *
 * @param data - parsed YAML of the old inventory file
 * @returns the users array, or an empty array when none are defined
 */
export function extractAndModifyUsers(data: any): any {
  if (!data?.all?.vars?.users) {
    // Bug fix: previously returned `{ users: [] }` on this path, which did
    // not match the array returned below and broke downstream consumers
    // expecting an array.
    return [];
  }
  return data.all.vars.users;
}

/**
 * Resolve the future Kubernetes API host: the address of the single
 * `docker-manager-first` host in the old Swarm inventory.
 *
 * @param data - parsed YAML of the old inventory file
 * @returns the manager's ansible_host, or '' unless exactly one manager
 *          with an ansible_host is present
 * @throws Error when the inventory lacks a docker-manager-first.hosts section
 */
export function dockerManagerFirst(data: any): string {
  const hosts = data?.['docker-manager-first']?.hosts;
  if (!hosts) {
    throw new Error('Invalid YAML structure: missing docker-manager-first.hosts');
  }
  const managers = Object.values(hosts)
    .map((host: any) => host.ansible_host)
    .filter((addr: any) => Boolean(addr));
  return managers.length === 1 ? managers[0] : '';
}

/**
 * Extract the backup host address from the old inventory's `backups` group.
 *
 * @param data - parsed YAML of the old inventory file
 * @returns the backup host's ansible_host, or '' unless exactly one backup
 *          host with an ansible_host is present
 */
export function extractBackupNode(data: any): string {
  const hosts = data?.['backups']?.hosts;
  if (!hosts) {
    return '';
  }
  const addresses = Object.values(hosts)
    .map((host: any) => host.ansible_host)
    .filter((addr: any) => Boolean(addr));
  return addresses.length === 1 ? addresses[0] : '';
}

/**
 * Collect the ansible_host addresses of all Swarm worker nodes
 * (the `docker-workers` group) from the old inventory.
 *
 * @param data - parsed YAML of the old inventory file
 * @returns worker host addresses; empty when the group is absent
 */
export function extractWorkerNodes(data: any): string[] {
  const hosts = data?.['docker-workers']?.hosts;
  if (!hosts) {
    return [];
  }
  const workers: string[] = [];
  for (const entry of Object.values(hosts) as any[]) {
    if (entry.ansible_host) {
      workers.push(entry.ansible_host);
    }
  }
  return workers;
}

/**
Expand All @@ -23,7 +64,7 @@ function replacePlaceholders(content: string, replacements: Record<string, strin
/**
* Recursively copy a directory and replace placeholders in text files.
*/
export function copyChartsValues(env: string, replacements: Record<string, string>) {
export function copyChartsValues(env: string, values: Record<string, string>) {
const srcDir = path.resolve(__dirname, "templates", "charts-values");
const destDir = path.resolve(__dirname, "..", "..", "environments", env);
fs.mkdirSync(destDir, { recursive: true });
Expand All @@ -38,22 +79,23 @@ export function copyChartsValues(env: string, replacements: Record<string, strin
}
} else {
if (fs.existsSync(dest)) {
log(`⚠️ Skipping existing file: ${dest}`);
warn(` ⚠️ Skipping existing file: ${dest}`);
return;
}
// read file
const content = fs.readFileSync(src, "utf8");

// replace placeholders
const updated = replacePlaceholders(content, replacements);

const template = Handlebars.compile(content);
const updated = template(values);
// write updated file
fs.writeFileSync(dest, updated, "utf8");
log(` Created: ${dest}`);
log(` Created: ${dest}`);
}
}

console.log(`\n📋 Copying charts-values templates to ${destDir}:`);
copyRecursive(srcDir, destDir);
success(`✅ Completed copying charts-values.\n`);
}

/**
Expand All @@ -74,63 +116,16 @@ export function generateInventory(env: string, values: Record<string, any>){

// Check if output file already exists
if (fs.existsSync(outputPath)) {
log(`⚠️ Skipping ${templatePath}, file already exists at ${outputPath}`);
warn(` ⚠️ Skipping ${templatePath}, file already exists at ${outputPath}`);
return;
}
let template = fs.readFileSync(templatePath, "utf-8");
const templateFile = fs.readFileSync(templatePath, "utf-8");
const template = Handlebars.compile(templateFile);
values['single_node'] = (values['worker_nodes'].length > 0 || values['backup_host']) ? "false" : "true";

// Extract worker nodes and backup host from values
let worker_nodes = values['worker_nodes'].map((e: string) => String(e)
.replace(/[\x00-\x1F\x7F]/g, ""))
.filter((e: string) => e.length > 0);

// Generate workers block
if (worker_nodes && worker_nodes.length > 0) {
let workersBlock = `
# Workers section is optional, for single node cluster feel free to remove this section
# section can be added later
# more workers can be added later as well
workers:
hosts:`;

worker_nodes.forEach((host: string, index: number) => {
const isFirstWorker = index === 0;
workersBlock += `
worker${index}:
ansible_host: ${host}${isFirstWorker ? `
labels:
# By default all datastores are deployed to worker node with role data1
role: data1` : ''}
`;
});

template = template.replace('{{WORKERS_BLOCK}}', workersBlock);
} else {
// No worker nodes, remove the placeholder
template = template.replace('{{WORKERS_BLOCK}}', '');
}


// Generate backup block if backup_host is provided
const backupHost = String(values['backup_host']).replace(/[\x00-\x1F\x7F]/g, "");
let backupBlock = '';
if (backupHost.length > 0) {
backupBlock = `
# backup section is optional, feel free to remove if backups are not enabled
# section can be added later
backup:
hosts:
backup1:
ansible_host: ${backupHost}
`;
}
template = template.replace('{{BACKUP_BLOCK}}', backupBlock);
const updated = template(values);

// Determine if single-node or multi-node
values['single_node'] = (worker_nodes.length > 0 || backupHost) ? "false" : "true";
const updated = replacePlaceholders(template, values);
values
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
fs.writeFileSync(outputPath, updated);
log(`✅ Generated inventory file at ${outputPath}`);
log(`\n✅ Generated inventory file at ${outputPath}\n`);
}
33 changes: 12 additions & 21 deletions infrastructure/environments/update-workflows.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { readFileSync, writeFileSync, statSync, existsSync } from 'fs';
import { basename, join } from 'path';
import * as glob from 'glob';
import * as yaml from 'js-yaml';

import { error, info, log, success, warn } from './logger'
interface WorkflowConfig {
workflows: string[];
path: string;
Expand All @@ -18,9 +18,7 @@ async function extractInfrastructureNames(): Promise<string[]> {
console.log('⚠️ Warning: No environment directories found in infrastructure/server-setup/inventory/');
return [];
}
console.log('List of existing infrastructure configurations:');
console.log(infraEnvironments.join(', '));

log('🔍 Found infrastructure configurations:', infraEnvironments.join(', '));
return infraEnvironments;
}

Expand All @@ -37,9 +35,7 @@ async function extractEnvironmentNames(): Promise<string[]> {
return [];
}

console.log('\nList of existing environment configurations:');
console.log(environments.join(', '));

log('🔍 Found OpenCRVS configurations:', environments.join(', '));
return environments;
}

Expand Down Expand Up @@ -72,8 +68,6 @@ async function updateWorkflows(
const { workflows } = config;

for (const workflowPath of workflows) {
console.log(`\nUpdating ${workflowPath} with: [${envList.join(', ')}]`);

try {
const fileContents = readFileSync(workflowPath, 'utf8');

Expand All @@ -87,25 +81,22 @@ async function updateWorkflows(
const updatedContent = updateOptionsInYaml(fileContents, envList);

writeFileSync(workflowPath, updatedContent, 'utf8');
console.log(`✓ Successfully updated ${workflowPath}`);
log(` ✓ Successfully updated ${workflowPath}`);
} catch (error) {
console.error(`\n⚠️ Error updating ${workflowPath} with environments: [${envList.join(', ')}]`);
console.error(`✗ Failed to update ${workflowPath}:`, error);
throw error;
}
}
}

export async function updateWorkflowEnvironments(): Promise<void> {
try {
console.log('🔄 Updating workflow environments...\n');

try {
// Extract infrastructure names
const infraEnvironments = await extractInfrastructureNames();

// Extract environment names (only directories)
const environments = await extractEnvironmentNames();

// Update workflows with infrastructure configurations
console.log('🔄 Updating infrastructure workflows:');
await updateWorkflows(infraEnvironments, {
workflows: [
'.github/workflows/provision.yml',
Expand All @@ -114,7 +105,9 @@ export async function updateWorkflowEnvironments(): Promise<void> {
path: 'on.workflow_dispatch.inputs.environment.options'
});

console.log(`\n📋 Updating workflows...`);
// Extract environment names (only directories)
const environments = await extractEnvironmentNames();

const workflows = [
'.github/workflows/deploy-dependencies.yml',
'.github/workflows/deploy-opencrvs.yml',
Expand All @@ -123,16 +116,14 @@ export async function updateWorkflowEnvironments(): Promise<void> {
'.github/workflows/k8s-reindex.yml',
'.github/workflows/github-to-k8s-sync-env.yml'
];
log("📋 Updating OpenCRVS application workflows:");
await updateWorkflows(environments, {
workflows,
path: 'on.workflow_dispatch.inputs.environment.options'
});


console.log('\n✅ All workflows updated successfully!');
console.log('\n💡 Review the changes and commit them when ready.');

success('✅ All workflows updated successfully!');
} catch (error) {
console.error('\n❌ Error updating workflows:', error);
process.exit(1);
Expand Down
Loading