From dbebd1ec18aae2f76b6873903b630cc8c6d2bf0e Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 15:49:30 +0200 Subject: [PATCH 01/40] testing --- .github/workflows/import-configuration.yml | 50 ++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 .github/workflows/import-configuration.yml diff --git a/.github/workflows/import-configuration.yml b/.github/workflows/import-configuration.yml new file mode 100644 index 00000000..3306fae0 --- /dev/null +++ b/.github/workflows/import-configuration.yml @@ -0,0 +1,50 @@ +name: Import Configuration + +on: + repository_dispatch: + types: [import-config] + workflow_dispatch: + inputs: + environment: + description: 'Target environment (leave empty for repository level)' + required: false + type: string + default: '' + data_type: + description: 'Type of data to import' + required: false + type: choice + default: secret + options: + - secret + - variable + data: + description: 'Base64 encoded JSON data' + required: true + type: string + +jobs: + import: + runs-on: ubuntu-latest + steps: + - name: Parse input data + id: parse + run: | + echo "${{ inputs.data }}" + echo "${{ inputs.data_type }}" + echo "${{ inputs.environment }}" + import-secrets: + if: inputs.data_type == 'secret' + runs-on: ubuntu-latest + steps: + - name: import secrets + run: | + echo "Importing secrets..." + echo + import-variables: + if: inputs.data_type == 'variable' + runs-on: ubuntu-latest + steps: + - name: import variables + run: | + echo "Importing variables..." From 54d3e31565f2f5490f4ea4f8838693ad22066833 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:09:20 +0200 Subject: [PATCH 02/40] testing --- .github/workflows/{import-configuration.yml => import-config.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{import-configuration.yml => import-config.yml} (100%) diff --git a/.github/workflows/import-configuration.yml b/.github/workflows/import-config.yml similarity index 100% rename from .github/workflows/import-configuration.yml rename to .github/workflows/import-config.yml From d0f71db8bcdb3c209b9caca9389c2f85ec125153 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:23:05 +0200 Subject: [PATCH 03/40] testing --- .github/workflows/import-config.yml | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 3306fae0..2919d8bc 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -30,9 +30,28 @@ jobs: - name: Parse input data id: parse run: | - echo "${{ inputs.data }}" - echo "${{ inputs.data_type }}" - echo "${{ inputs.environment }}" + # Get inputs from either workflow_dispatch or repository_dispatch + if [ "${{ github.event_name }}" = "repository_dispatch" ]; then + ENVIRONMENT="${{ github.event.client_payload.environment }}" + DATA_TYPE="${{ github.event.client_payload.data_type }}" + DATA='${{ toJSON(github.event.client_payload.data) }}' + else + ENVIRONMENT="${{ inputs.environment }}" + DATA_TYPE="${{ inputs.data_type }}" + DATA='${{ inputs.data }}' + fi + + echo "environment=${ENVIRONMENT}" >> $GITHUB_OUTPUT + echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT + + # Parse JSON data + echo "$DATA" | jq '.' 
> config.json + + echo "šŸ“¦ Received configuration:" + echo "šŸŒ Environment: ${ENVIRONMENT:-repository}" + echo "šŸ“¦ Data type: ${DATA_TYPE}" + echo "šŸ“‹ Keys:" + cat config.json | jq 'keys' import-secrets: if: inputs.data_type == 'secret' runs-on: ubuntu-latest From 3ac2b40717df70d65a1800d70c7fea45691a31f5 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:24:10 +0200 Subject: [PATCH 04/40] testing --- .github/workflows/import-config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 2919d8bc..2832f29d 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -31,6 +31,7 @@ jobs: id: parse run: | # Get inputs from either workflow_dispatch or repository_dispatch + echo "Determining input source... ${{ github.event_name }}" if [ "${{ github.event_name }}" = "repository_dispatch" ]; then ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" From ccab10de21b2d5cb67fa1812ce09958bbf3f47ca Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:26:30 +0200 Subject: [PATCH 05/40] testing --- .github/workflows/import-config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 2832f29d..99ad7b24 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -32,7 +32,9 @@ jobs: run: | # Get inputs from either workflow_dispatch or repository_dispatch echo "Determining input source... ${{ github.event_name }}" + echo if [ "${{ github.event_name }}" = "repository_dispatch" ]; then + echo "${{ github.event.client_payload }}" ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" DATA='${{ toJSON(github.event.client_payload.data) }}' From c96c366159dcfbdef349010387a0ed7a3a803dee Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:29:46 +0200 Subject: [PATCH 06/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 99ad7b24..79b4ae81 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -34,7 +34,7 @@ jobs: echo "Determining input source... ${{ github.event_name }}" echo if [ "${{ github.event_name }}" = "repository_dispatch" ]; then - echo "${{ github.event.client_payload }}" + echo "${{ toJSON(github.event.client_payload) }}" ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" DATA='${{ toJSON(github.event.client_payload.data) }}' From bb474cbb7fc971cfa444054b6304d9b5fde7b280 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:30:00 +0200 Subject: [PATCH 07/40] testing --- .github/workflows/import-config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 79b4ae81..1575f0cb 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -48,6 +48,7 @@ jobs: echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT # Parse JSON data + echo $DATA echo "$DATA" | jq '.' 
> config.json echo "šŸ“¦ Received configuration:" From 914f5fced4be28bb0f9a3fc02e3e1bac4faa481b Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:32:10 +0200 Subject: [PATCH 08/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 1575f0cb..d904b369 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -49,7 +49,7 @@ jobs: # Parse JSON data echo $DATA - echo "$DATA" | jq '.' > config.json + echo "$DATA" | jq -n '.' > config.json echo "šŸ“¦ Received configuration:" echo "šŸŒ Environment: ${ENVIRONMENT:-repository}" From b4df9176527c44e688f3ec35ccd0c1c00b01218b Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:34:31 +0200 Subject: [PATCH 09/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index d904b369..975a4000 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -34,7 +34,7 @@ jobs: echo "Determining input source... ${{ github.event_name }}" echo if [ "${{ github.event_name }}" = "repository_dispatch" ]; then - echo "${{ toJSON(github.event.client_payload) }}" + echo "${{ toJSON(github.event.client_payload.data) }}" ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" DATA='${{ toJSON(github.event.client_payload.data) }}' From e26f17aa402df9064176c968ddd77631acb07092 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:41:53 +0200 Subject: [PATCH 10/40] testing --- .github/workflows/import-config.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 975a4000..8ef57f03 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -48,14 +48,13 @@ jobs: echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT # Parse JSON data - echo $DATA - echo "$DATA" | jq -n '.' 
> config.json + echo "$DATA" | base64 -d echo "šŸ“¦ Received configuration:" echo "šŸŒ Environment: ${ENVIRONMENT:-repository}" echo "šŸ“¦ Data type: ${DATA_TYPE}" echo "šŸ“‹ Keys:" - cat config.json | jq 'keys' + echo "$DATA" | base64 -d | cut -d\= -f1 import-secrets: if: inputs.data_type == 'secret' runs-on: ubuntu-latest From b9266e61a23af8d0428b734a1834f58589076ba0 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:43:57 +0200 Subject: [PATCH 11/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 8ef57f03..b98f0628 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -37,7 +37,7 @@ jobs: echo "${{ toJSON(github.event.client_payload.data) }}" ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" - DATA='${{ toJSON(github.event.client_payload.data) }}' + DATA='${{ github.event.client_payload.data }}' else ENVIRONMENT="${{ inputs.environment }}" DATA_TYPE="${{ inputs.data_type }}" From 635429673074157b5cef1a3144e82a59d8628223 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:44:30 +0200 Subject: [PATCH 12/40] testing --- .github/workflows/import-config.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index b98f0628..019363d2 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -32,9 +32,7 @@ jobs: run: | # Get inputs from either workflow_dispatch or repository_dispatch echo "Determining input source... ${{ github.event_name }}" - echo if [ "${{ github.event_name }}" = "repository_dispatch" ]; then - echo "${{ toJSON(github.event.client_payload.data) }}" ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" DATA='${{ github.event.client_payload.data }}' From acea044a62701438e67d379605789e9bb22941c3 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:45:34 +0200 Subject: [PATCH 13/40] testing --- .github/workflows/import-config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 019363d2..a42294e2 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -16,8 +16,8 @@ on: type: choice default: secret options: - - secret - - variable + - secrets + - variables data: description: 'Base64 encoded JSON data' required: true @@ -54,7 +54,7 @@ jobs: echo "šŸ“‹ Keys:" echo "$DATA" | base64 -d | cut -d\= -f1 import-secrets: - if: inputs.data_type == 'secret' + if: inputs.data_type == 'secrets' runs-on: ubuntu-latest steps: - name: import secrets @@ -62,7 +62,7 @@ jobs: echo "Importing secrets..." 
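# Illustrative sketch, not part of these patches: one way a caller could build the
# `data` input in the format this workflow expects (one KEY=base64(value) pair per
# line, with the whole blob base64-encoded once more). The variable names and values
# are placeholders, and `base64 -w0` assumes GNU coreutils.
PAYLOAD=$(
  printf 'MY_VAR=%s\nOTHER_VAR=%s\n' \
    "$(printf 'value-one' | base64 -w0)" \
    "$(printf 'value-two' | base64 -w0)" \
  | base64 -w0
)
echo "$PAYLOAD"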
echo import-variables: - if: inputs.data_type == 'variable' + if: inputs.data_type == 'variables' runs-on: ubuntu-latest steps: - name: import variables From db3530b5b92bc954eaee55863bdb409614465ca3 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:49:00 +0200 Subject: [PATCH 14/40] testing --- .github/workflows/import-config.yml | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index a42294e2..342cb19b 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -44,7 +44,7 @@ jobs: echo "environment=${ENVIRONMENT}" >> $GITHUB_OUTPUT echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT - + echo "data=${DATA}" >> $GITHUB_OUTPUT # Parse JSON data echo "$DATA" | base64 -d @@ -54,17 +54,27 @@ jobs: echo "šŸ“‹ Keys:" echo "$DATA" | base64 -d | cut -d\= -f1 import-secrets: - if: inputs.data_type == 'secrets' + needs: import + if: needs.import.outputs.data_type == 'secrets' runs-on: ubuntu-latest steps: - name: import secrets run: | echo "Importing secrets..." - echo + echo ${{ + needs.import.outputs.environment + }} import-variables: - if: inputs.data_type == 'variables' + needs: import + if: needs.import.outputs.data_type == 'variables' runs-on: ubuntu-latest steps: - name: import variables run: | echo "Importing variables..." + echo ${{ + needs.import.outputs.environment + }} + echo ${{ + needs.import.outputs.data + }} \ No newline at end of file From 06812a2cd7c097e520e4e8be2e9128c3ba9f65df Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:50:45 +0200 Subject: [PATCH 15/40] testing --- .github/workflows/import-config.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 342cb19b..33f1dee7 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -26,6 +26,10 @@ on: jobs: import: runs-on: ubuntu-latest + outputs: + environment: ${{ steps.parse.outputs.environment }} + data_type: ${{ steps.parse.outputs.data_type }} + data: ${{ steps.parse.outputs.data }} steps: - name: Parse input data id: parse @@ -66,7 +70,7 @@ jobs: }} import-variables: needs: import - if: needs.import.outputs.data_type == 'variables' + # if: needs.import.outputs.data_type == 'variables' runs-on: ubuntu-latest steps: - name: import variables From 54cd0cdb8ec80541308f9649e25cebe795eae611 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:55:15 +0200 Subject: [PATCH 16/40] testing --- .github/workflows/import-config.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 33f1dee7..186f5f26 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -31,11 +31,10 @@ jobs: data_type: ${{ steps.parse.outputs.data_type }} data: ${{ steps.parse.outputs.data }} steps: - - name: Parse input data + - name: Parse input data and show summary id: parse run: | # Get inputs from either workflow_dispatch or repository_dispatch - echo "Determining input source... 
${{ github.event_name }}" if [ "${{ github.event_name }}" = "repository_dispatch" ]; then ENVIRONMENT="${{ github.event.client_payload.environment }}" DATA_TYPE="${{ github.event.client_payload.data_type }}" @@ -52,11 +51,12 @@ jobs: # Parse JSON data echo "$DATA" | base64 -d - echo "šŸ“¦ Received configuration:" - echo "šŸŒ Environment: ${ENVIRONMENT:-repository}" - echo "šŸ“¦ Data type: ${DATA_TYPE}" - echo "šŸ“‹ Keys:" - echo "$DATA" | base64 -d | cut -d\= -f1 + echo " + # šŸ“¦ Received configuration: + + šŸŒ Environment: ${ENVIRONMENT:-repository} + + šŸ“¦ Data type: ${DATA_TYPE}" + + šŸ“‹ Keys:" >> $GITHUB_STEP_SUMMARY + echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' >> $GITHUB_STEP_SUMMARY import-secrets: needs: import if: needs.import.outputs.data_type == 'secrets' From 93a0f2aba950e945648f436acff2b054a242bb0a Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:56:26 +0200 Subject: [PATCH 17/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 186f5f26..5d49eaa3 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -54,7 +54,7 @@ jobs: echo " # šŸ“¦ Received configuration: + šŸŒ Environment: ${ENVIRONMENT:-repository} - + šŸ“¦ Data type: ${DATA_TYPE}" + + šŸ“¦ Data type: ${DATA_TYPE} + šŸ“‹ Keys:" >> $GITHUB_STEP_SUMMARY echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' >> $GITHUB_STEP_SUMMARY import-secrets: From d32f31f9469b4b4724f39137684ff7f2448219ec Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 16:58:07 +0200 Subject: [PATCH 18/40] testing --- .github/workflows/import-config.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 5d49eaa3..d6d3b3c6 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -48,13 +48,14 @@ jobs: echo "environment=${ENVIRONMENT}" >> $GITHUB_OUTPUT echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT echo "data=${DATA}" >> $GITHUB_OUTPUT - # Parse JSON data - echo "$DATA" | base64 -d + + NUMBER=$(echo "$DATA" | base64 -d | wc -l) echo " # šŸ“¦ Received configuration: - + šŸŒ Environment: ${ENVIRONMENT:-repository} + + šŸŒ Scope/Environment: ${ENVIRONMENT:-repository} + šŸ“¦ Data type: ${DATA_TYPE} + + šŸ”¢ Items count: ${NUMBER} + šŸ“‹ Keys:" >> $GITHUB_STEP_SUMMARY echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' >> $GITHUB_STEP_SUMMARY import-secrets: From 75e80be93ec7c34527a37ad3f4bb9fa43317aef9 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 20 Jan 2026 17:03:10 +0200 Subject: [PATCH 19/40] testing --- .github/workflows/import-config.yml | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index d6d3b3c6..eae85e69 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -50,14 +50,17 @@ jobs: echo "data=${DATA}" >> $GITHUB_OUTPUT NUMBER=$(echo "$DATA" | base64 -d | wc -l) - - echo " - # šŸ“¦ Received configuration: - + šŸŒ Scope/Environment: ${ENVIRONMENT:-repository} - + šŸ“¦ Data type: ${DATA_TYPE} - + šŸ”¢ Items count: ${NUMBER} - + šŸ“‹ Keys:" >> $GITHUB_STEP_SUMMARY - echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' >> $GITHUB_STEP_SUMMARY + { + echo "# šŸ“¦ Received configuration:" + echo "" + 
echo "## Summary" + echo "- šŸŒ Scope/Environment: **${ENVIRONMENT:-repository}**" + echo "- šŸ“¦ Data type: **${DATA_TYPE}**" + echo "- šŸ”¢ Items count: **${NUMBER}**" + echo "" + echo "## šŸ“‹ Data keys:" + echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' + } >> $GITHUB_STEP_SUMMARY import-secrets: needs: import if: needs.import.outputs.data_type == 'secrets' @@ -82,4 +85,4 @@ jobs: }} echo ${{ needs.import.outputs.data - }} \ No newline at end of file + }} From 8d53220d61ad5ab5b00c38a55a20eb1a96d72f09 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 08:56:05 +0200 Subject: [PATCH 20/40] testing --- .github/workflows/import-config.yml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index eae85e69..49c8bd27 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -1,4 +1,5 @@ name: Import Configuration +run-name: Import Configuration on: repository_dispatch: @@ -19,7 +20,7 @@ on: - secrets - variables data: - description: 'Base64 encoded JSON data' + description: 'Base64 encoded JSON data (KEY=base64value format, one per line)' required: true type: string @@ -58,7 +59,7 @@ jobs: echo "- šŸ“¦ Data type: **${DATA_TYPE}**" echo "- šŸ”¢ Items count: **${NUMBER}**" echo "" - echo "## šŸ“‹ Data keys:" + echo "## Data keys:" echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' } >> $GITHUB_STEP_SUMMARY import-secrets: @@ -76,6 +77,10 @@ jobs: needs: import # if: needs.import.outputs.data_type == 'variables' runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ github.token }} + ENVIRONMENT: ${{ needs.parse.outputs.environment }} + DATA: ${{ needs.parse.outputs.data }} steps: - name: import variables run: | @@ -86,3 +91,14 @@ jobs: echo ${{ needs.import.outputs.data }} + [ -n "$ENVIRONMENT" ] && \ + OPTS="--env $ENVIRONMENT" || \ + OPTS="" + while read line; do + if [[ $line == *"="* ]]; then + key=$(echo "$line" | cut -d '=' -f 1) + value=$(echo "$line" | cut -d '=' -f 2-) + echo "Adding variable: $key" + gh variable set $OPTS $key --body "$value" + fi + done <(echo "$DATA" | base64 -d) \ No newline at end of file From fcc165b163accde75ccc18cc8a32a87fd9d72621 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:23:45 +0200 Subject: [PATCH 21/40] testing --- .github/workflows/import-config.yml | 33 ++++++++++++++--------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 49c8bd27..45b32972 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -84,21 +84,20 @@ jobs: steps: - name: import variables run: | - echo "Importing variables..." - echo ${{ - needs.import.outputs.environment - }} - echo ${{ - needs.import.outputs.data - }} - [ -n "$ENVIRONMENT" ] && \ - OPTS="--env $ENVIRONMENT" || \ - OPTS="" - while read line; do - if [[ $line == *"="* ]]; then - key=$(echo "$line" | cut -d '=' -f 1) - value=$(echo "$line" | cut -d '=' -f 2-) - echo "Adding variable: $key" - gh variable set $OPTS $key --body "$value" + echo "šŸ“Š Importing variables to ${ENVIRONMENT:-repository level}..." 
+ + # Decode data + echo "$DATA" | base64 -d > items.env + + # Import each variable + while IFS='=' read -r KEY VALUE; do + [ -z "$KEY" ] && continue + DECODED_VALUE=$(echo "$VALUE" | base64 -d) + echo " āœ“ Setting variable: $KEY" + + if [ -z "$ENVIRONMENT" ]; then + gh variable set "$KEY" --body "$DECODED_VALUE" + else + gh variable set "$KEY" --body "$DECODED_VALUE" --env "$ENVIRONMENT" fi - done <(echo "$DATA" | base64 -d) \ No newline at end of file + done < items.env From 420e7693df3dde32928910985750dfb77571761c Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:25:18 +0200 Subject: [PATCH 22/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 45b32972..2dd15365 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -88,7 +88,7 @@ jobs: # Decode data echo "$DATA" | base64 -d > items.env - + cat items.env # Import each variable while IFS='=' read -r KEY VALUE; do [ -z "$KEY" ] && continue From 2b5f2a4c777c8489b0e5f5b8f68777b9a6d89cda Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:26:54 +0200 Subject: [PATCH 23/40] testing --- .github/workflows/import-config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 2dd15365..be543ab0 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -79,8 +79,8 @@ jobs: runs-on: ubuntu-latest env: GH_TOKEN: ${{ github.token }} - ENVIRONMENT: ${{ needs.parse.outputs.environment }} - DATA: ${{ needs.parse.outputs.data }} + ENVIRONMENT: ${{ needs.import.outputs.environment }} + DATA: ${{ needs.import.outputs.data }} steps: - name: import variables run: | From 8c44ac628c5950a58755376535c5e49098ffb1e1 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:27:17 +0200 Subject: [PATCH 24/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index be543ab0..eb084d56 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -85,7 +85,7 @@ jobs: - name: import variables run: | echo "šŸ“Š Importing variables to ${ENVIRONMENT:-repository level}..." 
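# Hedged usage sketch; OWNER/REPO and the input values are placeholders. With a
# payload built as in the earlier example, the workflow can be started either through
# workflow_dispatch:
gh workflow run import-config.yml -R OWNER/REPO \
  -f environment=staging -f data_type=variables -f "data=$PAYLOAD"
# ...or through the repository_dispatch event type the workflow listens for
# (import-config), sending the same fields as client_payload:
jq -n --arg data "$PAYLOAD" \
  '{event_type: "import-config",
    client_payload: {environment: "staging", data_type: "variables", data: $data}}' \
  | gh api --method POST "repos/OWNER/REPO/dispatches" --input -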
- + echo "needs.import.outputs.data_type: ${{ needs.import.outputs.data_type }}" # Decode data echo "$DATA" | base64 -d > items.env cat items.env From 81f88c09e2fab5b7dbe52360b5358b21b4c11f2f Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:28:22 +0200 Subject: [PATCH 25/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index eb084d56..732cfa19 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -75,7 +75,7 @@ jobs: }} import-variables: needs: import - # if: needs.import.outputs.data_type == 'variables' + if: needs.import.outputs.data_type == 'variables' runs-on: ubuntu-latest env: GH_TOKEN: ${{ github.token }} From 94a7a0e951e717d026a142a6d3c49ad3bfa3475f Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:30:40 +0200 Subject: [PATCH 26/40] testing --- .github/workflows/import-config.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 732cfa19..9b4f3332 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -81,6 +81,7 @@ jobs: GH_TOKEN: ${{ github.token }} ENVIRONMENT: ${{ needs.import.outputs.environment }} DATA: ${{ needs.import.outputs.data }} + REPO: ${{ github.repository }} steps: - name: import variables run: | @@ -96,8 +97,8 @@ jobs: echo " āœ“ Setting variable: $KEY" if [ -z "$ENVIRONMENT" ]; then - gh variable set "$KEY" --body "$DECODED_VALUE" + gh variable set "$KEY" --body "$DECODED_VALUE" --repo "$REPO" else - gh variable set "$KEY" --body "$DECODED_VALUE" --env "$ENVIRONMENT" + gh variable set "$KEY" --body "$DECODED_VALUE" --env "$ENVIRONMENT" --repo "$REPO" fi done < items.env From 8ef4d00539c1baa7c0d009ce715148c12b870bf1 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 14:35:39 +0200 Subject: [PATCH 27/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 9b4f3332..703f53d6 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -78,7 +78,7 @@ jobs: if: needs.import.outputs.data_type == 'variables' runs-on: ubuntu-latest env: - GH_TOKEN: ${{ github.token }} + GH_TOKEN: ${{ secrets.GH_TOKEN }} ENVIRONMENT: ${{ needs.import.outputs.environment }} DATA: ${{ needs.import.outputs.data }} REPO: ${{ github.repository }} From 69488c8d36dd0487b73c4e3c8a04fa8863e058eb Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 15:11:19 +0200 Subject: [PATCH 28/40] testing --- .github/workflows/import-config.yml | 37 +++++++++++++++++++---------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index 703f53d6..c90d62c5 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -45,7 +45,7 @@ jobs: DATA_TYPE="${{ inputs.data_type }}" DATA='${{ inputs.data }}' fi - + echo "::add-mask::$DATA" echo "environment=${ENVIRONMENT}" >> $GITHUB_OUTPUT echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT echo "data=${DATA}" >> $GITHUB_OUTPUT @@ -66,13 +66,30 @@ jobs: needs: import if: needs.import.outputs.data_type == 'secrets' runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} + ENVIRONMENT: ${{ 
needs.import.outputs.environment }} + DATA: ${{ needs.import.outputs.data }} + REPO: ${{ github.repository }} steps: - - name: import secrets - run: | - echo "Importing secrets..." - echo ${{ - needs.import.outputs.environment - }} + - name: Import secrets + run: | + echo "šŸ” Importing secrets to ${ENVIRONMENT:-repository level}..." + echo "$DATA" | base64 -d > items.env + + while IFS='=' read -r KEY VALUE; do + [ -z "$KEY" ] && continue + DECODED_VALUE=$(echo "$VALUE" | base64 -d) + echo " āœ“ Setting secret: $KEY" + + if [ -z "$ENVIRONMENT" ]; then + echo "$DECODED_VALUE" | gh secret set "$KEY" --repo "$REPO" + else + echo "$DECODED_VALUE" | gh secret set "$KEY" --env "$ENVIRONMENT" --repo "$REPO" + fi + done < items.env + + echo "āœ… Secrets imported successfully!" import-variables: needs: import if: needs.import.outputs.data_type == 'variables' @@ -83,19 +100,15 @@ jobs: DATA: ${{ needs.import.outputs.data }} REPO: ${{ github.repository }} steps: - - name: import variables + - name: Import variables run: | echo "šŸ“Š Importing variables to ${ENVIRONMENT:-repository level}..." - echo "needs.import.outputs.data_type: ${{ needs.import.outputs.data_type }}" - # Decode data echo "$DATA" | base64 -d > items.env - cat items.env # Import each variable while IFS='=' read -r KEY VALUE; do [ -z "$KEY" ] && continue DECODED_VALUE=$(echo "$VALUE" | base64 -d) echo " āœ“ Setting variable: $KEY" - if [ -z "$ENVIRONMENT" ]; then gh variable set "$KEY" --body "$DECODED_VALUE" --repo "$REPO" else From 36253daffcead6728c0d0d71d4092946e7bddaf5 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 15:48:05 +0200 Subject: [PATCH 29/40] testing --- .github/workflows/import-config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index c90d62c5..b149efb3 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -35,6 +35,7 @@ jobs: - name: Parse input data and show summary id: parse run: | + echo "::add-mask::${{ inputs.data || github.event.client_payload.data }}" # Get inputs from either workflow_dispatch or repository_dispatch if [ "${{ github.event_name }}" = "repository_dispatch" ]; then ENVIRONMENT="${{ github.event.client_payload.environment }}" @@ -45,7 +46,6 @@ jobs: DATA_TYPE="${{ inputs.data_type }}" DATA='${{ inputs.data }}' fi - echo "::add-mask::$DATA" echo "environment=${ENVIRONMENT}" >> $GITHUB_OUTPUT echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT echo "data=${DATA}" >> $GITHUB_OUTPUT From 6cc6fbc40cd7c43af72232d77da751cea46ae542 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 21 Jan 2026 15:58:05 +0200 Subject: [PATCH 30/40] testing --- .github/workflows/import-config.yml | 33 +++++++++-------------------- 1 file changed, 10 insertions(+), 23 deletions(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index b149efb3..b5e5c9ba 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -27,29 +27,16 @@ on: jobs: import: runs-on: ubuntu-latest - outputs: - environment: ${{ steps.parse.outputs.environment }} - data_type: ${{ steps.parse.outputs.data_type }} - data: ${{ steps.parse.outputs.data }} + env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} + DATA: ${{ inputs.data || github.event.client_payload.data }} + ENVIRONMENT: ${{ inputs.environment || github.event.client_payload.environment }} + REPO: ${{ github.repository }} steps: - - name: Parse input data and show summary - id: 
parse + - name: Debug input data run: | - echo "::add-mask::${{ inputs.data || github.event.client_payload.data }}" - # Get inputs from either workflow_dispatch or repository_dispatch - if [ "${{ github.event_name }}" = "repository_dispatch" ]; then - ENVIRONMENT="${{ github.event.client_payload.environment }}" - DATA_TYPE="${{ github.event.client_payload.data_type }}" - DATA='${{ github.event.client_payload.data }}' - else - ENVIRONMENT="${{ inputs.environment }}" - DATA_TYPE="${{ inputs.data_type }}" - DATA='${{ inputs.data }}' - fi - echo "environment=${ENVIRONMENT}" >> $GITHUB_OUTPUT - echo "data_type=${DATA_TYPE}" >> $GITHUB_OUTPUT echo "data=${DATA}" >> $GITHUB_OUTPUT - + echo "::add-mask::${DATA}" NUMBER=$(echo "$DATA" | base64 -d | wc -l) { echo "# šŸ“¦ Received configuration:" @@ -64,12 +51,12 @@ jobs: } >> $GITHUB_STEP_SUMMARY import-secrets: needs: import - if: needs.import.outputs.data_type == 'secrets' + if: inputs.data_type == 'secrets' || github.event.client_payload.data_type == 'secrets' runs-on: ubuntu-latest env: GH_TOKEN: ${{ secrets.GH_TOKEN }} - ENVIRONMENT: ${{ needs.import.outputs.environment }} - DATA: ${{ needs.import.outputs.data }} + DATA: ${{ inputs.data || github.event.client_payload.data }} + ENVIRONMENT: ${{ inputs.environment || github.event.client_payload.environment }} REPO: ${{ github.repository }} steps: - name: Import secrets From b4fd32b6d96f27305c6ae41e44f79fdd85f54d62 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Mon, 26 Jan 2026 12:12:53 +0200 Subject: [PATCH 31/40] testing --- .github/workflows/import-config.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml index b5e5c9ba..1bc82bac 100644 --- a/.github/workflows/import-config.yml +++ b/.github/workflows/import-config.yml @@ -23,7 +23,9 @@ on: description: 'Base64 encoded JSON data (KEY=base64value format, one per line)' required: true type: string - + secrets: + data: + required: true jobs: import: runs-on: ubuntu-latest From 18b460fdd8d7fee7458c7b91ff5631f7f6c507fe Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Mon, 26 Jan 2026 17:16:04 +0200 Subject: [PATCH 32/40] feat: Migration script swarm to k8s --- .../secret-mapping-opencrvs-deps.yml | 7 +- .github/workflows/deploy-opencrvs.yml | 6 ++ infrastructure/environments/swarm-to-k8s.ts | 66 +++++++++++++ infrastructure/environments/templates.ts | 95 ++++++++++++++++--- .../inventory/inventory.template.yml | 5 +- .../environments/update-workflows.ts | 33 +++---- package.json | 1 + 7 files changed, 178 insertions(+), 35 deletions(-) create mode 100644 infrastructure/environments/swarm-to-k8s.ts diff --git a/.github/TEMPLATES/secret-mapping-opencrvs-deps.yml b/.github/TEMPLATES/secret-mapping-opencrvs-deps.yml index 16abd61c..ae4343cb 100644 --- a/.github/TEMPLATES/secret-mapping-opencrvs-deps.yml +++ b/.github/TEMPLATES/secret-mapping-opencrvs-deps.yml @@ -33,12 +33,15 @@ kibana-users-secret: - KIBANA_USERNAME - KIBANA_PASSWORD +# Traefik static SSL certificate +# backward compatible with existing implementation, +# See: https://documentation.opencrvs.org/v1.8/setup/3.-installation/3.3-set-up-a-server-hosted-environment/3.3.5-setup-dns-a-records/4.3.2.3-static-tls-certificates traefik-cert: type: tls namespace: traefik data: - - TRAEFIK_CERT: cert - - TRAEFIK_KEY: key + - SSL_CRT: cert + - SSL_KEY: key # If backup is configured then workflow will use GitHub secrets for current environment # If restore is configured then workflow will fetch secrets 
from source environment (usually production) diff --git a/.github/workflows/deploy-opencrvs.yml b/.github/workflows/deploy-opencrvs.yml index 4fc73a0b..fd731e76 100644 --- a/.github/workflows/deploy-opencrvs.yml +++ b/.github/workflows/deploy-opencrvs.yml @@ -19,6 +19,11 @@ on: description: "Tag of the countryconfig image" required: true default: "v1.9.1" + data-seed-enabled: + description: "Enable data seeding during deployment" + required: false + default: "true" + type: boolean environment: description: "Target environment" required: true @@ -140,6 +145,7 @@ jobs: --set countryconfig.image.tag="$COUNTRYCONFIG_IMAGE_TAG" \ --set countryconfig.image.name="$COUNTRYCONFIG_IMAGE_NAME" \ --set data_seed.env.ACTIVATE_USERS="${{ vars.ACTIVATE_USERS || 'false' }}" \ + --set data_seed.enabled="${{ inputs.data-seed-enabled }}" \ --set hostname=${{ vars.DOMAIN }} 2>&1 ; STATUS=$?; kill $STERN_PID 2>/dev/null || true exit $STATUS diff --git a/infrastructure/environments/swarm-to-k8s.ts b/infrastructure/environments/swarm-to-k8s.ts new file mode 100644 index 00000000..8b00d54e --- /dev/null +++ b/infrastructure/environments/swarm-to-k8s.ts @@ -0,0 +1,66 @@ +import * as path from 'path'; +import kleur from 'kleur' +import { error, info, log, success, warn } from './logger' +import { updateWorkflowEnvironments } from './update-workflows'; +import { generateInventory, copyChartsValues, extractAndModifyUsers, extractWorkerNodes, extractBackupNode, dockerManagerFirst, readYamlFile } from './templates' + + + +(async () => { + const environment_type = process.env.ENVIRONMENT_TYPE || 'production'; + const environment = process.env.ENVIRONMENT || ''; + if (!environment) { + error('\n', 'Environment variable ENVIRONMENT is not set. Exiting.'); + process.exit(1); + } + log('\n'); + log(kleur.bold().underline(`Environment: ${environment} Migrating Swarm configurations to Kubernetes`)) + + const old_inventory_path = process.env.OLD_INVENTORY_PATH || ''; + if (!old_inventory_path) { + error('\n', 'Environment variable OLD_INVENTORY_PATH is not set. Exiting.'); + log('\n', 'Old inventory path is required to read existing Swarm configurations.'); + process.exit(1); + } + const ansible_inventory = path.join(old_inventory_path, environment + '.yml'); + const data = readYamlFile(ansible_inventory) as any; + log(` āœ“ Loaded old inventory file: ${ansible_inventory}`); + const master = dockerManagerFirst(data) || '' + log(` āœ“ Kubernetes API Host (Docker Manager): ${master}`); + const users = extractAndModifyUsers(data); + // console.log(users.forEach((u: any) => console.log(u.name))); + let worker_nodes: string[] = []; + let backup_host = ''; + if (environment === 'production') { + worker_nodes = extractWorkerNodes(data); + log(` āœ“ Worker nodes: ${worker_nodes.join(', ')}`); + backup_host = extractBackupNode(data); + log(` āœ“ Backup host: ${backup_host}`); + } + + generateInventory( + environment, + { + worker_nodes: worker_nodes, + users: users, + backup_host: backup_host, + kube_api_host: master + } + ) + + copyChartsValues( + environment, + { + env: environment, + environment_type: environment_type, + // FIXME: In general that should be environment_type, + // Hardcode like this blocks us from being generic: + // https://github.com/opencrvs/opencrvs-core/issues/11171 + is_qa_env: environment !== 'production' ? "true" : "false", + backup_enabled: environment === 'production' ? "true" : "false", + restore_enabled: environment === 'staging' ? "true" : "false", + restore_environment_name: environment === 'staging' ? 
"production" : "" + } + ) + await updateWorkflowEnvironments(); +})(); \ No newline at end of file diff --git a/infrastructure/environments/templates.ts b/infrastructure/environments/templates.ts index 9d6fbab1..00f5cb56 100644 --- a/infrastructure/environments/templates.ts +++ b/infrastructure/environments/templates.ts @@ -1,6 +1,71 @@ import fs from "fs"; import path from "path"; -import { log } from './logger' +import { log, success, warn } from './logger' +import * as yaml from 'js-yaml'; + + +export function readYamlFile(filePath: any): any { + const fileContent = fs.readFileSync(filePath, "utf8"); + return yaml.load(fileContent); +} + + +// Refactor with jsonpath +export function extractAndModifyUsers(data: any): any { + if (!data?.all?.vars?.users) { + throw new Error('Invalid YAML structure: missing all.vars.users'); + } + const users = data.all.vars.users.map((user: any) => { + if (user.sudoer === true) { + delete user.sudoer; + user.role = 'admin'; + } + return user; + }); + + // Return with "users:" key + return yaml.dump({ users }, { + indent: 2, + lineWidth: -1, + noRefs: true, + }); + +} + +export function dockerManagerFirst(data: any): string { + if (!data?.['docker-manager-first']?.hosts) { + console.log(data); + throw new Error('Invalid YAML structure: missing docker-manager-first.hosts'); + } + const hosts = data['docker-manager-first'].hosts; + const dockerManagerFirst = Object.values(hosts) + .filter((host: any) => host.ansible_host) + .map((host: any) => host.ansible_host); + return dockerManagerFirst.length === 1 ? dockerManagerFirst[0] : ''; +} + +export function extractBackupNode(data: any): string { + if (!data?.['backups']?.hosts) { + throw new Error('Invalid YAML structure: missing backups.hosts'); + } + const hosts = data['backups'].hosts; + const backupHostEntry = Object.values(hosts) + .filter((host: any) => host.ansible_host) + .map((host: any) => host.ansible_host); + return backupHostEntry.length === 1 ? backupHostEntry[0] : ''; +} + +export function extractWorkerNodes(data: any): string[] { + if (!data?.['docker-workers']?.hosts) { + throw new Error('Invalid YAML structure: missing docker-workers.hosts'); + } + const hosts = data['docker-workers'].hosts; + const worker_hosts = Object.values(hosts) + .filter((host: any) => host.ansible_host) + .map((host: any) => host.ansible_host); + return worker_hosts; +} + /** * Replace placeholders in file content. * Customize the replacements map to your needs. @@ -38,7 +103,7 @@ export function copyChartsValues(env: string, replacements: Record){ // Check if output file already exists if (fs.existsSync(outputPath)) { - log(`āš ļø Skipping ${templatePath}, file already exists at ${outputPath}`); + warn(` āš ļø Skipping ${templatePath}, file already exists at ${outputPath}`); return; } let template = fs.readFileSync(templatePath, "utf-8"); @@ -94,13 +160,12 @@ export function generateInventory(env: string, values: Record){ hosts:`; worker_nodes.forEach((host: string, index: number) => { - const isFirstWorker = index === 0; workersBlock += ` worker${index}: - ansible_host: ${host}${isFirstWorker ? 
` + ansible_host: ${host} labels: - # By default all datastores are deployed to worker node with role data1 - role: data1` : ''} + # Labels have index + 2 for backward compatibility with existing swarm setup + role: data${index + 2} `; }); @@ -110,7 +175,15 @@ export function generateInventory(env: string, values: Record){ template = template.replace('{{WORKERS_BLOCK}}', ''); } - + if (values['users']) { + const indentedYaml = values['users'].split('\n') + .filter((line: string) => line.trim().length > 0) + .map((line: string) => ' ' + line) // indent each line with 4 spaces + .join('\n'); + template = template.replace('{{USERS_BLOCK}}', indentedYaml); + } else { + template = template.replace('{{USERS_BLOCK}}', ' users: []'); + } // Generate backup block if backup_host is provided const backupHost = String(values['backup_host']).replace(/[\x00-\x1F\x7F]/g, ""); let backupBlock = ''; @@ -132,5 +205,5 @@ export function generateInventory(env: string, values: Record){ values fs.mkdirSync(path.dirname(outputPath), { recursive: true }); fs.writeFileSync(outputPath, updated); - log(`āœ… Generated inventory file at ${outputPath}`); + log(`\nāœ… Generated inventory file at ${outputPath}\n`); } diff --git a/infrastructure/environments/templates/inventory/inventory.template.yml b/infrastructure/environments/templates/inventory/inventory.template.yml index a8242973..bc2e9a30 100644 --- a/infrastructure/environments/templates/inventory/inventory.template.yml +++ b/infrastructure/environments/templates/inventory/inventory.template.yml @@ -36,7 +36,8 @@ all: # Allowed states: # - present: user is allowed to login # - absent: user account is disabled - users: [] + # users: [] +{{USERS_BLOCK}} children: master: @@ -50,6 +51,8 @@ all: labels: # traefik-role label is used to identify where to deploy traefik traefik-role: ingress + # By default all datastores are deployed to node with role data1 + role: data1 {{WORKERS_BLOCK}} diff --git a/infrastructure/environments/update-workflows.ts b/infrastructure/environments/update-workflows.ts index 1c997255..b9942bc1 100644 --- a/infrastructure/environments/update-workflows.ts +++ b/infrastructure/environments/update-workflows.ts @@ -3,7 +3,7 @@ import { readFileSync, writeFileSync, statSync, existsSync } from 'fs'; import { basename, join } from 'path'; import * as glob from 'glob'; import * as yaml from 'js-yaml'; - +import { error, info, log, success, warn } from './logger' interface WorkflowConfig { workflows: string[]; path: string; @@ -18,9 +18,7 @@ async function extractInfrastructureNames(): Promise { console.log('āš ļø Warning: No environment directories found in infrastructure/server-setup/inventory/'); return []; } - console.log('List of existing infrastructure configurations:'); - console.log(infraEnvironments.join(', ')); - + log('šŸ” Found infrastructure configurations:', infraEnvironments.join(', ')); return infraEnvironments; } @@ -37,9 +35,7 @@ async function extractEnvironmentNames(): Promise { return []; } - console.log('\nList of existing environment configurations:'); - console.log(environments.join(', ')); - + log('šŸ” Found OpenCRVS configurations:', environments.join(', ')); return environments; } @@ -72,8 +68,6 @@ async function updateWorkflows( const { workflows } = config; for (const workflowPath of workflows) { - console.log(`\nUpdating ${workflowPath} with: [${envList.join(', ')}]`); - try { const fileContents = readFileSync(workflowPath, 'utf8'); @@ -87,8 +81,9 @@ async function updateWorkflows( const updatedContent = 
updateOptionsInYaml(fileContents, envList); writeFileSync(workflowPath, updatedContent, 'utf8'); - console.log(`āœ“ Successfully updated ${workflowPath}`); + log(` āœ“ Successfully updated ${workflowPath}`); } catch (error) { + console.error(`\nāš ļø Error updating ${workflowPath} with environments: [${envList.join(', ')}]`); console.error(`āœ— Failed to update ${workflowPath}:`, error); throw error; } @@ -96,16 +91,12 @@ async function updateWorkflows( } export async function updateWorkflowEnvironments(): Promise { - try { - console.log('šŸ”„ Updating workflow environments...\n'); - + try { // Extract infrastructure names const infraEnvironments = await extractInfrastructureNames(); - // Extract environment names (only directories) - const environments = await extractEnvironmentNames(); - // Update workflows with infrastructure configurations + console.log('šŸ”„ Updating infrastructure workflows:'); await updateWorkflows(infraEnvironments, { workflows: [ '.github/workflows/provision.yml', @@ -114,7 +105,9 @@ export async function updateWorkflowEnvironments(): Promise { path: 'on.workflow_dispatch.inputs.environment.options' }); - console.log(`\nšŸ“‹ Updating workflows...`); + // Extract environment names (only directories) + const environments = await extractEnvironmentNames(); + const workflows = [ '.github/workflows/deploy-dependencies.yml', '.github/workflows/deploy-opencrvs.yml', @@ -123,16 +116,14 @@ export async function updateWorkflowEnvironments(): Promise { '.github/workflows/k8s-reindex.yml', '.github/workflows/github-to-k8s-sync-env.yml' ]; - + log("šŸ“‹ Updating OpenCRVS application workflows:"); await updateWorkflows(environments, { workflows, path: 'on.workflow_dispatch.inputs.environment.options' }); - console.log('\nāœ… All workflows updated successfully!'); - console.log('\nšŸ’” Review the changes and commit them when ready.'); - + success('āœ… All workflows updated successfully!'); } catch (error) { console.error('\nāŒ Error updating workflows:', error); process.exit(1); diff --git a/package.json b/package.json index e33c87d3..b383ca1a 100644 --- a/package.json +++ b/package.json @@ -13,6 +13,7 @@ "husky": {}, "scripts": { "environment:init": "ts-node infrastructure/environments/setup-environment.ts", + "environment:swarm-to-k8s": "ts-node infrastructure/environments/swarm-to-k8s.ts", "environment:upgrade": "yarn environment:init", "prepare": "husky" }, From 90f3256d84dfe38156c7bc604a9705450614e5d9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 27 Jan 2026 15:53:43 +0000 Subject: [PATCH 33/40] chore: Create configuration files for swarm-to-k8s environment --- .github/workflows/deploy-dependencies.yml | 2 +- .github/workflows/deploy-opencrvs.yml | 2 +- .github/workflows/github-to-k8s-sync-env.yml | 2 +- .github/workflows/k8s-reindex.yml | 2 +- .github/workflows/k8s-reset-data.yml | 2 +- .github/workflows/k8s-seed-data.yml | 2 +- .github/workflows/provision.yml | 2 +- .github/workflows/reset-2fa.yml | 2 +- .../swarm-to-k8s/dependencies/values.yaml | 41 +++++++ .../swarm-to-k8s/mosip-api/values.yaml | 2 + .../opencrvs-services/values.yaml | 53 ++++++++ environments/swarm-to-k8s/traefik/values.yaml | 84 +++++++++++++ .../server-setup/inventory/swarm-to-k8s.yml | 115 ++++++++++++++++++ 13 files changed, 303 insertions(+), 8 deletions(-) create mode 100644 environments/swarm-to-k8s/dependencies/values.yaml create mode 100644 environments/swarm-to-k8s/mosip-api/values.yaml create mode 100644 environments/swarm-to-k8s/opencrvs-services/values.yaml create mode 
100644 environments/swarm-to-k8s/traefik/values.yaml create mode 100644 infrastructure/server-setup/inventory/swarm-to-k8s.yml diff --git a/.github/workflows/deploy-dependencies.yml b/.github/workflows/deploy-dependencies.yml index af6725a9..eb2f15a9 100644 --- a/.github/workflows/deploy-dependencies.yml +++ b/.github/workflows/deploy-dependencies.yml @@ -9,7 +9,7 @@ on: default: "dev" type: choice options: - - "" + - swarm-to-k8s jobs: approve: environment: ${{ inputs.environment }} diff --git a/.github/workflows/deploy-opencrvs.yml b/.github/workflows/deploy-opencrvs.yml index fd731e76..3adf9eeb 100644 --- a/.github/workflows/deploy-opencrvs.yml +++ b/.github/workflows/deploy-opencrvs.yml @@ -30,7 +30,7 @@ on: default: "dev" type: choice options: - - "" + - swarm-to-k8s jobs: approve: diff --git a/.github/workflows/github-to-k8s-sync-env.yml b/.github/workflows/github-to-k8s-sync-env.yml index e288aced..52ca36d3 100644 --- a/.github/workflows/github-to-k8s-sync-env.yml +++ b/.github/workflows/github-to-k8s-sync-env.yml @@ -10,7 +10,7 @@ on: default: "development" type: choice options: - - development + - swarm-to-k8s namespace_template: description: "Secrets mapping template" default: "opencrvs" diff --git a/.github/workflows/k8s-reindex.yml b/.github/workflows/k8s-reindex.yml index cf4dfb9e..9c69b3a0 100644 --- a/.github/workflows/k8s-reindex.yml +++ b/.github/workflows/k8s-reindex.yml @@ -9,7 +9,7 @@ on: default: "dev" type: choice options: - - "" + - swarm-to-k8s workflow_call: inputs: environment: diff --git a/.github/workflows/k8s-reset-data.yml b/.github/workflows/k8s-reset-data.yml index bcb1bdd6..0d8a8d7d 100644 --- a/.github/workflows/k8s-reset-data.yml +++ b/.github/workflows/k8s-reset-data.yml @@ -9,7 +9,7 @@ on: default: "dev" type: choice options: - - "" + - swarm-to-k8s workflow_call: inputs: environment: diff --git a/.github/workflows/k8s-seed-data.yml b/.github/workflows/k8s-seed-data.yml index 7e8fe4d7..aba135a4 100644 --- a/.github/workflows/k8s-seed-data.yml +++ b/.github/workflows/k8s-seed-data.yml @@ -9,7 +9,7 @@ on: default: "dev" type: choice options: - - "" + - swarm-to-k8s workflow_call: inputs: environment: diff --git a/.github/workflows/provision.yml b/.github/workflows/provision.yml index c4b872d5..063a7bd0 100644 --- a/.github/workflows/provision.yml +++ b/.github/workflows/provision.yml @@ -9,7 +9,7 @@ on: default: 'dev' type: choice options: - - "" + - swarm-to-k8s tags: description: 'Tags to apply to the provisioned resources' required: true diff --git a/.github/workflows/reset-2fa.yml b/.github/workflows/reset-2fa.yml index a67f0737..09bb9a79 100644 --- a/.github/workflows/reset-2fa.yml +++ b/.github/workflows/reset-2fa.yml @@ -13,7 +13,7 @@ on: default: required: true options: - - "" + - swarm-to-k8s jobs: approve: diff --git a/environments/swarm-to-k8s/dependencies/values.yaml b/environments/swarm-to-k8s/dependencies/values.yaml new file mode 100644 index 00000000..4e534b3f --- /dev/null +++ b/environments/swarm-to-k8s/dependencies/values.yaml @@ -0,0 +1,41 @@ +storage_type: host_path + +environment_type: production + +minio: + use_default_credentials: false + +elasticsearch: + use_default_credentials: false + +mongodb: + use_default_credentials: false + +postgres: + use_default_credentials: false + +redis: + auth_mode: acl + +monitoring: + enabled: true + +elastalert: + env: + HTTP_POST2_ALERT_URL: http://countryconfig.opencrvs-swarm-to-k8s.svc.cluster.local:3040/email + +# Backup configuration +backup: + enabled: false + schedule: "0 1 * * *" + 
backup_server_secret: backup-server-ssh-credentials + backup_server_dir: /home/backup/swarm-to-k8s + + +# Restore configuration +restore: + enabled: false + schedule: "0 0 * * *" + backup_server_secret: backup-server-ssh-credentials + backup_server_dir: /home/backup/ + backup_encryption_secret: restore-encryption-secret \ No newline at end of file diff --git a/environments/swarm-to-k8s/mosip-api/values.yaml b/environments/swarm-to-k8s/mosip-api/values.yaml new file mode 100644 index 00000000..442be8ee --- /dev/null +++ b/environments/swarm-to-k8s/mosip-api/values.yaml @@ -0,0 +1,2 @@ +ingress: + ssl_enabled: true \ No newline at end of file diff --git a/environments/swarm-to-k8s/opencrvs-services/values.yaml b/environments/swarm-to-k8s/opencrvs-services/values.yaml new file mode 100644 index 00000000..39f50531 --- /dev/null +++ b/environments/swarm-to-k8s/opencrvs-services/values.yaml @@ -0,0 +1,53 @@ +######################################################################################## +# Initial configuration file for OpenCRVS installation +######################################################################################## +# Some properties are not defined in this file and should be provided as key/value at +# installation time: +# - hostname: valid DNS name for opencrvs +# - countryconfig.image.name: Countryconfig image repository +# - countryconfig.image.tag: Countryconfig image tag +environment_type: production + +hpa: + enabled: false + +env: + APN_SERVICE_URL: "http://apm-server.opencrvs-deps-swarm-to-k8s.svc.cluster.local:8200" + QA_ENV: true +influxdb: + host: influxdb-0.influxdb.opencrvs-deps-swarm-to-k8s.svc.cluster.local +elasticsearch: + auth_mode: auto + host: elasticsearch.opencrvs-deps-swarm-to-k8s.svc.cluster.local + + +minio: + auth_mode: use_secret + host: minio-0.minio.opencrvs-deps-swarm-to-k8s.svc.cluster.local + +mongodb: + auth_mode: auto + host: mongodb-0.mongodb.opencrvs-deps-swarm-to-k8s.svc.cluster.local + +redis: + auth_mode: use_secret + host: redis-0.redis.opencrvs-deps-swarm-to-k8s.svc.cluster.local + +postgres: + auth_mode: auto + host: postgres-0.postgres.opencrvs-deps-swarm-to-k8s.svc.cluster.local + +imagePullSecrets: + # Default value for credentials created while yarn environment:init + - name: dockerhub-credentials + +countryconfig: + secrets: + smtp-config: + - ALERT_EMAIL + - SENDER_EMAIL_ADDRESS + - SMTP_HOST + - SMTP_PASSWORD + - SMTP_PORT + - SMTP_SECURE + - SMTP_USERNAME diff --git a/environments/swarm-to-k8s/traefik/values.yaml b/environments/swarm-to-k8s/traefik/values.yaml new file mode 100644 index 00000000..8f16cfe4 --- /dev/null +++ b/environments/swarm-to-k8s/traefik/values.yaml @@ -0,0 +1,84 @@ +# Overwriting https://github.com/traefik/traefik-helm-chart/blob/master/traefik/values.yaml +namespaceOverride: "traefik" +logs: + general: + # "TRACE", "DEBUG", "INFO", "WARN", "ERROR", "FATAL", "PANIC" + level: "INFO" + # format: "common" # For local environment + format: "json" # For server environment + access: + # -- To enable access logs + enabled: true + format: "json" + +ingressRoute: + dashboard: + enabled: false + +# Be explicit that we only use CRDs, not ingress/gw support +providers: + kubernetesCRD: + enabled: true + kubernetesIngress: + enabled: false + kubernetesGateway: + enabled: false + +service: + enabled: true + single: false + type: NodePort + +ports: + web: + port: 8000 + hostPort: 80 + protocol: TCP + nodePort: 30080 + redirections: + entryPoint: + to: websecure + scheme: https + permanent: true + + websecure: + 
port: 8443 + hostPort: 443 + protocol: TCP + nodePort: 30443 + # šŸ‘‡ Adjust this section at websecure entrypoint + tls: + enabled: true + certResolver: letsencrypt + +# šŸ‘‡ Adjust this section if needed +certificatesResolvers: + letsencrypt: + acme: + tlsChallenge: false + httpChallenge: + entryPoint: web + # šŸ‘‡ Provide admin email address + email: admin@opencrvs.org + # Storage for certificates: + storage: /certificates/acme.json + # NOTE: Sometimes Let's Encrypt hit production SSL certificate issuing limits + # If you are having issues, switch to staging + # Staging server + # caServer: https://acme-staging-v02.api.letsencrypt.org/directory + # Production server + caServer: https://acme-v02.api.letsencrypt.org/directory + +deployment: + hostNetwork: true + additionalVolumes: + - name: acme + hostPath: + path: /data/traefik + +additionalVolumeMounts: + - name: acme + mountPath: /certificates + +nodeSelector: + traefik-role: ingress diff --git a/infrastructure/server-setup/inventory/swarm-to-k8s.yml b/infrastructure/server-setup/inventory/swarm-to-k8s.yml new file mode 100644 index 00000000..e219ef5b --- /dev/null +++ b/infrastructure/server-setup/inventory/swarm-to-k8s.yml @@ -0,0 +1,115 @@ +all: + vars: + + # Domain/IP address for remote access to your cluster API + # Domain/IP address will be added as main endpoint to your ~/.kube/config + # - If you are behind VPN, use private IP address + # - If your server is exposed (not recommeded), use public IP address + # - If you would like to run kubectl commands from the remote server, leave this field empty + # kube_api_endpoint: '' + + # IMPORTANT: If master VM has multiple ethernet interfaces, put private IP address at kube_api_address + # kube_api_host: 10.10.10.10 + kube_api_host: 46.224.251.95 + + # Default ansible provision user, keep as is + ansible_user: provision + # single_node: + # For development/qa/testing/staging keep true + # For production keep false + # Defaults production configuration: + # - master node + # - 2 worker nodes + single_node: true + + # users: Add as many users as you wish + # Configuration example + # - name: + # ssh_keys: + # - + # - + # state: present + # role: admin + # Allowed roles: + # - operator: grant read only access to OS and full access to kubernetes cluster + # - admin: grant full access to OS and kubernetes cluster + # Allowed states: + # - present: user is allowed to login + # - absent: user account is disabled + # users: [] + users: + - name: pyry + ssh_keys: + - ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJBcrSLLdrkLrhqNQi7Uo/ZIWXb1y4kc0vGb16e2s0Jq pyry@opencrvs.org + state: present + role: admin + - name: tameem + ssh_keys: + - ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGUprcQyUFYwRto0aRpgriR95C1pgNxrQ0lEWEe1D8he haidertameem@gmail.com + state: present + role: admin + - name: riku + ssh_keys: + - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDWIF63S4f3z9wQMvWibmvl7MPuJ6EVrkP0HuvgNhcs/4DZYMcR/GRBvV4ldOSYMlBevIXycgGzNDxKJgENUuwIWanjBu7uVAHyD6+cIRD1h63qq7Cjv/2HYTfBDKOrKzPOhA6zWvKO0ZGWsjRXk5LWMCbKOkvKJCxOpj/NVBxeE4FTK5YADYPV3OSsmBtqTHrVLm2sMmShU/2hMYYswWkobidjX65+nK/X+3C+yJbHwiydVvn+QCrFlFfCLPWKe8rUpOxyxofPqWVQh6CHhHfT8okaOc9sOE8Qeip9ljo84DftJh3Xm3ynOdWK1hH2BvRvxNadWqcE1qECbkg4tx2x riku.rouvila@gmail.com + - ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDGfWxxQHJv6Md/vBVoDH2UNm/uYgIBlFpP1mfh2Yj6jRNiQ/TQrfwpTawq0Sg+UW4LfYk5yxttsZ0h6L/v6PLiawgbMtf2ZqSviRTYSZTSihkK2zLmeJA2ByBCh57w4tR6IGqJK4w0kjYQSaaU6V5skQ4u+gnLQoKtkVQ4K34EFXAiIur96tLwjwDd/xCm+9T91+cAxGLv8Pe0PjirjwnvktUtzpgOhedkYK7KX0l8SKxQXUK6Ul2/QbpGO3rmguzEdtrl3Dw1TAEfu2njXbNGVQ+JWV9htH+ymsMIGoeumJRaaAZ4AXLlQPBCxTXcdQDuAjfFDPuppms/h7qB1S4Aioz7zqyd7pL7Z6Z8mJBZZlP3PsfGvADM2CdShpbL4HAa+n9miNNSYcJ7cHvC/zCitNjfaEYLVYkB5G+ggeK8Ss/MDcnsh3YFB8WnT582zt/TTJda5n+5Q7tquc1m+61t2gEKKTfBoDft9UYW2/4ViHj3ROL2Oyj7udrh/oAqV8M= riku@MBP16inch2231 + state: present + role: admin + - name: euan + ssh_keys: + - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDECqHO65UpyrrO8uueD06RxGaVVq22f152Rf8qVQQAAIGAMu6gCs7ztlZ8a3yQgSEIjM/Jl1/RqIVs6CziTEef74nLFTZ5Ufz3CLRVgdebBeSBEmhTfTUV0HLkSyNzwKFpuzJxucGd72ulPvEp6eHvyJAPJz37YcU8cjaL1v05T6s2ee99li35GlDDtCzfjVV4ZPAg5JdfWuTj41RAVC0LQhk2/NB4qEu37UxGGjhRFSjBEsS5LxI9QfvgrsHpl/VOn+soH7ZkK7kS6qRgNP/uYsXRWXhHaamcl5OX68gJWTbrW6c7PCqlbCWGnsHJswCmqPIthwXXMfC7ULDNLSKG6mslAt5Dyc8/MCr3vTW7pDyr2d0FvvY86SMQUggxv3qF7TZewqfX1bhK0fMLarIxVMQ1RFo//wN9QGA+2we8rxd2Y1Kr1DBuJyuwXPfv+Exo8yNYQ+x/AYH5k6UVcSYuaB8eYmplG2KQCxt8RBFtoChrwOKNRWLqXdKyfpdp5XmnnWxPvR95gf3h3yLocVYkF0i0uvKKJ0vt8J0Ezfkdfow0B1kUg5bPXKJROX7PwbaCPdYcxyDaO6wwOigRnSmoFvkH1pLb4j1RQAXcX531CHgfN6Izi/h0mpMS4bnyIUcv2GQr+h4z4TxcCtj7qpH2y6yw7XG12jVh7TfeesXG2Q== euanmillar77@gmail.com + state: present + role: admin + - name: tahmid + ssh_keys: + - ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINUml9O5ySwPtEMD1yGEYHlf9Z3jro97NWAnM9+ew9gn tahmidrahman.dsi@gmail.com + state: present + role: admin + - name: tareq + ssh_keys: + - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCWQihdKkwxTItN+rwYAX1vBg+8sv59sFsjYoVaO2mzS01rARfh+M+UVqpEv3zFT/3v6Dr5Z5VhzYvvbH8akiGQxURqie9quEi1iBCqcq+LApkMZxNm7yyvexlFsbkKMHsSZyVCzjE2Wt+6fwR1NqkMQgJjZS+b4CB+CUTNP2i6ytUTmck9K5iAOp1Gpm+Xgyvz6ZEJPkAJ16gV7gzNJUt/DSCkCyV8G2BqYLWeR2QxAbKyuf3LzO5i4XZdiZi9o60QAt3A6KGGLazd0UuYdehQDqVwXzwimLeeuZbaPNmwoAy7DeatOdurrWbnL7ytaiPvAbwai6Grt3PhhM41qO+uojnqTdnFdSOEPVIYMR7+mYu9tuwHZcMJIbbvMPD6EvKumD5Ndn5OxiLY/zQF5PuG89pBdTkTzzREvbV1Dkh2hwAIvgavlZl3P64On+4+FAgjrAx5U55khoRAe2FbEvB+EUGwro0bRffiM2NmxkUBraEuT2Xt5K01ZoBU6F4feO0= tareq.aziz@dsinnovators.com + state: present + role: admin + - name: jamil + ssh_keys: + - ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINMSNTIIsM0C3uJg3V/Fqh2gi4lvl2y6nenrb2Ft1JlX jamil31415926@gmail.com + state: present + role: admin + - name: ashikul + ssh_keys: + - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDFr/v3hUGEbc2wQsDLCmqLrwiz964yVrnLZ6kafemjmX8aRGLp1CNFvrZ674SLnXidZGMkx9d5xVvv8IdFR3R50MqSqfolF43MV34/JVHjQHh9Vk4MJT/3GIaeNmr2GQ/38qAmt2BQn1ecnb7FjNO2bFvHokLhm2wCXt+A4avuTgJe0p4e6uu01IHeIzDb5sPzZ3ID0h6jJnjEDcET+Lf5NGpCjn7YKhLhBWSSl9cXQdOGLzNzg3aBk32kgJ1beP1funSeVd0jniJPZeZRC1G/kRdqBUOHKiENtwgquzZxXzdHkZV9+4mF7YGlx6LpQdNuDpW7JADtYNldtdbexdyfrgNoRzKwyMmaKNDbeHd1FsIHSDJmGm9hCoLTM2dEtsGzgghfe0tat8sOWmsj5v2en0V8rKV+w8OQEmHtaQkgMjqmZaAnd8uWiB2xIbrUuax5Pq8zkj37xnfbRxUPOEkMlOUbhh1wzGbqeUEB7nbv/vXZxwC0b7ryMk5egBP+0ZRONsdib9RkSTr3B9uSb7iTOQftdhy+CTqqOq+6s+TyC2qnu12B1WZb9sx9jQl0mBHd9gx/FgYDs8jfIr2vF4jRkejW/moaVqvCd/FLyS91eCMXQjIXdGKWKPUUL7GEBqdZRLnYSJOqgPp9sk1+NEvMabTXlWmoUjaShq8z+o7JsQ== nileeeem36@gmail.com + state: present + role: admin + - name: markus + ssh_keys: + - ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDGvvjFxXqcdKn9kk8VHzm38R3nLWvvwP8W0e3uXxOgby/7LJZx2bosXCZ28FyPTYwVRezHE9lguKiaCo2kxqzNwwx64MzUFRH60sE5cYeH1IqjCBTY3Ht8hkZlYaVoRmsHiqiqogW+bJPo8PBO+ydCh53KUdJFEOXAYvKZ/RfDsWh7/SjeQrQzpRFNeb9keefX+uNNBbKRm9/AEWIHFCGJpDvpJcz3i8hKbRPtXi5OTcEx1Kr4iOMikGXvGzsC1u84qgiy5moeBzpWeROwyJOHRLqPqQ/IHvUkE4F1BXen02G69nHpFdmjTOcjBbT1RzGTeWZs+ehc/kJaS3dUMHd5rSPsimjiCKZ5+wCAyxc5gJlQof71IpHVN4ZDoetH4Lo2bnLdA1YX6DaVU1Fd/6rPWw02DA1OEIhrjJ3Gak87/HUYGNhpZVyIxyNYGXBMPkmHCHCjzjN7sPdMRvkbl5tahD2PoS4172tsO7YYMfAZ/UYYZw745CDxQYIjjfrFRn8= markuslaurila@MacBook-Pro.local + state: present + role: admin + - name: vmudryi + ssh_keys: + - ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINgMcsSBwTE0EbMDRSF1T4vJDcN/5HAjKGbi2DqV7g/Q vmudryi@opencrvs.org + state: present + role: admin + - name: cihan + ssh_keys: + - ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEEtz5M5hYKcUehDiCm84BplV+3t1ex8DPjIsMtQEWGv cihan.m.bebek@gmail.com + state: present + role: admin + + children: + master: + hosts: + # Replace master with value returned by command: hostname + master: + # Keep values (ansible_host, ansible_connection) as is + # Ansible is executed on master node + ansible_host: localhost + ansible_connection: local + labels: + # traefik-role label is used to identify where to deploy traefik + traefik-role: ingress + # By default all datastores are deployed to node with role data1 + role: data1 + + + + From 68f23cc37032ae8ad92f631839ee8f6da02a1c87 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 27 Jan 2026 18:16:25 +0200 Subject: [PATCH 34/40] testing --- environments/swarm-to-k8s/traefik/values.yaml | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/environments/swarm-to-k8s/traefik/values.yaml b/environments/swarm-to-k8s/traefik/values.yaml index 8f16cfe4..71dae97d 100644 --- a/environments/swarm-to-k8s/traefik/values.yaml +++ b/environments/swarm-to-k8s/traefik/values.yaml @@ -35,21 +35,19 @@ ports: hostPort: 80 protocol: TCP nodePort: 30080 - redirections: - entryPoint: - to: websecure - scheme: https - permanent: true + http: + redirections: + entryPoint: + to: websecure + scheme: https + permanent: true websecure: port: 8443 hostPort: 443 protocol: TCP nodePort: 30443 - # šŸ‘‡ Adjust this section at websecure entrypoint - tls: - enabled: true - certResolver: letsencrypt + # šŸ‘‡ Adjust this section if needed certificatesResolvers: From 0db0a969c26020deba1f2c8a8d181c76386e3893 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Tue, 27 Jan 2026 18:50:41 +0200 Subject: [PATCH 35/40] testing --- environments/swarm-to-k8s/opencrvs-services/values.yaml | 5 +++++ environments/swarm-to-k8s/traefik/values.yaml | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/environments/swarm-to-k8s/opencrvs-services/values.yaml b/environments/swarm-to-k8s/opencrvs-services/values.yaml index 39f50531..49e4221d 100644 --- a/environments/swarm-to-k8s/opencrvs-services/values.yaml +++ b/environments/swarm-to-k8s/opencrvs-services/values.yaml @@ -42,6 +42,11 @@ imagePullSecrets: - name: dockerhub-credentials countryconfig: + env: + OPENID_PROVIDER_CLAIMS: name,family_name,given_name,middle_name,birthdate,address + OPENID_PROVIDER_CLIENT_ID: mock-client_id + ESIGNET_REDIRECT_URL: https://esignet-mock.swarm-to-k8s.opencrvs.dev/authorize + MOSIP_API_USERINFO_URL: https://mosip-api.swarm-to-k8s.opencrvs.dev/esignet/get-oidp-user-info secrets: smtp-config: - ALERT_EMAIL diff --git a/environments/swarm-to-k8s/traefik/values.yaml b/environments/swarm-to-k8s/traefik/values.yaml index 71dae97d..51a3ba38 100644 --- 
a/environments/swarm-to-k8s/traefik/values.yaml +++ b/environments/swarm-to-k8s/traefik/values.yaml @@ -47,6 +47,10 @@ ports: hostPort: 443 protocol: TCP nodePort: 30443 + http: + tls: + enabled: true + certResolver: letsencrypt # šŸ‘‡ Adjust this section if needed From 84b80596b2fdb2e943c267d5d4408953f484040e Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 28 Jan 2026 11:35:05 +0200 Subject: [PATCH 36/40] testing --- .github/workflows/provision.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/provision.yml b/.github/workflows/provision.yml index 063a7bd0..67c915c3 100644 --- a/.github/workflows/provision.yml +++ b/.github/workflows/provision.yml @@ -93,5 +93,6 @@ jobs: # Add --verbose to get more output options: |- --inventory inventory/${{ inputs.environment }}.yml + --verbose ${{ inputs.tags != 'all' && format('--tags={0}', inputs.tags) || '' }} --extra-vars ""${{ steps.ansible-variables.outputs.EXTRA_VARS }}"" From b9bfa025ff118791521569a35e2b8657e43e5410 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 28 Jan 2026 11:45:34 +0200 Subject: [PATCH 37/40] testing --- infrastructure/server-setup/tasks/k8s/install-containerd.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/infrastructure/server-setup/tasks/k8s/install-containerd.yml b/infrastructure/server-setup/tasks/k8s/install-containerd.yml index 6f911c11..c73faf26 100644 --- a/infrastructure/server-setup/tasks/k8s/install-containerd.yml +++ b/infrastructure/server-setup/tasks/k8s/install-containerd.yml @@ -5,6 +5,10 @@ purge: true loop: - docker + - docker-ce + - docker-ce-cli + - docker-buildx-plugin + - docker-compose-plugin - docker-engine - docker.io - containerd From 684e78aaf7cf0bea87161cfc0a1f18f31b28d49f Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 28 Jan 2026 12:02:46 +0200 Subject: [PATCH 38/40] testing --- infrastructure/server-setup/tasks/k8s/install-containerd.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/infrastructure/server-setup/tasks/k8s/install-containerd.yml b/infrastructure/server-setup/tasks/k8s/install-containerd.yml index c73faf26..e1ffaa73 100644 --- a/infrastructure/server-setup/tasks/k8s/install-containerd.yml +++ b/infrastructure/server-setup/tasks/k8s/install-containerd.yml @@ -8,8 +8,10 @@ - docker-ce - docker-ce-cli - docker-buildx-plugin + - docker-ce-rootless-extras - docker-compose-plugin - docker-engine + - python3-docker - docker.io - containerd - runc From 4807b84606dba394a43be40179e953ab039aa0dc Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 28 Jan 2026 15:13:25 +0200 Subject: [PATCH 39/40] testing --- infrastructure/environments/swarm-to-k8s.ts | 10 +- infrastructure/environments/templates.ts | 108 +++--------------- .../inventory/inventory.template.yml | 46 +++++++- package.json | 3 +- yarn.lock | 37 ++++++ 5 files changed, 99 insertions(+), 105 deletions(-) diff --git a/infrastructure/environments/swarm-to-k8s.ts b/infrastructure/environments/swarm-to-k8s.ts index 8b00d54e..cfcbaf8e 100644 --- a/infrastructure/environments/swarm-to-k8s.ts +++ b/infrastructure/environments/swarm-to-k8s.ts @@ -30,13 +30,11 @@ import { generateInventory, copyChartsValues, extractAndModifyUsers, extractWork const users = extractAndModifyUsers(data); // console.log(users.forEach((u: any) => console.log(u.name))); let worker_nodes: string[] = []; + worker_nodes = extractWorkerNodes(data); + log(` āœ“ Worker nodes: ${worker_nodes.join(', ')}`); let backup_host = ''; - if (environment === 'production') { - worker_nodes = extractWorkerNodes(data); - 
log(` āœ“ Worker nodes: ${worker_nodes.join(', ')}`); - backup_host = extractBackupNode(data); - log(` āœ“ Backup host: ${backup_host}`); - } + backup_host = extractBackupNode(data); + log(` āœ“ Backup host: ${backup_host}`); generateInventory( environment, diff --git a/infrastructure/environments/templates.ts b/infrastructure/environments/templates.ts index 00f5cb56..4fad2d4c 100644 --- a/infrastructure/environments/templates.ts +++ b/infrastructure/environments/templates.ts @@ -2,7 +2,12 @@ import fs from "fs"; import path from "path"; import { log, success, warn } from './logger' import * as yaml from 'js-yaml'; +import Handlebars from 'handlebars'; +// Register a helper to increment numbers +Handlebars.registerHelper('data_label_idx', function(value) { + return parseInt(value) + 2; +}); export function readYamlFile(filePath: any): any { const fileContent = fs.readFileSync(filePath, "utf8"); @@ -10,26 +15,12 @@ export function readYamlFile(filePath: any): any { } -// Refactor with jsonpath +// Extract users from the old inventory export function extractAndModifyUsers(data: any): any { if (!data?.all?.vars?.users) { - throw new Error('Invalid YAML structure: missing all.vars.users'); + return { users: [] }; } - const users = data.all.vars.users.map((user: any) => { - if (user.sudoer === true) { - delete user.sudoer; - user.role = 'admin'; - } - return user; - }); - - // Return with "users:" key - return yaml.dump({ users }, { - indent: 2, - lineWidth: -1, - noRefs: true, - }); - + return data.all.vars.users; } export function dockerManagerFirst(data: any): string { @@ -46,7 +37,7 @@ export function dockerManagerFirst(data: any): string { export function extractBackupNode(data: any): string { if (!data?.['backups']?.hosts) { - throw new Error('Invalid YAML structure: missing backups.hosts'); + return ''; } const hosts = data['backups'].hosts; const backupHostEntry = Object.values(hosts) @@ -57,7 +48,7 @@ export function extractBackupNode(data: any): string { export function extractWorkerNodes(data: any): string[] { if (!data?.['docker-workers']?.hosts) { - throw new Error('Invalid YAML structure: missing docker-workers.hosts'); + return []; } const hosts = data['docker-workers'].hosts; const worker_hosts = Object.values(hosts) @@ -66,20 +57,6 @@ export function extractWorkerNodes(data: any): string[] { return worker_hosts; } -/** - * Replace placeholders in file content. - * Customize the replacements map to your needs. 
- */ -function replacePlaceholders(content: string, replacements: Record): string { - let updated = content; - for (const [key, value] of Object.entries(replacements)) { - const regex = new RegExp(`\\{\\{${key}\\}\\}`, "g"); // matches ${KEY} - let clear_value = String(value).replace(/[\x00-\x1F\x7F]/g, ""); // remove control characters - updated = updated.replace(regex, clear_value); - } - return updated; -} - /** * Copy charts-values directory into environments/ * @param env Environment name @@ -110,8 +87,8 @@ export function copyChartsValues(env: string, replacements: Record){ } let template = fs.readFileSync(templatePath, "utf-8"); - // Extract worker nodes and backup host from values - let worker_nodes = values['worker_nodes'].map((e: string) => String(e) - .replace(/[\x00-\x1F\x7F]/g, "")) - .filter((e: string) => e.length > 0); - - // Generate workers block - if (worker_nodes && worker_nodes.length > 0) { - let workersBlock = ` - # Workers section is optional, for single node cluster feel free to remove this section - # section can be added later - # more workers can be added later as well - workers: - hosts:`; - - worker_nodes.forEach((host: string, index: number) => { - workersBlock += ` - worker${index}: - ansible_host: ${host} - labels: - # Labels have index + 2 for backward compatibility with existing swarm setup - role: data${index + 2} -`; - }); - - template = template.replace('{{WORKERS_BLOCK}}', workersBlock); - } else { - // No worker nodes, remove the placeholder - template = template.replace('{{WORKERS_BLOCK}}', ''); - } - - if (values['users']) { - const indentedYaml = values['users'].split('\n') - .filter((line: string) => line.trim().length > 0) - .map((line: string) => ' ' + line) // indent each line with 4 spaces - .join('\n'); - template = template.replace('{{USERS_BLOCK}}', indentedYaml); - } else { - template = template.replace('{{USERS_BLOCK}}', ' users: []'); - } - // Generate backup block if backup_host is provided - const backupHost = String(values['backup_host']).replace(/[\x00-\x1F\x7F]/g, ""); - let backupBlock = ''; - if (backupHost.length > 0) { - backupBlock = ` - # backup section is optional, feel free to remove if backups are not enabled - # section can be added later - backup: - hosts: - backup1: - ansible_host: ${backupHost} -`; - } - template = template.replace('{{BACKUP_BLOCK}}', backupBlock); + const tpl = Handlebars.compile(template); + values['single_node'] = (values['worker_nodes'].length > 0 || values['backup_host']) ? "false" : "true"; + console.log(values); + const updated = tpl(values); - // Determine if single-node or multi-node - values['single_node'] = (worker_nodes.length > 0 || backupHost) ? 
"false" : "true"; - const updated = replacePlaceholders(template, values); - values fs.mkdirSync(path.dirname(outputPath), { recursive: true }); fs.writeFileSync(outputPath, updated); log(`\nāœ… Generated inventory file at ${outputPath}\n`); diff --git a/infrastructure/environments/templates/inventory/inventory.template.yml b/infrastructure/environments/templates/inventory/inventory.template.yml index bc2e9a30..fa06b05c 100644 --- a/infrastructure/environments/templates/inventory/inventory.template.yml +++ b/infrastructure/environments/templates/inventory/inventory.template.yml @@ -37,7 +37,24 @@ all: # - present: user is allowed to login # - absent: user account is disabled # users: [] -{{USERS_BLOCK}} + {{#if users}} + users: + {{#each users as |user|}} + - name: {{user.name}} + ssh_keys: + {{#each user.ssh_keys as |key| }} + - {{key}} + {{/each}} + state: {{user.state}} + {{#if user.sudoer}} + role: admin + {{else}} + role: operator + {{/if}} + {{/each}} + {{else}} + users: [] + {{/if}} children: master: @@ -53,8 +70,25 @@ all: traefik-role: ingress # By default all datastores are deployed to node with role data1 role: data1 - -{{WORKERS_BLOCK}} - - -{{BACKUP_BLOCK}} \ No newline at end of file + {{#if worker_nodes}} + # Workers section is optional, for single node cluster feel free to remove this section + # section can be added later + # more workers can be added later as well + workers: + hosts: + {{#each worker_nodes as |host idx|}} + worker{{idx}}: + ansible_host: {{host}} + labels: + # Labels have index + 2 for backward compatibility with existing swarm setup + role: data{{data_label_idx idx}} + {{/each}} + {{/if}} + {{#if backup_host}} + # backup section is optional, feel free to remove if backups are not enabled + # section can be added later + backup: + hosts: + backup0: + ansible_host: {{backup_host}} + {{/if}} diff --git a/package.json b/package.json index b383ca1a..22353718 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,8 @@ "ts-node": "^10.9.1", "typescript": "^5.1.6", "js-yaml": "4.1.0", - "glob": "11.0.3" + "glob": "11.0.3", + "handlebars": "^4.7.8" }, "dependencies": { "@types/node": "^24.0.0", diff --git a/yarn.lock b/yarn.lock index 34f2ff28..d6af4c7e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -338,6 +338,18 @@ glob@11.0.3: package-json-from-dist "^1.0.0" path-scurry "^2.0.0" +handlebars@^4.7.8: + version "4.7.8" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.8.tgz#41c42c18b1be2365439188c77c6afae71c0cd9e9" + integrity sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.2" + source-map "^0.6.1" + wordwrap "^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + husky@9.1.7: version "9.1.7" resolved "https://registry.yarnpkg.com/husky/-/husky-9.1.7.tgz#d46a38035d101b46a70456a850ff4201344c0b2d" @@ -411,6 +423,11 @@ minimatch@^10.0.3: dependencies: "@isaacs/brace-expansion" "^5.0.0" +minimist@^1.2.5: + version "1.2.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + minipass@^7.1.2: version "7.1.2" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" @@ -421,6 +438,11 @@ nan@^2.19.0, nan@^2.23.0: resolved "https://registry.yarnpkg.com/nan/-/nan-2.24.0.tgz#a8919b36e692aa5b260831910e4f81419fc0a283" integrity 
sha512-Vpf9qnVW1RaDkoNKFUvfxqAbtI8ncb8OJlqZ9wwpXzWPEsvsB1nvdUi6oYrHIkQ1Y/tMDnr1h4nczS0VB9Xykg== +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + node-fetch@^2.6.7: version "2.7.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" @@ -488,6 +510,11 @@ sisteransi@^1.0.5: resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== +source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + ssh2@^1.17.0: version "1.17.0" resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.17.0.tgz#dc686e8e3abdbd4ad95d46fa139615903c12258c" @@ -581,6 +608,11 @@ typescript@^5.1.6: resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.2.tgz#d93450cddec5154a2d5cabe3b8102b83316fb2a6" integrity sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A== +uglify-js@^3.1.4: + version "3.19.3" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.19.3.tgz#82315e9bbc6f2b25888858acd1fff8441035b77f" + integrity sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ== + undici-types@~5.26.4: version "5.26.5" resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" @@ -626,6 +658,11 @@ which@^2.0.1: dependencies: isexe "^2.0.0" +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== + "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" From 3a2bd3e9e6a276bf778a8c21f44ef886b2d85049 Mon Sep 17 00:00:00 2001 From: Vadym Mudryi Date: Wed, 28 Jan 2026 15:25:17 +0200 Subject: [PATCH 40/40] testing --- .github/workflows/import-config.yml | 106 ---------------------------- 1 file changed, 106 deletions(-) delete mode 100644 .github/workflows/import-config.yml diff --git a/.github/workflows/import-config.yml b/.github/workflows/import-config.yml deleted file mode 100644 index 1bc82bac..00000000 --- a/.github/workflows/import-config.yml +++ /dev/null @@ -1,106 +0,0 @@ -name: Import Configuration -run-name: Import Configuration - -on: - repository_dispatch: - types: [import-config] - workflow_dispatch: - inputs: - environment: - description: 'Target environment (leave empty for repository level)' - required: false - type: string - default: '' - data_type: - description: 'Type of data to import' - required: false - type: choice - default: secret - options: - - secrets - - variables - data: - description: 'Base64 encoded JSON data (KEY=base64value format, one per line)' - required: true - type: string - secrets: - data: - required: true -jobs: - import: - runs-on: ubuntu-latest - env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} - DATA: ${{ 
inputs.data || github.event.client_payload.data }} - ENVIRONMENT: ${{ inputs.environment || github.event.client_payload.environment }} - REPO: ${{ github.repository }} - steps: - - name: Debug input data - run: | - echo "data=${DATA}" >> $GITHUB_OUTPUT - echo "::add-mask::${DATA}" - NUMBER=$(echo "$DATA" | base64 -d | wc -l) - { - echo "# šŸ“¦ Received configuration:" - echo "" - echo "## Summary" - echo "- šŸŒ Scope/Environment: **${ENVIRONMENT:-repository}**" - echo "- šŸ“¦ Data type: **${DATA_TYPE}**" - echo "- šŸ”¢ Items count: **${NUMBER}**" - echo "" - echo "## Data keys:" - echo "$DATA" | base64 -d | cut -d\= -f1 | sed 's/^/ - /g' - } >> $GITHUB_STEP_SUMMARY - import-secrets: - needs: import - if: inputs.data_type == 'secrets' || github.event.client_payload.data_type == 'secrets' - runs-on: ubuntu-latest - env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} - DATA: ${{ inputs.data || github.event.client_payload.data }} - ENVIRONMENT: ${{ inputs.environment || github.event.client_payload.environment }} - REPO: ${{ github.repository }} - steps: - - name: Import secrets - run: | - echo "šŸ” Importing secrets to ${ENVIRONMENT:-repository level}..." - echo "$DATA" | base64 -d > items.env - - while IFS='=' read -r KEY VALUE; do - [ -z "$KEY" ] && continue - DECODED_VALUE=$(echo "$VALUE" | base64 -d) - echo " āœ“ Setting secret: $KEY" - - if [ -z "$ENVIRONMENT" ]; then - echo "$DECODED_VALUE" | gh secret set "$KEY" --repo "$REPO" - else - echo "$DECODED_VALUE" | gh secret set "$KEY" --env "$ENVIRONMENT" --repo "$REPO" - fi - done < items.env - - echo "āœ… Secrets imported successfully!" - import-variables: - needs: import - if: needs.import.outputs.data_type == 'variables' - runs-on: ubuntu-latest - env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} - ENVIRONMENT: ${{ needs.import.outputs.environment }} - DATA: ${{ needs.import.outputs.data }} - REPO: ${{ github.repository }} - steps: - - name: Import variables - run: | - echo "šŸ“Š Importing variables to ${ENVIRONMENT:-repository level}..." - echo "$DATA" | base64 -d > items.env - # Import each variable - while IFS='=' read -r KEY VALUE; do - [ -z "$KEY" ] && continue - DECODED_VALUE=$(echo "$VALUE" | base64 -d) - echo " āœ“ Setting variable: $KEY" - if [ -z "$ENVIRONMENT" ]; then - gh variable set "$KEY" --body "$DECODED_VALUE" --repo "$REPO" - else - gh variable set "$KEY" --body "$DECODED_VALUE" --env "$ENVIRONMENT" --repo "$REPO" - fi - done < items.env
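
For reference, the import-config workflow shown in the final patch reads its `data` input as base64-encoded text with one `KEY=base64value` pair per line, and accepts the same fields through `repository_dispatch` (event type `import-config`, client_payload fields `environment`, `data_type`, `data`). The sketch below shows how a caller might assemble and dispatch such a payload; the repository slug, key names, and values are illustrative placeholders, GNU base64 (`-w0`) and an authenticated gh CLI are assumed, and it only applies to a revision of the repository where this workflow is installed.

# Sketch only: build the KEY=base64value payload expected by import-config and
# fire the repository_dispatch event. OWNER/REPO, the key names and the values
# below are placeholders; GNU base64 (-w0) and an authenticated gh CLI assumed.
REPO="OWNER/REPO"

# One KEY=<base64 of value> pair per line, then base64-encode the whole list,
# mirroring the `echo "$DATA" | base64 -d > items.env` decode step in the workflow.
DATA=$(
  {
    printf 'SMTP_HOST=%s\n' "$(printf '%s' 'smtp.example.org' | base64 -w0)"
    printf 'SMTP_PORT=%s\n' "$(printf '%s' '587' | base64 -w0)"
  } | base64 -w0
)

# An empty environment means repository-level scope, matching the workflow's
# ${ENVIRONMENT:-repository} handling.
gh api --method POST "repos/${REPO}/dispatches" \
  -f event_type=import-config \
  -f "client_payload[environment]=" \
  -f "client_payload[data_type]=secrets" \
  -f "client_payload[data]=${DATA}"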