Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
8bb3cf4
Bumped versions of base image dependencies (#365)
nicksantamaria Jul 28, 2025
e97cc1d
Adds workflow file to sync 6.x to 7.x (#368)
nicksantamaria Jul 28, 2025
dc77275
Added aws-es-proxy base image (#370)
nicksantamaria Aug 1, 2025
d2dc51a
Added required label to aws-es-proxy image
nicksantamaria Aug 1, 2025
cbd91bb
Fixed issue with aws-es-proxy with various proxy flags envvar combina…
nicksantamaria Aug 1, 2025
f6fe319
Updated aws-es-proxy port default from 3000 to 9200 (#372)
nicksantamaria Aug 3, 2025
672dcce
SDPPE-42: removed BAY_INGRESS_* environment variables and related log…
aparador Aug 5, 2025
44e5985
[SD-1147] Removed the colour indicator from bay.
MdNadimHossain Aug 12, 2025
87d4c03
[SDPPE-124] Fix 'aws es proxy not found' (#377)
GROwen Aug 12, 2025
4b83b6c
Revert "[SDPPE-124] Fix 'aws es proxy not found' (#377)"
GROwen Aug 13, 2025
507cabf
[SDPPE-127] Pinned version of EE community.general collection (#380)
GROwen Aug 13, 2025
185e000
SDPPE-124: Update branch-sync workflow resilient to cherry-pick confl…
aparador Aug 14, 2025
a9f8cc9
SDPPE-124: Update branch-sync workflow resilient to cherry-pick confl…
aparador Aug 14, 2025
2619b66
SDPPE-124: use merge strategy to prefer 7.x on conflicts
aparador Aug 15, 2025
f5fc9ee
SDPPE-123: Enable build and scan workflows for 7.x branch
aparador Aug 14, 2025
729f525
SDPPE-123: Align tagging with support for 5.x and set 7x schedule sca…
aparador Aug 15, 2025
8155445
Merge pull request #376 from dpc-sdp/bug/SD-1147-fixing-one-environme…
MdNadimHossain Aug 18, 2025
733f7d7
Update lagoon.api collection version to 2.2.0 (#384)
Randominty Aug 19, 2025
62bd90b
Merge pull request #383 from dpc-sdp/feature/SDPPE-123-7x-workflows
aparador Aug 22, 2025
539b104
Merge pull request #382 from dpc-sdp/feature/sdppe-124
aparador Aug 22, 2025
6c52786
[SD-1322] Prevent scheduled_transition crashes caused by missing node…
vincent-gao Oct 7, 2025
d97e550
Merge pull request #385 from dpc-sdp/SD-1322-add-scheduled_transition…
vincent-gao Oct 8, 2025
0f4ea22
[SDPPE-126] Added opensearch image (#379)
nicksantamaria Oct 10, 2025
290249e
[SDPPE-169] Removed deprecated class from awx-ee (#386)
GROwen Oct 12, 2025
681d671
Update aws-es-proxy go packages (#387)
nicksantamaria Oct 16, 2025
abb484e
[SDPPE-167] Removed install steps for Hub cli.
GROwen Oct 17, 2025
9e5d08c
Removed redundant opensearch config file. (#389)
nicksantamaria Oct 17, 2025
ec5197a
[SDPPE-121] Added support for BAY_OPENSEARCH_ environment variables a…
nicksantamaria Oct 28, 2025
ee204a1
[SD-1111] Update prefix for data pipelines
vincent-gao Oct 28, 2025
9fffed9
Merge pull request #390 from dpc-sdp/SD-1111-update-search-listing-pr…
vincent-gao Oct 29, 2025
f011cd4
Backport GHA 7.x branch sync updates.
GROwen Nov 17, 2025
2c1d57e
Revert "Backport GHA 7.x branch sync updates."
GROwen Nov 17, 2025
f29fa48
[SDPPE-167] Refactored gh cli install.
GROwen Nov 18, 2025
29ddf43
[SDPPE-167] Fixed syntax error.
GROwen Nov 18, 2025
2ce6bcf
Merge pull request #388 from dpc-sdp/feature/SDPPE-167--remove-hub-cl…
rashed-k Nov 19, 2025
1a3a455
[SD-1247] Fix branch sync (#401)
GROwen Nov 24, 2025
677d85e
[hotfix] Fixed syntax error in GHA workflow.
GROwen Nov 25, 2025
dc2754d
Merge remote-tracking branch 'origin/7.x' into feature/pr-402-fix-con…
GROwen Nov 25, 2025
330438c
Reverted php version upgrade for 6.x
GROwen Nov 25, 2025
43ad68a
Revert "Reverted php version upgrade for 6.x"
GROwen Nov 25, 2025
9b98119
Reverted php version upgrade for 7.x
GROwen Nov 25, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
102 changes: 68 additions & 34 deletions .github/workflows/branch-sync.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,56 +13,90 @@ permissions:
jobs:
create-pull-request:
runs-on: ubuntu-latest
container: ghcr.io/dpc-sdp/bay/ci-builder:6.x
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
fetch-depth: 0
ref: 6.x

- name: Set ownership of the workspace
run: chown -R $(id -u):$(id -g) $PWD

- name: Configure git
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"

- name: Prepare sync branch
- name: Create PR
id: create-pr
run: |
# Fetch all branches
git fetch origin
set +e
output=$(gh pr create --base 7.x --title "Automated PR - merge 6.x into 7.x" --body "This pull request syncs relevant changes from the 6.x branch into the 7.x branch. This PR was automatically generated by the CI workflow." --label "sync" --label "automated" 2>&1)
COMMAND_STATUS=$?

diff=$(echo $output | grep -q "No commits" && echo "false" || echo "true")

# Create a new sync branch from 7.x (target)
git checkout -b temp-sync-branch origin/7.x
echo "stdout<<EOF" | tee "$GITHUB_OUTPUT"
echo $output | tee -a "$GITHUB_OUTPUT"
( echo ; echo "EOF" ) | tee -a "$GITHUB_OUTPUT"

# Find all 6.x commits not on 7.x and cherry-pick them
COMMITS=$(git log --reverse --pretty=format:"%H" origin/7.x..origin/6.x)
if [ -z "$COMMITS" ]; then
echo "NO_CHANGES=true" >> "${GITHUB_ENV}"
exit 0
if [[ $diff == "false" ]]; then
echo "There are no commits between the base and HEAD branches." >> $GITHUB_STEP_SUMMARY
COMMAND_STATUS=0
fi

for COMMIT in $COMMITS; do
git log -1 --format=%s "${COMMIT}"
git cherry-pick "${COMMIT}" || { echo "Cherry-pick failed"; exit 1; }
done
echo "diff=$diff" >> "$GITHUB_OUTPUT"

- name: Create a pull request from sync branch to 7.x
if: env.NO_CHANGES != 'true'
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.GITHUB_TOKEN }}
base: 7.x
branch: temp-sync-branch
title: "Sync changes from 6.x"
body: |
This pull request syncs relevant changes from the `6.x` branch into the `7.x` branch.

This PR was automatically generated by the CI workflow.
labels: sync, automated
draft: false
sign-commits: true
exit $COMMAND_STATUS
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: If successful set PR number to env var
if: steps.create-pr.conclusion == 'success' && steps.create-pr.outputs.diff == 'true'
run: |
output="${{ steps.create-pr.outputs.stdout }}"
pr_url=$(echo $output | grep -o "^https.*[0-9]")
pr=$(echo $pr_url | cut -d "/" -f 7)

          echo "PR_URL=$pr_url" >> $GITHUB_ENV
echo "PR_NUMBER=$pr" >> $GITHUB_ENV

- name: Enable auto-merge
if: env.NO_CHANGES != 'true'
- name: Successful, check for conflicts
if: env.PR_NUMBER
run: |
gh pr merge temp-sync-branch --auto --merge
conflicts=$(gh pr view ${{ env.PR_NUMBER }} --json mergeStateStatus --jq 'if .mergeStateStatus == "DIRTY" then true else false end')
echo "CONFLICTS=$conflicts" >> $GITHUB_ENV
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Send Slack notification if pr created but conflicts exist
id: notify_slack_conflicts
uses: slackapi/slack-github-action@v2.1.1
        if: always() && env.CONFLICTS == 'true'
env:
LINK: ${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}
with:
webhook: ${{ secrets.SLACK_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
text: "GitHub Action - ${{ github.workflow }} requires manual intervention. \n${{ env.LINK }}"
blocks:
- type: "section"
text:
type: "mrkdwn"
text: "${{ github.workflow }} needs conflicts resolved.\n${{ env.PR_URL }}"

- name: Send Slack notification if PR creation failed for a reason other than "no commits"
id: notify_slack_failed
uses: slackapi/slack-github-action@v2.1.1
if: always() && (steps.create-pr.conclusion == 'failure' && steps.create-pr.outputs.diff == 'true')
env:
LINK: ${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}
with:
webhook: ${{ secrets.SLACK_WEBHOOK_URL }}
webhook-type: incoming-webhook
payload: |
channel: ${{ secrets.SLACK_CHANNEL_ID }}
text: "GitHub Action ${{ github.workflow }} failed. \n${{ env.LINK }}"
9 changes: 8 additions & 1 deletion gh-actions-bake.hcl
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,16 @@ target "ci-builder" {

platforms = ["linux/amd64", "linux/arm64"]
}
target "opensearch" {
inherits = ["docker-metadata-action"]
context = "${CONTEXT}/opensearch"
dockerfile = "Dockerfile"

platforms = ["linux/amd64", "linux/arm64"]
}
target "elasticsearch" {
inherits = ["docker-metadata-action"]
context = "${CONTEXT}/elasticsearch"
context = "${CONTEXT}/opensearch"
dockerfile = "Dockerfile"

platforms = ["linux/amd64", "linux/arm64"]
Expand Down
1 change: 1 addition & 0 deletions images/aws-es-proxy/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ RUN apk add --no-cache git
RUN git clone https://github.com/abutaha/aws-es-proxy.git /go/src/github.com/abutaha/aws-es-proxy
WORKDIR /go/src/github.com/abutaha/aws-es-proxy

RUN go get -u && go mod tidy
RUN CGO_ENABLED=0 GOOS=linux go build -o aws-es-proxy

FROM alpine:latest
Expand Down
3 changes: 0 additions & 3 deletions images/awx-ee/execution-environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ additional_build_steps:
- ARG NVM_DIR="/runner/.nvm"
- ARG PHP_VERSION="8.4"
- ARG COMPOSER_VERSION="2.7.7"
- ARG HUB_VERSION="2.14.2"
- ARG GOJQ_VERSION="0.12.17"
- ARG HELM_VERSION="3.18.3"
- ARG YAMLFMT_VERSION="0.17.2"
Expand Down Expand Up @@ -67,8 +66,6 @@ additional_build_steps:

- RUN curl -L "https://github.com/uselagoon/lagoon-cli/releases/download/${LAGOON_CLI_VERSION}/lagoon-cli-${LAGOON_CLI_VERSION}-linux-amd64" -o /usr/local/bin/lagoon
- RUN chmod +x /usr/local/bin/lagoon
- RUN curl -L "https://github.com/github/hub/releases/download/v${HUB_VERSION}/hub-linux-amd64-${HUB_VERSION}.tgz" -o /tmp/hub && tar -xvf /tmp/hub -C /tmp && mv /tmp/hub-linux-amd64-${HUB_VERSION}/bin/hub /usr/local/bin
- RUN chmod +x /usr/local/bin/hub
- RUN lagoon config feature --enable-local-dir-check false --force
- RUN curl -sS "https://getcomposer.org/download/${COMPOSER_VERSION}/composer.phar" --output composer.phar
- RUN chmod +x composer.phar
Expand Down
5 changes: 3 additions & 2 deletions images/awx-ee/requirements.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@ collections:
- ansible.posix
- ansible.utils
- awx.awx
- community.general
- name: community.general
version: "11.1.2"
- kubernetes.core
- name: lagoon.api
source: https://github.com/salsadigitalauorg/lagoon_ansible_collection.git
version: 2.1.0
version: "2.2.4"
type: git
- name: section.api
source: https://github.com/salsadigitalauorg/section_ansible_collection.git
Expand Down
8 changes: 1 addition & 7 deletions images/ci-builder/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ FROM ghcr.io/dpc-sdp/sumocli:v0.11.1 AS sumocli
FROM php:8.4-cli-alpine
ARG AHOY_VERSION=2.4.0
ARG GOJQ_VERSION=0.12.17
ARG HUB_VERSION=2.14.2
ARG LAGOON_CLI_VERSION=0.32.0
ARG SHIPSHAPE_VERSION=1.0.0-alpha.1.5.1

Expand All @@ -26,6 +25,7 @@ RUN apk add --update --no-cache \
docker \
docker-compose \
git \
github-cli \
libffi-dev \
musl-dev \
ncurses \
Expand All @@ -37,12 +37,6 @@ RUN apk add --update --no-cache \
python3 \
python3-dev

## Install GitHub CLI tool.
RUN curl -sL "https://github.com/mislav/hub/releases/download/v${HUB_VERSION}/hub-$(echo ${TARGETPLATFORM:-linux/amd64} | tr '/' '-')-${HUB_VERSION}.tgz" -o /tmp/hub.tgz && \
tar -C /tmp -xzvf /tmp/hub.tgz && \
mv /tmp/hub-$(echo ${TARGETPLATFORM:-linux/amd64} | tr '/' '-')-${HUB_VERSION}/bin/hub /usr/local/bin && \
chmod 755 /usr/local/bin

## Install required PHP extensions for Drupal and python packages.
RUN apk add --no-cache \
py3-flake8 \
Expand Down
6 changes: 0 additions & 6 deletions images/elasticsearch/Dockerfile

This file was deleted.

7 changes: 7 additions & 0 deletions images/opensearch/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
FROM uselagoon/opensearch-2:latest

RUN for plugin in \
analysis-kuromoji \
analysis-icu; do \
/usr/share/opensearch/bin/opensearch-plugin install $plugin; \
done
1 change: 1 addition & 0 deletions images/php/mtk/drupal.conf
Original file line number Diff line number Diff line change
Expand Up @@ -63,3 +63,4 @@ nodata:
- router
- sessions
- webform_*
- scheduled_transition*
67 changes: 45 additions & 22 deletions images/php/settings.php
Original file line number Diff line number Diff line change
Expand Up @@ -314,37 +314,60 @@
$config['clamav.settings']['mode_daemon_tcpip']['hostname'] = $clamav_host;
$config['clamav.settings']['mode_daemon_tcpip']['port'] = $clamav_port;

// Configure elasticsearch connections from environment variables.
if (getenv('SEARCH_HASH') && getenv('SEARCH_URL')) {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['url'] = sprintf('http://%s.%s', getenv('SEARCH_HASH'), getenv('SEARCH_URL'));
$opensearch_profile = getenv('BAY_OPENSEARCH_PROFILE') ?: 'sdp-elastic';
if ($opensearch_profile == 'sdp-elastic') {
// Configuration for legacy sdp-elastic integration.
// @todo remove this sdp-elastic block when all applications migrated to opensearch.
if (getenv('SEARCH_HASH') && getenv('SEARCH_URL')) {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['url'] = sprintf('http://%s.%s', getenv('SEARCH_HASH'), getenv('SEARCH_URL'));
} else {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['url'] = "http://elasticsearch:9200";
}

if (getenv('SEARCH_INDEX')) {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['rewrite']['rewrite_index'] = 1;
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['rewrite']['index'] = [
'prefix' => getenv('SEARCH_INDEX'),
'suffix' => '',
];
} else {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['rewrite']['index'] = [
'prefix' => 'elasticsearch_index_default_',
'suffix' => '',
];
}

if (getenv('SEARCH_AUTH_USERNAME') && getenv('SEARCH_AUTH_PASSWORD')) {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['username'] = getenv('SEARCH_AUTH_USERNAME');
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['password'] = getenv('SEARCH_AUTH_PASSWORD');
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['use_authentication'] = 1;
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['authentication_type'] = 'Basic';
} else {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['use_authentication'] = 0;
}
// Override data_pipelines url.
$config['data_pipelines.dataset_destination.sdp_elasticsearch']['destinationSettings']['url'] = (getenv('SEARCH_HASH') && getenv('SEARCH_URL')) ? sprintf('http://%s.%s', getenv('SEARCH_HASH'), getenv('SEARCH_URL')) : "http://elasticsearch:9200";
} else {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['url'] = "http://elasticsearch:9200";
}
// Configuration for bay opensearch integration.

// Connect to a proxy service that handles AWS IAM auth.
$endpoint = "http://aws-es-proxy:9200";

if (getenv('SEARCH_INDEX')) {
// Default index prefix looks like "${PROJECT}__${ENVIRONMENT}__". This can be overridden with BAY_OPENSEARCH_PREFIX.
$environment = getenv('LAGOON_ENVIRONMENT') ?: 'default';
$index_prefix = getenv('BAY_OPENSEARCH_PREFIX') ?: sprintf('%s__%s', getenv('LAGOON_PROJECT'), $environment);
$config['elasticsearch_connector.cluster.elasticsearch_bay']['url'] = $endpoint;
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['use_authentication'] = FALSE;
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['rewrite']['rewrite_index'] = 1;
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['rewrite']['index'] = [
'prefix' => getenv('SEARCH_INDEX'),
'suffix' => '',
];
} else {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['rewrite']['index'] = [
'prefix' => 'elasticsearch_index_default_',
'prefix' => sprintf('%s__%s_', $index_prefix, "sapi"),
'suffix' => '',
];
$config['data_pipelines.dataset_destination.sdp_elasticsearch']['destinationSettings']['url'] = $endpoint;
$config['data_pipelines.dataset_destination.sdp_elasticsearch']['destinationSettings']['prefix'] = sprintf('%s__sdp_data_pipelines_', $index_prefix);
}

if (getenv('SEARCH_AUTH_USERNAME') && getenv('SEARCH_AUTH_PASSWORD')) {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['username'] = getenv('SEARCH_AUTH_USERNAME');
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['password'] = getenv('SEARCH_AUTH_PASSWORD');
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['use_authentication'] = 1;
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['authentication_type'] = 'Basic';
} else {
$config['elasticsearch_connector.cluster.elasticsearch_bay']['options']['use_authentication'] = 0;
}

// Override data_pipelines url.
$config['data_pipelines.dataset_destination.sdp_elasticsearch']['destinationSettings']['url'] = (getenv('SEARCH_HASH') && getenv('SEARCH_URL')) ? sprintf('http://%s.%s', getenv('SEARCH_HASH'), getenv('SEARCH_URL')) : "http://elasticsearch:9200";

// Configure tide_logs.
if (getenv('TIDE_LOGS_UDPLOG_HOST')) {
Expand Down
Loading