diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index 0f3007e9a..000000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -# Dockerfile to support rebuild container in codespaces -FROM aztfmod/rover-preview:1.4.2-2303.221435 - -RUN ln -s -f /tf/caf /workspaces/terraform-azurerm-caf - -WORKDIR /tf/caf diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index f4a5c095e..0638f9ff5 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,36 +1,64 @@ { - "name": "Azure CAF rover", - // Update the 'dockerComposeFile' list if you have more compose files or use different names. - "dockerComposeFile": "docker-compose.yml", + "name": "Azure SRE rover", + // Container user to use in VSCode Online and GitHub Codespaces + "image": "mcr.microsoft.com/devcontainers/base:jammy", "remoteUser": "vscode", - // The 'service' property is the name of the service for the container that VS Code should - // use. Update this value and .devcontainer/docker-compose.yml to the real service name. - "service": "rover", - // The optional 'workspaceFolder' property is the path VS Code should open by default when - // connected. This is typically a volume mount in .devcontainer/docker-compose.yml - "workspaceFolder": "/tf/caf", - // Use 'settings' to set *default* container specific settings.json values on container create. - // You can edit these settings after create using File > Preferences > Settings > Remote. - "settings": { - "files.eol": "\n", - "terminal.integrated.defaultProfile.linux": "zsh", - "editor.tabSize": 2, - "terminal.integrated.scrollback": 64000, + + // Volume mounts for the container. + "mounts": [ + "source=volume-caf-vscode,target=/home/vscode", + "source=volume-caf-vscode-bashhistory,target=/commandhistory", + "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind", + "source=${localEnv:HOME}${localEnv:USERPROFILE}/.ssh,target=/home/vscode/.ssh,type=bind,consistency=cached" + ], + + // Run args for the container. + "runArgs": [ + "--cap-add=SYS_PTRACE", + "--security-opt", + "seccomp=unconfined", + "--init", + "--network=host" + ], + + // Adding features to the container. + "features": { + "ghcr.io/azure/azure-dev/azd:latest": {}, + "ghcr.io/devcontainers/features/azure-cli:1": {}, + "ghcr.io/devcontainers-contrib/features/act:1": {}, + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers-contrib/features/zsh-plugins:0": { + "plugins": [ + "aliases syntax-highlighting autosuggestions" + ], + "username": [ + "vscode" + ] + } }, - // Uncomment the next line if you want start specific services in your Docker Compose config. - // "runServices": [], - // Uncomment this like if you want to keep your containers running after VS Code shuts down. - // "shutdownAction": "none", + + // Workspace mounts and environment variables can be added here. + "workspaceMount": "source=${localWorkspaceFolder},target=/tf/caf,type=bind", + "workspaceFolder": "/tf/caf", + + // Uncomment the next line to run commands after the container is created. 
- //"postCreateCommand": "cp -R /tmp/.ssh-localhost/* ~/.ssh && sudo chmod 600 ~/.ssh/* && sudo chown -R $(whoami) /tf/caf && git config --global core.editor vim && pre-commit install && pre-commit autoupdate", + // "postCreateCommand" : "sudo chmod 400 ~/.ssh/* && git config --global core.editor vi && echo 'export PATH=$HOME/.tfenv/tfenv-3.0.0/bin:$PATH' >> ~/.zshrc", "postCreateCommand": "sudo cp -R /tmp/.ssh-localhost/* ~/.ssh && sudo chown -R $(whoami):$(whoami) /tf/caf ~/.ssh && sudo chmod 400 ~/.ssh/* && git config --global core.editor vi && pre-commit install && pre-commit autoupdate", "postStartCommand": "sudo chmod 666 /var/run/docker.sock", - // Add the IDs of extensions you want installed when the container is created in the array below. - "extensions": [ - "4ops.terraform", - "mutantdino.resourcemonitor", - "eamodio.gitlens", - "golang.go" - ] + // "postCreateCommand": "sudo cp -R /tmp/.ssh-localhost/* ~/.ssh && sudo chown -R $(whoami):$(whoami) /tf/caf ~/.ssh && sudo chmod 400 ~/.ssh/* && git config --global core.editor vi && pre-commit install && pre-commit autoupdate", + + + // Adding the extensions you in the container. + "customizations": { + "vscode": { + "extensions": [ + "mutantdino.resourcemonitor", + "eamodio.gitlens", + "GitHub.copilot" + ] + } + } + } diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml deleted file mode 100644 index 80756f724..000000000 --- a/.devcontainer/docker-compose.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- -#------------------------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. -#------------------------------------------------------------------------------------------------------------- - -version: "3.7" -services: - rover: - image: aztfmod/rover:1.7.4-2402.2908 - user: vscode - - labels: - - "caf=Azure CAF" - - volumes: - - ..:/tf/caf - - volume-caf-vscode:/home/vscode - - volume-caf-vscode-bashhistory:/commandhistory - - ~/.ssh:/tmp/.ssh-localhost:ro - - - /var/run/docker.sock:/var/run/docker.sock - - environment: - - HISTFILE=/commandhistory/.bash_history - - HISTSIZE=1000 - - # Overrides default command so things don't shut down after the process ends. 
- command: /bin/sh -c "while sleep 1000; do :; done" - - # rover_ssh: - # image: aztfmod/rover:1.1.6-2202.2503 - # user: vscode - - # labels: - # - "caf=Azure CAF" - - # volumes: - # - ..:/tf/caf - # - volume-caf-vscode:/home/vscode - # - volume-caf-vscode-bashhistory:/commandhistory - # - ~/.ssh:/tmp/.ssh-localhost:ro - - # - /var/run/docker.sock:/var/run/docker.sock - - # environment: - # - HISTFILE=/commandhistory/.bash_history - # - HISTSIZE=1000 - -volumes: - volume-caf-vscode: - labels: - - "caf=Azure CAF" - volume-caf-vscode-bashhistory: diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 859978348..c874ece4e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,6 +11,10 @@ updates: - 1.15.8 - 1.16.0 - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" +- package-ecosystem: "devcontainers" directory: "/" schedule: interval: "daily" \ No newline at end of file diff --git a/.github/workflows/rover-preview.yml b/.github/workflows/rover-preview.yml.disabled similarity index 100% rename from .github/workflows/rover-preview.yml rename to .github/workflows/rover-preview.yml.disabled diff --git a/.github/workflows/rover.yml b/.github/workflows/rover.yml index 38b8fd3f4..8d368d1c7 100644 --- a/.github/workflows/rover.yml +++ b/.github/workflows/rover.yml @@ -17,13 +17,13 @@ jobs: with: username: aztfmod password: ${{ secrets.docker_registry_password }} - # - name: Cache Docker layers - # uses: actions/cache@v2 - # with: - # path: /tmp/.buildx-cache - # key: ${{ runner.os }}-buildx-${{ github.sha }} - # restore-keys: | - # ${{ runner.os }}-buildx- + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- - name: Build the rover run: | set -e diff --git a/.github/workflows/roverlight-build.yml b/.github/workflows/roverlight-build.yml new file mode 100644 index 000000000..2d77194b6 --- /dev/null +++ b/.github/workflows/roverlight-build.yml @@ -0,0 +1,172 @@ +name: Roverlight-Build-Release +# Workflow for building and scanning roverlight image + +on: + workflow_dispatch: + release: + types: [created] + push: + branches: + - 'main' + - 'roverlight' + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + security-events: write # Required for SARIF upload + outputs: + tag: ${{ steps.tag.outputs.date }} + strategy: + matrix: + platform: [linux/amd64, linux/arm64] + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Cache Docker layers + uses: actions/cache@v4 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ matrix.platform }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx-${{ matrix.platform }}- + ${{ runner.os }}-buildx- + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Generate tags + id: tag + run: | + echo "date=$(date +'%g%m.%d%H%M')" >> $GITHUB_OUTPUT + echo "tag=$(date +'%g%m.%d%H%M')" >> $GITHUB_ENV + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}} + + - name: 
Get build start time + id: build-start-time + run: echo "build_start_time=$(date +%s)" >> $GITHUB_OUTPUT + + - name: Building roverlight + id: push + run: | + # Build roverlight only + TARGETARCH=$(echo ${{ matrix.platform }} | cut -d'/' -f2) && \ + docker buildx bake \ + -f docker-bake.hcl \ + -f docker-bake.override.hcl \ + --set *.platform=${{ matrix.platform }} \ + --set *.args.versionRover=${{ steps.tag.outputs.date }} \ + --set *.args.TARGETARCH=$TARGETARCH \ + --set *.cache-from=type=gha \ + --set *.cache-to=type=gha,mode=max \ + --set *.tags=ghcr.io/${{ github.repository }}/roverlight:${{ steps.tag.outputs.date }} \ + --push \ + roverlight + + - name: Calculate build duration + id: build-time + run: | + end_time=$(date +%s) + start_time=${{ steps.build-start-time.outputs.build_start_time }} + duration=$((end_time - start_time)) + echo "build_duration=$duration" >> $GITHUB_OUTPUT + echo "🏗️ Build took $duration seconds" >> $GITHUB_STEP_SUMMARY + + - name: Comment build time on PR + if: github.event_name == 'pull_request' + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const duration = ${{ steps.build-time.outputs.build_duration }}; + const message = `🏗️ Build metrics:\n- Build duration: ${duration} seconds`; + github.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: message + }); + + scan: + needs: build + runs-on: ubuntu-latest + env: + TAG: ${{ needs.build.outputs.tag }} + permissions: + contents: read + packages: read + security-events: write + steps: + - name: Get scan start time + id: scan-start-time + run: echo "scan_start_time=$(date +%s)" >> $GITHUB_OUTPUT + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Pull image for scanning + run: docker pull ghcr.io/${{ github.repository }}/roverlight:${{ env.TAG }} + + - name: Scan container + uses: anchore/scan-action@v3 + id: scan + with: + image: ghcr.io/${{ github.repository }}/roverlight:${{ env.TAG }} + severity-cutoff: critical + fail-build: false + output-format: sarif + + - name: Upload scan SARIF report + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: ${{ steps.scan.outputs.sarif }} + + - name: Calculate scan duration + id: scan-time + run: | + end_time=$(date +%s) + start_time=${{ steps.scan-start-time.outputs.scan_start_time }} + duration=$((end_time - start_time)) + echo "scan_duration=$duration" >> $GITHUB_OUTPUT + echo "🔍 Security scan took $duration seconds" >> $GITHUB_STEP_SUMMARY + + - name: Comment scan time on PR + if: github.event_name == 'pull_request' + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const duration = ${{ steps.scan-time.outputs.scan_duration }}; + const message = `🔍 Security scan metrics:\n- Scan duration: ${duration} seconds`; + github.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: message + }); diff --git a/.github/workflows/roverlight-security-scan.yml b/.github/workflows/roverlight-security-scan.yml new file mode 100644 index 000000000..d55191306 --- /dev/null +++ b/.github/workflows/roverlight-security-scan.yml @@ -0,0 +1,41 @@ +name: Roverlight-Security-Scan + +on: + workflow_run: + workflows: ["Roverlight-Build-Release"] + types: + - completed + +env: + REGISTRY: ghcr.io + 
IMAGE_NAME: ${{ github.repository }} + +jobs: + scan: + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + permissions: + contents: read + packages: read + security-events: write + steps: + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Scan container + uses: anchore/scan-action@v3 + id: scan + with: + image: ghcr.io/${{ github.repository }}/roverlight:${{ github.event.workflow_run.head_commit.id }} + severity-cutoff: critical + fail-build: false + output-format: sarif + + - name: Upload scan SARIF report + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: ${{ steps.scan.outputs.sarif }} diff --git a/Dockerfile b/Dockerfile index c5bbbfcf3..96ee4e3e9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,142 +5,148 @@ FROM ubuntu:22.04 AS base SHELL ["/bin/bash", "-c"] - -# Arguments set during docker-compose build -b --build from .env file - -ARG versionVault \ - versionKubectl \ - versionKubelogin \ - versionDockerCompose \ - versionPowershell \ - versionPacker \ - versionGolang \ - versionTerraformDocs \ - versionAnsible \ - versionTerrascan \ - versionTfupdate \ - extensionsAzureCli \ - SSH_PASSWD \ - TARGETARCH \ - TARGETOS - +# Build arguments +ARG TARGETOS=linux +ARG TARGETARCH=amd64 ARG USERNAME=vscode ARG USER_UID=1000 -ARG USER_GID=${USER_UID} +ARG USER_GID=1000 +ARG TF_PLUGIN_CACHE_DIR=/tf/cache + +# Version arguments +ARG versionDockerCompose +ARG versionGolang +ARG versionKubectl +ARG versionKubelogin +ARG versionPacker +ARG versionPowershell +ARG versionTerraformDocs +ARG versionVault +ARG versionAnsible +ARG versionTerrascan +ARG versionTfupdate +ARG extensionsAzureCli + +# Set environment variables +ENV DEBIAN_FRONTEND=noninteractive \ + TZ=UTC \ + LANG=en_US.UTF-8 \ + LANGUAGE=en_US:en \ + LC_ALL=en_US.UTF-8 -ENV SSH_PASSWD=${SSH_PASSWD} \ - USERNAME=${USERNAME} \ - versionVault=${versionVault} \ - versionGolang=${versionGolang} \ - versionKubectl=${versionKubectl} \ - versionKubelogin=${versionKubelogin} \ - versionDockerCompose=${versionDockerCompose} \ - versionTerraformDocs=${versionTerraformDocs} \ - versionPacker=${versionPacker} \ - versionPowershell=${versionPowershell} \ - versionAnsible=${versionAnsible} \ - extensionsAzureCli=${extensionsAzureCli} \ - versionTerrascan=${versionTerrascan} \ - versionTfupdate=${versionTfupdate} \ - PATH="${PATH}:/opt/mssql-tools/bin:/home/vscode/.local/lib/shellspec/bin:/home/vscode/go/bin:/usr/local/go/bin" \ +# Set user environment variables +ENV USERNAME=${USERNAME} \ + USER_UID=${USER_UID} \ + USER_GID=${USER_GID} \ + PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin:/opt/mssql-tools/bin:/home/${USERNAME}/.local/lib/shellspec/bin:/home/${USERNAME}/go/bin \ TF_DATA_DIR="/home/${USERNAME}/.terraform.cache" \ - TF_PLUGIN_CACHE_DIR="/tf/cache" \ + TF_PLUGIN_CACHE_DIR=/tf/cache \ TF_REGISTRY_DISCOVERY_RETRY=5 \ TF_REGISTRY_CLIENT_TIMEOUT=15 \ - ARM_USE_MSGRAPH=true \ - LANG=en_US.UTF-8 \ - LANGUAGE=en_US:en \ - LC_ALL=en_US.UTF-8 \ - DEBIAN_FRONTEND=noninteractive \ - DEBCONF_NONINTERACTIVE_SEEN=true + ARM_USE_MSGRAPH=true + +# Configure locales first +RUN apt-get update && \ + apt-get install -y --no-install-recommends locales tzdata && \ + echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \ + locale-gen en_US.UTF-8 && \ + update-locale LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LANGUAGE=en_US:en WORKDIR /tf/rover COPY 
./scripts/.kubectl_aliases . COPY ./scripts/zsh-autosuggestions.zsh . - # installation common tools + +# Install common tools +# Remove duplicate ARG/ENV declarations + +# Install base packages RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - apt-transport-https \ - apt-utils \ - bsdmainutils \ - ca-certificates \ - curl \ - fonts-powerline \ - gcc \ - gettext \ - git \ - gpg \ - gpg-agent \ - jq \ - less \ - locales \ - make \ - # Networking tools - dnsutils net-tools iputils-ping traceroute \ - python3-dev \ - python3-pip \ - rsync \ - # openvpn client and ipsec tools to generate certificates - openvpn network-manager-openvpn strongswan strongswan-pki libstrongswan-extra-plugins libtss2-tcti-tabrmd0 openssh-client \ - # - software-properties-common \ - gosu \ - sudo \ - unzip \ - vim \ - wget \ - zsh \ - zip && \ - # - # Create USERNAME - # - echo "Creating ${USERNAME} user..." && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + apt-transport-https \ + apt-utils \ + bsdmainutils \ + ca-certificates \ + curl \ + gpg \ + gpg-agent && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + ca-certificates \ + curl \ + fonts-powerline \ + gcc \ + gettext \ + git \ + gpg \ + gpg-agent \ + jq \ + less \ + locales \ + make \ + dnsutils \ + net-tools \ + iputils-ping \ + traceroute \ + python3-dev \ + python3-pip \ + rsync \ + software-properties-common \ + sudo \ + unzip \ + vim \ + wget \ + zsh \ + zip && \ + # + # Create user and group groupadd docker && \ - useradd --uid $USER_UID -m -G docker ${USERNAME} && \ - # - # Set the locale - locale-gen en_US.UTF-8 && \ + useradd --uid 1000 -m -G docker vscode && \ # # ############### APT Repositories ################### # # Add Microsoft key # - curl -sSL https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/microsoft.gpg && \ + mkdir -p /etc/apt/keyrings && \ + curl -fsSL https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /etc/apt/keyrings/microsoft.gpg && \ # # Add Microsoft repository # - gosu root apt-add-repository https://packages.microsoft.com/ubuntu/22.04/prod && \ + echo "deb [arch=${TARGETARCH} signed-by=/etc/apt/keyrings/microsoft.gpg] https://packages.microsoft.com/ubuntu/22.04/prod jammy main" > /etc/apt/sources.list.d/microsoft.list && \ # # Add Docker repository # - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor > /etc/apt/trusted.gpg.d/docker-archive-keyring.gpg && \ - echo "deb [arch=${TARGETARCH}] https://download.docker.com/linux/ubuntu focal stable" > /etc/apt/sources.list.d/docker.list && \ + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg && \ + echo "deb [arch=${TARGETARCH} signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu jammy stable" > /etc/apt/sources.list.d/docker.list && \ # # Kubernetes repo # - curl -fsSL https://pkgs.k8s.io/core:/stable:/v${versionKubectl}/deb/Release.key | gpg --dearmor -o /etc/apt/keyrings/kubernetes-apt-keyring.gpg && \ - echo "deb [signed-by=/etc/apt/keyrings/kubernetes-apt-keyring.gpg] https://pkgs.k8s.io/core:/stable:/v${versionKubectl}/deb/ /" | gosu root tee /etc/apt/sources.list.d/kubernetes.list && \ + curl -fsSL https://pkgs.k8s.io/core:/stable:/v1.29/deb/Release.key | gpg --dearmor -o /etc/apt/keyrings/kubernetes.gpg && \ + echo "deb [signed-by=/etc/apt/keyrings/kubernetes.gpg] https://pkgs.k8s.io/core:/stable:/v1.29/deb/ /" | tee 
/etc/apt/sources.list.d/kubernetes.list > /dev/null &&\ # # Github shell - curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | gosu root dd of=/etc/apt/trusted.gpg.d/githubcli-archive-keyring.gpg && \ - echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/trusted.gpg.d/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null &&\ + curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/etc/apt/trusted.gpg.d/githubcli-archive-keyring.gpg && \ + echo "deb [arch=${TARGETARCH} signed-by=/etc/apt/trusted.gpg.d/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null &&\ # apt-get update && \ - apt-get install -y --no-install-recommends \ - docker-ce-cli \ - kubectl \ - gh && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + docker-ce-cli \ + kubectl \ + gh \ + gosu \ + openvpn \ + network-manager-openvpn \ + strongswan \ + strongswan-pki \ + libstrongswan-extra-plugins \ + libtss2-tcti-tabrmd0 \ + openssh-client && \ # # Install Docker Compose - required to rebuild the rover and dynamic terminal in VSCode # echo "Installing docker compose ${versionDockerCompose}..." && \ mkdir -p /usr/libexec/docker/cli-plugins/ && \ - if [ ${TARGETARCH} == "amd64" ]; then \ - curl -L -o /usr/libexec/docker/cli-plugins/docker-compose https://github.com/docker/compose/releases/download/v${versionDockerCompose}/docker-compose-${TARGETOS}-x86_64 ; \ - else \ - curl -L -o /usr/libexec/docker/cli-plugins/docker-compose https://github.com/docker/compose/releases/download/v${versionDockerCompose}/docker-compose-${TARGETOS}-aarch64 ; \ - fi \ - && chmod +x /usr/libexec/docker/cli-plugins/docker-compose && \ + ARCH=$([ "${TARGETARCH}" = "amd64" ] && echo "x86_64" || echo "aarch64") && \ + curl -L -o /usr/libexec/docker/cli-plugins/docker-compose https://github.com/docker/compose/releases/download/v${versionDockerCompose}/docker-compose-${TARGETOS}-${ARCH} && \ + chmod +x /usr/libexec/docker/cli-plugins/docker-compose && \ # # Install Helm # @@ -156,23 +162,16 @@ RUN apt-get update && \ # Install terrascan # echo "Installing terrascan v${versionTerrascan} ..." && \ - if [ ${TARGETARCH} == "amd64" ]; then \ - curl -sSL -o terrascan.tar.gz https://github.com/tenable/terrascan/releases/download/v${versionTerrascan}/terrascan_${versionTerrascan}_Linux_x86_64.tar.gz ; \ - else \ - curl -sSL -o terrascan.tar.gz https://github.com/tenable/terrascan/releases/download/v${versionTerrascan}/terrascan_${versionTerrascan}_Linux_${TARGETARCH}.tar.gz ; \ - fi \ - && tar -xf terrascan.tar.gz terrascan && rm terrascan.tar.gz && \ + ARCH=$([ "${TARGETARCH}" = "amd64" ] && echo "x86_64" || echo "arm64") && \ + curl -sSL -o terrascan.tar.gz https://github.com/tenable/terrascan/releases/download/v${versionTerrascan}/terrascan_${versionTerrascan}_Linux_${ARCH}.tar.gz && \ + tar -xf terrascan.tar.gz terrascan && rm terrascan.tar.gz && \ install terrascan /usr/local/bin && rm terrascan && \ # # Install tfupdate # echo "Installing tfupdate v${versionTfupdate} ..." 
&& \ - if [ ${TARGETARCH} == "amd64" ]; then \ - curl -sSL -o tfupdate.tar.gz https://github.com/minamijoyo/tfupdate/releases/download/v${versionTfupdate}/tfupdate_${versionTfupdate}_linux_amd64.tar.gz ; \ - else \ - curl -sSL -o tfupdate.tar.gz https://github.com/minamijoyo/tfupdate/releases/download/v${versionTfupdate}/tfupdate_${versionTfupdate}_linux_${TARGETARCH}.tar.gz ; \ - fi \ - && tar -xf tfupdate.tar.gz tfupdate && rm tfupdate.tar.gz && \ + curl -sSL -o tfupdate.tar.gz https://github.com/minamijoyo/tfupdate/releases/download/v${versionTfupdate}/tfupdate_${versionTfupdate}_${TARGETOS}_${TARGETARCH}.tar.gz && \ + tar -xf tfupdate.tar.gz tfupdate && rm tfupdate.tar.gz && \ install tfupdate /usr/local/bin && rm tfupdate && \ # # Install tfsec @@ -198,10 +197,9 @@ RUN apt-get update && \ # https://docs.microsoft.com/en-us/powershell/scripting/install/install-other-linux?view=powershell-7.2#binary-archives # echo "Installing PowerShell ${versionPowershell}..." && \ - if [ ${TARGETARCH} == "amd64" ]; then curl -L -o /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v${versionPowershell}/powershell-${versionPowershell}-${TARGETOS}-x64.tar.gz ; \ - else curl -L -o /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v${versionPowershell}/powershell-${versionPowershell}-${TARGETOS}-${TARGETARCH}.tar.gz ; \ - fi \ - && mkdir -p /opt/microsoft/powershell/7 && \ + ARCH=$([ "${TARGETARCH}" = "amd64" ] && echo "x64" || echo "arm64") && \ + curl -L -o /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v${versionPowershell}/powershell-${versionPowershell}-${TARGETOS}-${ARCH}.tar.gz && \ + mkdir -p /opt/microsoft/powershell/7 && \ tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \ chmod +x /opt/microsoft/powershell/7/pwsh && \ ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \ @@ -226,12 +224,10 @@ RUN apt-get update && \ # echo "Installing Kubelogin ${versionKubelogin}..." && \ curl -sSL -o /tmp/kubelogin.zip https://github.com/Azure/kubelogin/releases/download/v${versionKubelogin}/kubelogin-${TARGETOS}-${TARGETARCH}.zip 2>&1 && \ - unzip -d /usr/ /tmp/kubelogin.zip && \ - if [ ${TARGETARCH} == "amd64" ]; then \ - chmod +x /usr/bin/linux_amd64/kubelogin ; \ - else \ - chmod +x /usr/bin/linux_arm64/kubelogin ; \ - fi && \ + unzip -d /usr/bin /tmp/kubelogin.zip && \ + mv /usr/bin/bin/linux_${TARGETARCH}/kubelogin /usr/bin/kubelogin && \ + rm -rf /usr/bin/bin && \ + chmod +x /usr/bin/kubelogin && \ # Hashicorp Vault # echo "Installing Vault ${versionVault}..." && \ @@ -274,25 +270,24 @@ RUN apt-get update && \ # # Install Ansible # - echo "Installing Ansible ${versionAnsible} ..." && \ - pip3 install ansible-core==${versionAnsible} && \ + echo "Installing Ansible 2.16.2 ..." && \ + pip3 install ansible-core==2.16.2 && \ # # # ################ Install apt packages ################## # For amd64 only - as no arm64 version packages available per: https://packages.microsoft.com/ubuntu/20.04/prod/pool/main/m/mssql-tools/ - if [ ${TARGETARCH} == "amd64" ]; then \ - echo ACCEPT_EULA=Y apt-get install -y --no-install-recommends unixodbc mssql-tools; \ + if [ "${TARGETARCH}" = "amd64" ]; then \ + ACCEPT_EULA=Y apt-get install -y --no-install-recommends unixodbc mssql-tools; \ else \ - echo "mssql-tools skipped as not running on amr64"; \ - fi \ - # - && echo "Installing latest shellspec..." 
&& \ + echo "mssql-tools skipped as not running on arm64"; \ + fi && \ + echo "Installing latest shellspec..." && \ curl -fsSL https://git.io/shellspec | sh -s -- --yes && \ # # Golang # echo "Installing Golang ${versionGolang}..." && \ - curl -sSL -o /tmp/golang.tar.gz https://go.dev/dl/go${versionGolang}.${TARGETOS}-${TARGETARCH}.tar.gz 2>&1 && \ + curl -sSL -o /tmp/golang.tar.gz https://go.dev/dl/go${versionGolang}.${TARGETOS}-${TARGETARCH}.tar.gz && \ tar -C /usr/local -xzf /tmp/golang.tar.gz && \ export PATH=$PATH:/usr/local/go/bin && \ go version && \ @@ -337,50 +332,70 @@ RUN apt-get update && \ chown -R ${USERNAME} /commandhistory && \ echo "set -o history" >> "/home/${USERNAME}/.bashrc" && \ echo "export HISTCONTROL=ignoredups:erasedups" >> "/home/${USERNAME}/.bashrc" && \ - echo "PROMPT_COMMAND=\"${PROMPT_COMMAND:+$PROMPT_COMMAND$'\n'}history -a; history -c; history -r\"" >> "/home/${USERNAME}/.bashrc" && \ - echo "[ -f /tf/rover/.kubectl_aliases ] && source /tf/rover/.kubectl_aliases" >> "/home/${USERNAME}/.bashrc" && \ - echo "alias watch=\"watch \"" >> "/home/${USERNAME}/.bashrc" && \ + echo "PROMPT_COMMAND='history -a; history -c; history -r'" >> "/home/${USERNAME}/.bashrc" && \ + echo '[ -f /tf/rover/.kubectl_aliases ] && source /tf/rover/.kubectl_aliases' >> "/home/${USERNAME}/.bashrc" && \ + echo 'alias watch="watch "' >> "/home/${USERNAME}/.bashrc" && \ # # Clean-up # apt-get remove -y \ gcc \ - python3-dev \ - apt-utils && \ + python3-dev && \ apt-get autoremove -y && \ apt-get clean && \ rm -rf /tmp/* && \ rm -rf /var/lib/apt/lists/* && \ - find . | grep -E "(__pycache__|\.pyc|\.pyo$)" | xargs rm -rf + find / -type d -name __pycache__ -exec rm -r {} + 2>/dev/null || true && \ + find / -type f -name '*.py[cod]' -delete 2>/dev/null || true # # Switch to non-root ${USERNAME} context # -USER ${USERNAME} - COPY .devcontainer/.zshrc /home/${USERNAME}/ COPY ./scripts/sshd_config /home/${USERNAME}/.ssh/sshd_config -RUN echo "Setting up OMZ environment" && \ - # - # Install Oh My Zsh - # - curl -fsSL https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh | bash -s -- --unattended && \ +# Use a pre-built base image that includes essential packages +FROM mcr.microsoft.com/vscode/devcontainers/base:ubuntu-22.04 AS config-base + +# Set up environment variables +ENV DEBIAN_FRONTEND=noninteractive \ + TZ=UTC \ + LANG=en_US.UTF-8 \ + LANGUAGE=en_US:en \ + LC_ALL=en_US.UTF-8 + +# Set up user environment +ARG USERNAME=vscode +ARG USER_UID=1000 +ARG USER_GID=1000 + +# Configure shell files and aliases +RUN mkdir -p /home/${USERNAME}/.ssh && \ + touch /home/${USERNAME}/.ssh/sshd_config && \ + touch /home/${USERNAME}/.zshrc && \ + chown -R ${USERNAME}:${USERNAME} /home/${USERNAME}/.ssh && \ + chmod 700 /home/${USERNAME}/.ssh && \ + chmod 644 /home/${USERNAME}/.zshrc && \ + chmod 600 /home/${USERNAME}/.ssh/sshd_config && \ chmod 700 -R /home/${USERNAME}/.oh-my-zsh && \ - echo "DISABLE_UNTRACKED_FILES_DIRTY=\"true\"" >> /home/${USERNAME}/.zshrc && \ - echo "alias rover=/tf/rover/rover.sh" >> /home/${USERNAME}/.bashrc && \ - echo "alias rover=/tf/rover/rover.sh" >> /home/${USERNAME}/.zshrc && \ - echo "alias t=/usr/bin/terraform" >> /home/${USERNAME}/.bashrc && \ - echo "alias t=/usr/bin/terraform" >> /home/${USERNAME}/.zshrc && \ - echo "alias k=/usr/bin/kubectl" >> /home/${USERNAME}/.zshrc && \ - echo "alias k=/usr/bin/kubectl" >> /home/${USERNAME}/.bashrc && \ - echo "cd /tf/caf || true" >> /home/${USERNAME}/.bashrc && \ - echo "cd /tf/caf || true" >> 
/home/${USERNAME}/.zshrc && \ - echo "[ -f /tf/rover/.kubectl_aliases ] && source /tf/rover/.kubectl_aliases" >> /home/${USERNAME}/.zshrc && \ - echo "source /tf/rover/zsh-autosuggestions.zsh" >> /home/${USERNAME}/.zshrc && \ - echo "alias watch=\"watch \"" >> /home/${USERNAME}/.zshrc + { \ + echo "DISABLE_UNTRACKED_FILES_DIRTY=\"true\""; \ + echo "alias rover=/tf/rover/rover.sh"; \ + echo "alias t=/usr/bin/terraform"; \ + echo "alias k=/usr/bin/kubectl"; \ + echo "cd /tf/caf || true"; \ + echo "[ -f /tf/rover/.kubectl_aliases ] && source /tf/rover/.kubectl_aliases"; \ + echo "source /tf/rover/zsh-autosuggestions.zsh"; \ + echo "alias watch=\"watch \""; \ + } >> /home/${USERNAME}/.zshrc && \ + { \ + echo "alias rover=/tf/rover/rover.sh"; \ + echo "alias t=/usr/bin/terraform"; \ + echo "alias k=/usr/bin/kubectl"; \ + echo "cd /tf/caf || true"; \ + } >> /home/${USERNAME}/.bashrc -FROM base +FROM config-base ARG versionTerraform \ USERNAME=vscode \ @@ -393,19 +408,56 @@ ENV versionRover=${versionRover} \ # # Keeping this method to support alpha build installations -RUN echo "Set rover version to ${versionRover}..." && echo "Installing Terraform ${versionTerraform}..." && \ - curl -sSL -o /tmp/terraform.zip "https://releases.hashicorp.com/terraform/${versionTerraform}/terraform_${versionTerraform}_${TARGETOS}_${TARGETARCH}.zip" 2>&1 && \ - sudo unzip -o -d /usr/bin /tmp/terraform.zip && \ - sudo chmod +x /usr/bin/terraform && \ +# Create required directories +RUN mkdir -p /tf/rover && \ mkdir -p "/home/${USERNAME}/.terraform.cache/plugin-cache" && \ + chown -R ${USERNAME}:${USERNAME} /tf && \ + chown -R ${USERNAME}:${USERNAME} "/home/${USERNAME}/.terraform.cache" + +# Install Terraform +ARG TARGETOS +ARG TARGETARCH +ARG versionTerraform +ARG versionRover + +RUN echo "Installing Terraform ${versionTerraform}..." && \ + curl -sSL -o /tmp/terraform.zip "https://releases.hashicorp.com/terraform/${versionTerraform}/terraform_${versionTerraform}_${TARGETOS}_${TARGETARCH}.zip" && \ + unzip -o -d /usr/bin /tmp/terraform.zip && \ + chmod +x /usr/bin/terraform && \ rm /tmp/terraform.zip && \ - # - echo "Set rover version to ${versionRover}..." 
&& \ echo "${versionRover}" > /tf/rover/version.txt -RUN az config set core.login_experience_v2=false +# Install Azure CLI and extensions (with architecture-specific handling) +ARG extensionsAzureCli +ARG TARGETARCH +RUN if [ "${TARGETARCH}" = "amd64" ]; then \ + curl -sL https://aka.ms/InstallAzureCLIDeb | bash && \ + az config set core.login_experience_v2=false && \ + az extension add --name resource-graph --system; \ + else \ + echo "Skipping Azure CLI installation for ${TARGETARCH} due to QEMU limitations"; \ + fi + +# Create script directories and set permissions +RUN mkdir -p /tf/rover/scripts && \ + chown -R ${USERNAME}:${USERNAME} /tf/rover -COPY ./scripts/rover.sh ./scripts/tfstate.sh ./scripts/functions.sh ./scripts/remote.sh ./scripts/parse_command.sh ./scripts/banner.sh ./scripts/clone.sh ./scripts/walkthrough.sh ./scripts/sshd.sh ./scripts/backend.hcl.tf ./scripts/backend.azurerm.tf ./scripts/ci.sh ./scripts/cd.sh ./scripts/task.sh ./scripts/symphony_yaml.sh ./scripts/test_runner.sh ./ +# Copy rover scripts +COPY --chown=${USERNAME}:${USERNAME} \ + ./scripts/rover.sh \ + ./scripts/tfstate.sh \ + ./scripts/functions.sh \ + ./scripts/remote.sh \ + ./scripts/parse_command.sh \ + ./scripts/banner.sh \ + ./scripts/clone.sh \ + ./scripts/walkthrough.sh \ + ./scripts/sshd.sh \ + ./scripts/backend.hcl.tf \ + ./scripts/backend.azurerm.tf \ + ./scripts/task.sh \ + ./scripts/test_runner.sh \ + /tf/rover/scripts/ COPY ./scripts/ci_tasks/* ./ci_tasks/ COPY ./scripts/lib/* ./lib/ COPY ./scripts/tfcloud/* ./tfcloud/ diff --git a/Dockerfile.roverlight b/Dockerfile.roverlight new file mode 100644 index 000000000..689084c14 --- /dev/null +++ b/Dockerfile.roverlight @@ -0,0 +1,161 @@ +########################################################### +# Builder stage for initial setup and build dependencies +########################################################### +FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/devcontainers/base:ubuntu as builder + +ARG USERNAME=vscode +ARG USER_UID=1000 +ARG USER_GID=${USER_UID} + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + apt-transport-https \ + ca-certificates \ + curl \ + gpg \ + gpg-agent \ + locales \ + unzip \ + wget && \ + locale-gen en_US.UTF-8 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +########################################################### +# Base stage for common tools and configurations +########################################################### +FROM --platform=${BUILDPLATFORM} mcr.microsoft.com/devcontainers/base:ubuntu as base + +ARG BUILDPLATFORM +ARG TARGETPLATFORM +ARG SSH_PASSWD TARGETARCH TARGETOS +ARG USERNAME=vscode +ARG USER_UID=1000 +ARG USER_GID=${USER_UID} + +ENV SSH_PASSWD=${SSH_PASSWD} \ + USERNAME=${USERNAME} \ + PATH="${PATH}:/opt/mssql-tools/bin:/home/vscode/.local/lib/shellspec/bin:/home/vscode/go/bin:/usr/local/go/bin" \ + TF_DATA_DIR="/home/${USERNAME}/.terraform.cache" \ + TF_PLUGIN_CACHE_DIR="/tf/cache" \ + TF_REGISTRY_DISCOVERY_RETRY=5 \ + TF_REGISTRY_CLIENT_TIMEOUT=15 \ + ARM_USE_MSGRAPH=true \ + LANG=en_US.UTF-8 \ + LANGUAGE=en_US:en \ + LC_ALL=en_US.UTF-8 + +WORKDIR /tf/rover +COPY ./scripts/.kubectl_aliases . +COPY ./scripts/zsh-autosuggestions.zsh . 
+ +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + apt-transport-https \ + apt-utils \ + bsdmainutils \ + ca-certificates \ + curl \ + fonts-powerline \ + gettext \ + git \ + gpg \ + gpg-agent \ + jq \ + less \ + locales \ + sudo \ + unzip \ + vim \ + wget \ + zsh \ + zip && \ + locale-gen en_US.UTF-8 && \ + mkdir /tf/cache && \ + chown -R ${USERNAME}:${USERNAME} ${TF_PLUGIN_CACHE_DIR} && \ + mkdir -p /tf/caf \ + /tf/rover \ + /tf/logs \ + /home/${USERNAME}/.ansible \ + /home/${USERNAME}/.azure \ + /home/${USERNAME}/.gnupg \ + /home/${USERNAME}/.packer.d \ + /home/${USERNAME}/.ssh \ + /home/${USERNAME}/.ssh-localhost \ + /home/${USERNAME}/.terraform.logs \ + /home/${USERNAME}/.terraform.cache \ + /home/${USERNAME}/.terraform.cache/tfstates \ + /home/${USERNAME}/.vscode-server \ + /home/${USERNAME}/.vscode-server-insiders && \ + chown -R ${USER_UID}:${USER_GID} /home/${USERNAME} /tf/rover /tf/caf /tf/logs && \ + chmod 777 -R /home/${USERNAME} /tf/caf /tf/rover && \ + chmod 700 /home/${USERNAME}/.ssh && \ + echo ${USERNAME} ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/${USERNAME} && \ + chmod 0440 /etc/sudoers.d/${USERNAME} && \ + mkdir /commandhistory && \ + touch /commandhistory/.bash_history && \ + chown -R ${USERNAME} /commandhistory && \ + echo "set -o history" >> "/home/${USERNAME}/.bashrc" && \ + echo "export HISTCONTROL=ignoredups:erasedups" >> "/home/${USERNAME}/.bashrc" && \ + echo "PROMPT_COMMAND=\"${PROMPT_COMMAND:+$PROMPT_COMMAND$'\n'}history -a; history -c; history -r\"" >> "/home/${USERNAME}/.bashrc" && \ + echo "[ -f /tf/rover/.kubectl_aliases ] && source /tf/rover/.kubectl_aliases" >> "/home/${USERNAME}/.bashrc" && \ + echo "alias watch=\"watch \"" >> "/home/${USERNAME}/.bashrc" && \ + apt-get clean && \ + rm -rf /tmp/* && \ + rm -rf /var/lib/apt/lists/* && \ + find . 
| grep -E "(__pycache__|\.pyc|\.pyo$)" | xargs rm -rf + +USER ${USERNAME} + +COPY .devcontainer/.zshrc $HOME +COPY ./scripts/sshd_config /home/${USERNAME}/.ssh/sshd_config + +RUN echo "Customizing userenv" && \ + echo "DISABLE_UNTRACKED_FILES_DIRTY=\"true\"" >> /home/${USERNAME}/.zshrc && \ + echo "alias rover=/tf/rover/rover.sh" >> /home/${USERNAME}/.bashrc && \ + echo "alias rover=/tf/rover/rover.sh" >> /home/${USERNAME}/.zshrc && \ + echo "alias t=$HOME/.tfenv/tfenv-3.0.0/bin/terraform" >> /home/${USERNAME}/.bashrc && \ + echo "alias t=$HOME/.tfenv/tfenv-3.0.0/bin/terraform" >> /home/${USERNAME}/.zshrc && \ + echo "cd /tf/caf || true" >> /home/${USERNAME}/.bashrc && \ + echo "cd /tf/caf || true" >> /home/${USERNAME}/.zshrc && \ + echo "[ -f /tf/rover/.kubectl_aliases ] && source /tf/rover/.kubectl_aliases" >> /home/${USERNAME}/.zshrc && \ + echo "source /tf/rover/zsh-autosuggestions.zsh" >> /home/${USERNAME}/.zshrc && \ + echo "alias watch=\"watch \"" >> /home/${USERNAME}/.zshrc + +########################################################### +# Final stage with minimal runtime dependencies +########################################################### +FROM --platform=${TARGETPLATFORM} base + +ARG USERNAME=vscode \ + versionRover + +ENV versionRover=${versionRover} + +COPY --from=builder /usr/local/bin /usr/local/bin +COPY --from=base /home/${USERNAME} /home/${USERNAME} +COPY --from=base /tf /tf +COPY --from=base /etc/sudoers.d/${USERNAME} /etc/sudoers.d/${USERNAME} + +RUN echo "${versionRover}" > /tf/rover/version.txt + +COPY ./scripts/rover.sh ./scripts/tfstate.sh ./scripts/functions.sh ./scripts/remote.sh ./scripts/parse_command.sh ./scripts/banner.sh ./scripts/clone.sh ./scripts/walkthrough.sh ./scripts/sshd.sh ./scripts/backend.hcl.tf ./scripts/backend.azurerm.tf ./scripts/task.sh ./scripts/test_runner.sh ./ +COPY ./scripts/ci_tasks/* ./ci_tasks/ +COPY ./scripts/lib/* ./lib/ +COPY ./scripts/tfcloud/* ./tfcloud/ + +USER ${USERNAME} + +RUN sudo apt-get update && \ + sudo apt-get install -y --no-install-recommends \ + ca-certificates \ + git \ + jq \ + less \ + sudo \ + vim \ + zsh && \ + sudo apt-get clean && \ + sudo rm -rf /var/lib/apt/lists/* && \ + sudo find / -type f -name "*.pyc" -delete && \ + sudo find / -type d -name "__pycache__" -exec rm -r {} + || true diff --git a/README.md b/README.md index fdabf6a8b..770a17dd5 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ ![](https://github.com/aztfmod/rover/workflows/master/badge.svg) -![](https://github.com/aztfmod/rover/workflows/.github/workflows/ci-branches.yml/badge.svg) +![](https://github.com/aztfmod/rover/workflows/roverlight-build/badge.svg) +![](https://github.com/aztfmod/rover/workflows/roverlight-security-scan/badge.svg) [![Gitter](https://badges.gitter.im/aztfmod/community.svg)](https://gitter.im/aztfmod/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) # Azure Terraform SRE - Landing zones on Terraform - Rover @@ -10,7 +11,10 @@ Azure Terraform SRE provides you with guidance and best practices to adopt Azure The CAF **rover** is helping you managing your enterprise Terraform deployments on Microsoft Azure and is composed of two parts: -- **A docker container** +- **A docker container** (available in standard and light variants) + - Standard: Full featured development environment with all tools + - Light: Streamlined version focused on essential functionality + - Both variants available on GitHub Container Registry - Allows consistent developer experience on PC, Mac, Linux, including the right 
tools, git hooks and DevOps tools. - Native integration with [Visual Studio Code](https://code.visualstudio.com/docs/remote/containers), [GitHub Codespaces](https://github.com/features/codespaces). - Contains the versioned toolset you need to apply landing zones. diff --git a/docker-bake.hcl b/docker-bake.hcl index 7d6d2f629..ef5a72d5e 100644 --- a/docker-bake.hcl +++ b/docker-bake.hcl @@ -8,12 +8,12 @@ # group "default" { - targets = ["rover_local", "rover_agents"] + targets = ["rover_local", "roverlight", "rover_agents"] } target "rover_local" { dockerfile = "./Dockerfile" - tags = ["${tag}"] + tags = ["rover_local:latest"] args = { extensionsAzureCli = extensionsAzureCli versionDockerCompose = versionDockerCompose @@ -30,11 +30,23 @@ target "rover_local" { versionTerrascan = versionTerrascan versionTfupdate = versionTfupdate } - platforms = ["linux/amd64","linux/arm64" ] + platforms = ["linux/arm64", "linux/amd64" ] cache-to = ["type=local,dest=/tmp/.buildx-cache,mode=max"] cache-from = ["type=local,src=/tmp/.buildx-cache"] } +target "roverlight" { + dockerfile = "./Dockerfile.roverlight" + tags = ["ghcr.io/arnaudlh/roverlight:latest"] + args = { + versionRover = "${versionRover}" + USERNAME = "vscode" + TARGETOS = "linux" + TARGETARCH = "amd64" + } + platforms = ["${platform}"] +} + target "rover_registry" { inherits = ["rover_local"] tags = ["${versionRover}"] @@ -43,18 +55,38 @@ target "rover_registry" { } } + +# Docker build configuration variable "registry" { - default = "" + default = "ghcr.io/aztfmod" +} + +variable "image_name" { + default = "roverlight" } -variable "tag" { - default = "latest" +variable "version" { + default = "latest" } +variable "platform" { + default = "linux/amd64" +} + +# Version configuration variable "versionRover" { - default = "" + default = "" } variable "versionTerraform" { - default = "" -} \ No newline at end of file + default = "" +} + +# Build arguments +variable "targetarch" { + default = "amd64" +} + +variable "username" { + default = "vscode" +} diff --git a/docker-bake.override.hcl b/docker-bake.override.hcl index fc032a418..48abe6a15 100644 --- a/docker-bake.override.hcl +++ b/docker-bake.override.hcl @@ -19,3 +19,5 @@ versionGithubRunner="2.320.0" versionGitlab="17.5.3" versionTfc="1.17.0" +# Build configuration is in docker-bake.hcl + diff --git a/docs/CONTINOUS_INTEGRATION.md b/docs/CONTINOUS_INTEGRATION.md deleted file mode 100644 index b4c9b6eb1..000000000 --- a/docs/CONTINOUS_INTEGRATION.md +++ /dev/null @@ -1,21 +0,0 @@ -# Continuous Integration - -Rover ci invokes a set of predefined tools to ensure code quality. These tools are defined via yaml files in [scripts/ci_tasks](../scripts/ci_tasks) - -### Pre-requisites to running CI: - -* Landing zones and configs are cloned to a base directory (eg. /tf/caf) -* A symphony.yaml file. Please see [samples.symphony.yaml](symphony/sample.symphony.yaml) - -### Run CI -* Run all CI tools - - ```shell - rover ci -ct tflint -sc /tf/config/symphony.yml -b /tf/caf -env demo -d - ``` - -* Run a single ci tool by name (tflint in this example) - - ```shell - rover ci -ct tflint -sc /tf/config/symphony.yml -b /tf/caf -env demo -d - ``` diff --git a/docs/DEV_CONTAINER.md b/docs/DEV_CONTAINER.md index 1589cc423..998eae2b5 100644 --- a/docs/DEV_CONTAINER.md +++ b/docs/DEV_CONTAINER.md @@ -2,6 +2,10 @@ The dev container in this repo uses docker compose and an image hosted on dockerhub (aztfmod/rover:tag). 
+Two variants are available: +- Standard: Full development environment (aztfmod/rover:tag) +- Light: Streamlined version (ghcr.io/aztfmod/roverlight:tag) + If you would like to make changes to the base image used by the dev container, you need to build the image then update the docker-compose.yml file to point to the newly created local image. ## 1) Build the local image diff --git a/docs/ROVERLIGHT.md b/docs/ROVERLIGHT.md new file mode 100644 index 000000000..4febbc509 --- /dev/null +++ b/docs/ROVERLIGHT.md @@ -0,0 +1,61 @@ +# Roverlight - Simplified Rover Container + +Roverlight is a streamlined version of the rover container focused on essential functionality with modern Docker practices. + +## Features +- Multi-stage Docker builds for optimized image size +- Modern GitHub Actions workflows with caching +- Security scanning integration +- Cross-platform support (linux/amd64, linux/arm64) + +## Usage +```shell +docker pull ghcr.io/aztfmod/roverlight:latest +``` + +## Building Locally +Follow these steps to build the roverlight container locally: + +1. Clone the repository +```shell +git clone https://github.com/aztfmod/rover +cd rover +``` + +2. Build using Docker buildx +```shell +docker buildx build -f Dockerfile.roverlight . +``` + +## CI/CD Pipeline +Roverlight uses modern GitHub Actions workflows: +- Automated builds on push to roverlight branch +- Security scanning with Anchore +- Build metrics tracking +- Multi-architecture support + +## Differences from Standard Rover +Roverlight is designed to be a lighter alternative to the standard rover container: +- Focused on essential development tools +- Optimized image size through multi-stage builds +- Simplified configuration + +## Environment Variables +The following environment variables are available in the container: +- `TF_DATA_DIR`: Terraform data directory (/home/vscode/.terraform.cache) +- `TF_PLUGIN_CACHE_DIR`: Terraform plugin cache directory (/tf/cache) +- `TF_REGISTRY_DISCOVERY_RETRY`: Number of retries for Terraform registry discovery (5) +- `TF_REGISTRY_CLIENT_TIMEOUT`: Timeout for Terraform registry client (15) +- `ARM_USE_MSGRAPH`: Use Microsoft Graph API (true) + +## Container Structure +The container follows a multi-stage build pattern: +1. Builder stage: Initial setup and build dependencies +2. Base stage: Common tools and configurations +3. Final stage: Minimal runtime dependencies + +## Security Features +- Security scanning with Anchore +- SARIF report generation +- Critical vulnerability checks +- Automated security reports in PRs diff --git a/docs/USAGE.md b/docs/USAGE.md index ecafbbd89..9215aae8e 100644 --- a/docs/USAGE.md +++ b/docs/USAGE.md @@ -9,8 +9,6 @@ Usage: rover commands: login Start the interactive login process to get access to your azure subscription. Performs an az login. logout Clear out login information related to the azure subscription. Performs an az logout. - ci Invoke the continuous integration workflow. - cd Invoke the continuous deployment workflow. landingzone Commands for managing landing zones. list Lists out all landing zones ( rover landingzone list) workspace Commands for managing workspaces. @@ -19,9 +17,7 @@ Usage: rover delete Delete a workspace switches: - -sc | --symphony-config (ci workflow) Path to a symphony.yml file. - -ct | --ci-task-name (ci workflow) CI Tool to invoke. If omitted all tools are run, if provided only that tool is run. - -b | --base-dir (ci workflow) Base directory for paths in symphony.yml. + -b | --base-dir Base directory for configuration. 
-d | --debug Show debug (verbose) logs | --log-severity This is the desired log degree. It can be set to FATAL,ERROR, WARN, INFO, DEBUG or VERBOSE -lz | --landingzone Path to a landing zone @@ -34,5 +30,3 @@ Usage: rover -var-folder Path to the folder containing configurations for the lz. ``` - -See [Continuous Integration document](CONTINOUS_INTEGRATION.md) for examples on running CI. diff --git a/scripts/build_image.sh b/scripts/build_image.sh index c7d94397e..58b698dbf 100644 --- a/scripts/build_image.sh +++ b/scripts/build_image.sh @@ -91,13 +91,6 @@ function build_base_rover_image { export rover="${rover_base}:${tag}" tag_strategy="preview-" ;; - "ci") - registry="symphonydev.azurecr.io/" - tag=${versionTerraform}-${tag_date_preview} - rover_base="${registry}rover-ci" - export rover="${rover_base}:${tag}" - tag_strategy="ci-" - ;; "local") registry="localhost:5000/" tag=${versionTerraform}-${tag_date_preview} @@ -146,7 +139,7 @@ function build_base_rover_image { docker buildx bake \ -f docker-bake.hcl \ -f docker-bake.override.hcl \ - --push rover_registry + --push rover_registry rover_agents ;; esac @@ -257,9 +250,7 @@ case "${strategy}" in esac echo "Building rover images." -if [ "$strategy" == "ci" ]; then - build_base_rover_image "1.0.0" ${strategy} -else +if [ true ]; then while read versionTerraform; do build_base_rover_image ${versionTerraform} ${strategy} done <./.env.terraform @@ -278,4 +269,4 @@ case "${strategy}" in ;; esac -docker buildx rm rover \ No newline at end of file +docker buildx rm rover diff --git a/scripts/cd.sh b/scripts/cd.sh deleted file mode 100644 index 87f8c2667..000000000 --- a/scripts/cd.sh +++ /dev/null @@ -1,215 +0,0 @@ -#!/bin/bash - -function cd_usage { - local code=$1 - _helpText=" - Usage: - rover deploy - - actions: - Select one of the following options: - * run Terraform plan, Terraform apply, run integration tests - * plan Terraform plan only - * apply Terraform plan, Terraform apply - * test run integration tests - - flags: - -sc required path to symphony.yml file. - -b required base path to be used for symphony.yml file. - -env optional name of the environment (defaults to sandpit) - -level optional Specifiy a level only performs cd on that level. If ommitted, action is performed on all levels in symphony.yml. - -h | --help optional Show the help usage guide (this.) -" - information "$_helpText" 1>&2 - - if [ -z "$code" ]; then - escape 0 - else - escape $code - fi -} - -function escape { - exit $1 -} -function verify_cd_parameters { - echo "@Verifying cd parameters" - echo "symphony_yaml_file: $symphony_yaml_file" - # Handle 1st level sub commands - case "${cd_action}" in - run | plan | apply ) - information "Found valid cd action - terraform ${cd_action}" - ;; - test) - information "Found valid cd action ${cd_action}" - ;; - -h | --help) - cd_usage - ;; - *) - if [ ! -z "$cd_action" ]; then - error_message "Invalid cd action ${cd_action}" - fi - cd_usage "1" - esac - - # Handle 2nd level sub commands. Only -h|--help is supported for now - case "${PARAMS}" in - "-h "| "--help ") - cd_usage - ;; - esac - - # verify symphony yaml - if [ -z "$symphony_yaml_file" ]; then - export code="1" - error "1" "Missing path to symphony.yml. Please provide a path to the file via -sc or --symphony-config" - return $code - fi - - if [ ! -f "$symphony_yaml_file" ]; then - export code="1" - error "1" "Invalid path, $symphony_yaml_file file not found. 
Please provide a valid path to the file via -sc or --symphony-config" - return $code - fi - - validate_symphony "$symphony_yaml_file" -} - - -function join_path { - local base_path=$1 - local part=$2 - - if [[ "$base_path" != *'/' ]]; then - base_path="$base_path/" - fi - - if [[ "$part" == '/'* ]]; then - part="${part:1}" - fi - - echo "$base_path$part" -} - -# Convert AZURE_ENVIRONMENT to comply with autorest's expectations -# https://github.com/Azure/go-autorest/blob/master/autorest/azure/environments.go#L37 -# To see az cli cloud names - az cloud list -o table -# We are only handling AzureCloud because the other cloud names are the same, only AzureCloud is different between az cli and autorest. -# Note the names below are camel case, Autorest converts all to upper case - https://github.com/Azure/go-autorest/blob/master/autorest/azure/environments.go#L263 -function set_autorest_environment_variables { - case $AZURE_ENVIRONMENT in - AzureCloud) - export AZURE_ENVIRONMENT='AzurePublicCloud' - ;; - AzureUSGovernment) - export AZURE_ENVIRONMENT='AzureUSGovernmentCloud' - ;; - esac -} - -function execute_cd { - local action=$cd_action - echo "@Starting CD execution" - echo "@CD action: $action" - - local successMessage="" - if [ "${TF_VAR_level}" == "all" ]; then - # get all levels from symphony yaml (only useful in env where there is a single MSI for all levels.) - local -a levels=($(get_all_level_names "$symphony_yaml_file")) - #echo "get all levels $levels" - else - # run CD for a single level - local -a levels=($(echo $TF_VAR_level)) - #echo "single level CD - ${TF_VAR_level}" - fi - - for level in "${levels[@]}" - do - if [ "$level" == "level0" ]; then - export caf_command="launchpad" - else - export caf_command="landingzone" - fi - - information "Deploying level: $level caf_command: $caf_command" - - local -a stacks=($(get_all_stack_names_for_level "$symphony_yaml_file" "$level" )) - - if [ ${#stacks[@]} -eq 0 ]; then - export code="1" - error ${LINENO} "No stacks found, check that level ${level} exist and has stacks defined in ${symphony_yaml_file}" - fi - - for stack in "${stacks[@]}" - do - # Reset TFVAR file list - PARAMS="" - - information "deploying stack $stack" - - landing_zone_path=$(get_landingzone_path_for_stack "$symphony_yaml_file" "$level" "$stack") - config_path=$(get_config_path_for_stack "$symphony_yaml_file" "$level" "$stack") - state_file_name=$(get_state_file_name_for_stack "$symphony_yaml_file" "$level" "$stack") - integration_test_relative_path=$(get_integration_test_path "$symphony_yaml_file") - integration_test_absolute_path=$(join_path "$base_directory" "$integration_test_relative_path") - - local plan_file="${state_file_name%.*}.tfplan" - - export landingzone_name=$landing_zone_path - export TF_VAR_tf_name=${state_file_name} - export TF_VAR_tf_plan=${plan_file} - export TF_VAR_level=${level} - expand_tfvars_folder "$config_path" - tf_command=$(echo $PARAMS | sed -e 's/^[ \t]*//') - - - log_debug @"Starting Deployment" - log_debug " landingzone_name: $landingzone_name" - log_debug " TF_VAR_tf_name: $TF_VAR_tf_name" - log_debug " TF_VAR_tf_plan: $TF_VAR_tf_plan" - log_debug " TF_VAR_level: $TF_VAR_level" - log_debug " tf_command: $tf_command" - log_debug " TF_VAR_workspace: $TF_VAR_workspace" - log_debug " integration_test_absolute_path: $integration_test_absolute_path" - - case "${action}" in - run) - export tf_action="apply" - log_debug " tf_action: $tf_action" - __set_tf_log__ "rover.deploy.run" - deploy "${TF_VAR_workspace}" - __reset_log__ - 
set_autorest_environment_variables - run_integration_tests "$integration_test_absolute_path" - ;; - plan) - export tf_action="plan" - log_debug " tf_action: $tf_action" - __set_tf_log__ "rover.deploy.plan" - deploy "${TF_VAR_workspace}" - __reset_log__ - ;; - apply) - export tf_action="apply" - log_debug " tf_action: $tf_action" - __set_tf_log__ "rover.deploy.apply" - deploy "${TF_VAR_workspace}" - __reset_log__ - ;; - test) - set_autorest_environment_variables - run_integration_tests "$integration_test_absolute_path" - ;; - *) - error "invalid cd action: $action" - esac - - if [ ! -z "$text_log_status" ]; then - information "$text_log_status" - fi - done - done - success "Continuous Deployment complete." -} - diff --git a/scripts/ci.sh b/scripts/ci.sh deleted file mode 100755 index 837887dd8..000000000 --- a/scripts/ci.sh +++ /dev/null @@ -1,132 +0,0 @@ -#!/bin/bash - -source /tf/rover/task.sh -source /tf/rover/symphony_yaml.sh - -declare -a CI_TASK_CONFIG_FILE_LIST=() -declare -a REGISTERED_CI_TASKS=() -declare CI_TASK_DIR=/tf/rover/ci_tasks/ - -function verify_task_name(){ - local ci_task_name=$1 - local isTaskNameRegistered=$(task_is_registered "$ci_task_name") - if [ "$isTaskNameRegistered" != "true" ]; then - export code="1" - error "1" "$ci_task_name is not a registered ci command!" - return $code - fi -} - -function verify_ci_parameters { - echo "@Verifying ci parameters" - - # verify symphony yaml - if [ -z "$symphony_yaml_file" ]; then - export code="1" - error "1" "Missing path to symphony.yml. Please provide a path to the file via -sc or --symphony-config" - return $code - fi - - if [ ! -f "$symphony_yaml_file" ]; then - export code="1" - error "1" "Invalid path, $symphony_yaml_file file not found. Please provide a valid path to the file via -sc or --symphony-config" - return $code - fi - - validate_symphony "$symphony_yaml_file" - - # verify ci task name is valid - if [ ! -z "$ci_task_name" ]; then - verify_task_name "$ci_task_name" - fi -} - -function set_default_parameters { - echo "@Setting default parameters" - export caf_command="landingzone" - - # export landingzone_name= - # export TF_VAR_tf_name=${TF_VAR_tf_name:="$(basename ${landingzone_name}).tfstate"} - - # export tf_action= - # expand_tfvars_folder - # deploy ${TF_VAR_workspace} -} - -function register_ci_tasks { - echo @"Registering available ci task..." - - # Get List of config files - CI_TASK_CONFIG_FILE_LIST=$(get_list_of_task ${CI_TASK_DIR}) - - # For each config, grab the tool name - # TODO: Eventually we will want to validate configs. For now, we can assume if the yaml parses it is valid. - for config in $CI_TASK_CONFIG_FILE_LIST - do - task_name=$(get_task_name ${config}) - echo @"Registered task... '${task_name}'" - REGISTERED_CI_TASKS+=("${task_name}") - done - -} - -function task_is_registered { - local task_name=$1 - for task in "${REGISTERED_CI_TASKS[@]}" - do - if [ "$task" == "$task_name" ]; then - echo "true" - return - fi - done - echo "false" -} - -function execute_ci_actions { - echo "@Starting CI tools execution" - - if [ "${TF_VAR_level}" == "all" ]; then - # get all levels from symphony yaml (only useful in env where there is a single MSI for all levels.) 
- local -a levels=($(get_all_level_names "$symphony_yaml_file")) - # echo "get all levels" - else - # run CI for a single level - local -a levels=($(echo $TF_VAR_level)) - # echo "single level CI - ${TF_VAR_level}" - fi - - for level in "${levels[@]}" - do - local -a stacks=($(get_all_stack_names_for_level "$symphony_yaml_file" "$level" )) - - if [ ${#stacks[@]} -eq 0 ]; then - export code="1" - error ${LINENO} "No stacks found, check that level ${level} exist and has stacks defined in ${symphony_yaml_file}" - fi - - for stack in "${stacks[@]}" - do - landing_zone_path=$(get_landingzone_path_for_stack "$symphony_yaml_file" "$level" "$stack") - config_path=$(get_config_path_for_stack "$symphony_yaml_file" "$level" "$stack") - - if [ ! -z "$ci_task_name" ]; then - # run a single task by name - run_task "$ci_task_name" "$level" "$landing_zone_path" "$config_path" - else - # run all tasks - for task in "${REGISTERED_CI_TASKS[@]}" - do - run_task "$task" "$level" "$landing_zone_path" "$config_path" - done - echo " " - fi - done - done - - success "All CI tasks have run successfully." -} - -function clone_repos { - echo @"Cloning repo ${1}" - # TODO: We will start with git clone prior to CI execution. -} \ No newline at end of file diff --git a/scripts/functions.sh b/scripts/functions.sh index eb5c1641e..215749535 100644 --- a/scripts/functions.sh +++ b/scripts/functions.sh @@ -115,7 +115,7 @@ function process_actions { verify_parameters deploy ${TF_VAR_workspace} ;; - ci) + *) register_ci_tasks verify_ci_parameters set_default_parameters @@ -914,7 +914,11 @@ function verify_rover_version { user=$(whoami) if [ "${ROVER_RUNNER}" = false ]; then - required_version=$(cat /tf/caf/.devcontainer/docker-compose.yml | yq | jq -r '.services | first(.[]).image' || true) + if [ -f "/tf/caf/.devcontainer/docker-compose.yml" ]; then + required_version=$(cat /tf/caf/.devcontainer/docker-compose.yml | yq | jq -r '.services | first(.[]).image' || true) + else + required_version=$(sed '/\/\//d' /tf/caf/.devcontainer/devcontainer.json | jq -r .image | awk -F ':' '{print $NF}' || true) + fi running_version=$(cat ${script_path}/version.txt | egrep -o '[^\/]+$') if [ "${required_version}" != "${TF_VAR_rover_version}" ]; then diff --git a/scripts/lib/parse_parameters.sh b/scripts/lib/parse_parameters.sh index 871e828ca..d11e43fa6 100644 --- a/scripts/lib/parse_parameters.sh +++ b/scripts/lib/parse_parameters.sh @@ -64,11 +64,6 @@ parse_parameters() { shift 1 export caf_command="login" ;; - validate | ci) - shift 1 - export caf_command="ci" - export devops="true" - ;; ignite) shift 1 export caf_command="ignite" @@ -110,19 +105,6 @@ parse_parameters() { export caf_command="test" export devops="true" ;; - -sc|--symphony-config) - export symphony_yaml_file=$(parameter_value --symphony-config ${2}) - shift 2 - ;; - -ct|--ci-task-name) - export ci_task_name=$(parameter_value --ci-task-name ${2}) - export symphony_run_all_tasks=false - shift 2 - ;; - -b|--base-dir) - export base_directory=$(parameter_value --base-dir ${2}) - shift 2 - ;; -tfc|--tfc|-remote|--remote) shift 1 export gitops_terraform_backend_type="remote" @@ -291,4 +273,4 @@ parse_parameters() { ;; esac done -} \ No newline at end of file +} diff --git a/scripts/rover.sh b/scripts/rover.sh index 638945afd..ee44fcb3d 100755 --- a/scripts/rover.sh +++ b/scripts/rover.sh @@ -21,10 +21,7 @@ source ${script_path}/tfstate.sh source ${script_path}/walkthrough.sh -# symphony -source ${script_path}/ci.sh -source ${script_path}/cd.sh -source 
${script_path}/symphony_yaml.sh +# test runner source ${script_path}/test_runner.sh export ROVER_RUNNER=${ROVER_RUNNER:=false} @@ -38,7 +35,6 @@ export ARM_USE_AZUREAD=${ARM_USE_AZUREAD:="true"} export ARM_STORAGE_USE_AZUREAD=${ARM_STORAGE_USE_AZUREAD:="true"} export ARM_USE_MSAL=${ARM_USE_MSAL:="false"} export skip_permission_check=${skip_permission_check:=false} -export symphony_run_all_tasks=true export debug_mode=${debug_mode:="false"} export devops=${devops:="false"} export log_folder_path=${log_folderpath:=~/.terraform.logs} @@ -122,8 +118,7 @@ if [ "${caf_command}" != "walkthrough" ]; then fi information "Tenant id : '$(echo ${TF_VAR_tenant_id})'" information "CI/CD enabled : '$(echo ${devops})'" - information "Symphony Yaml file path : '$(echo ${symphony_yaml_file})'" - information "Run all tasks : '$(echo ${symphony_run_all_tasks})'" + information "Run all tasks : 'true'" if [ ! -z "$TF_LOG" ]; then information "TF_LOG : '$(echo ${TF_LOG})'" @@ -133,7 +128,7 @@ if [ "${caf_command}" != "walkthrough" ]; then fi fi -if [ $symphony_run_all_tasks == false ]; then +if [ ! -z "$ci_task_name" ]; then information "Running task : '$(echo ${ci_task_name})'" fi information "" diff --git a/scripts/symphony_yaml.sh b/scripts/symphony_yaml.sh deleted file mode 100755 index 971db3676..000000000 --- a/scripts/symphony_yaml.sh +++ /dev/null @@ -1,223 +0,0 @@ -#!/bin/bash - -# helper functions for working with symphony yaml file - -function get_integration_test_path { - local symphony_yaml_file=$1 - - integration_test_path=$(yq -r '.integrationTestsPath' $symphony_yaml_file) - echo "$integration_test_path" -} - - -function get_level { - symphony_yaml_file=$1 - index=$2 - - json=$(yq ".levels[${2}]" $1) - echo $json -} - -function get_level_by_name { - local symphony_yaml_file=$1 - local level=$2 - - json=$(yq -r --arg level $level '.levels[] | select(.level == $level)' $symphony_yaml_file) - echo $json -} - -function get_level_count { - local symphony_yaml_file=$1 - - yq '.levels | length' $symphony_yaml_file -} - - -function get_all_level_names { - local symphony_yaml_file=$1 - - echo $(yq -r -c '.levels[].level' $symphony_yaml_file) -} - -function get_landingzone_path_for_stack { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - relativePath=$(yq -r -c --arg level $level_name --arg stack $stack_name \ - '.levels[] | select(.level == $level) | .stacks[] | select (.stack == $stack) | .landingZonePath' $symphony_yaml_file) - - echo "${base_directory}/${relativePath}" -} - -function get_config_path_for_stack { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - relativePath=$(yq -r -c --arg level $level_name --arg stack $stack_name \ - '.levels[] | select(.level == $level) | .stacks[] | select (.stack == $stack) | .configurationPath' $symphony_yaml_file) - - echo "${base_directory}/${relativePath}" -} - -function get_state_file_name_for_stack { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - stateFileName=$(yq -r -c --arg level $level_name --arg stack $stack_name \ - '.levels[] | select(.level == $level) | .stacks[] | select (.stack == $stack) | .tfState' $symphony_yaml_file) - - echo $stateFileName -} - -function get_all_stack_names_for_level { - local symphony_yaml_file=$1 - level_name=$2 - - echo $(yq -r -c --arg level $level_name '.levels[] | select(.level == $level) | .stacks[].stack' $symphony_yaml_file) -} - -function get_stack_by_name_for_level { - local symphony_yaml_file=$1 - local level_name=$2 - local 
stack_name=$3 - - json=$(yq -r -c --arg level $level_name --arg stack $stack_name \ - '.levels[] | select(.level == $level) | .stacks[] | select (.stack == $stack)' $symphony_yaml_file) - echo $json -} - -function validate_symphony { - information "\n@ starting validation of symphony yaml. path: $symphony_yaml_file" - local symphony_yaml_file=$1 - - local -a levels=($(get_all_level_names "$symphony_yaml_file")) - local -a results=() - - # for each level and each stack within the level - # Validate path exist for lz and config - # For stack config path, check at least 1 .tfvars exist - # For lz config path, check at least 1 .tf file exist - local code=0 - - for level in "${levels[@]}" - do - - local -a stacks=($(get_all_stack_names_for_level "$symphony_yaml_file" "$level" )) - for stack in "${stacks[@]}" - do - - # test landing zone path - test_lz=$(check_landing_zone_path_exists "$symphony_yaml_file" "$level" "$stack") - - if [ $test_lz == 'false' ]; then - code=1 - error_message " - error: Level '${level}' - Stack '$stack' has invalid landing zone path." - fi - - # test configuration path - test_config=$(check_configuration_path_exists "$symphony_yaml_file" "$level" "$stack") - - if [ $test_config == 'false' ]; then - code=1 - error_message " - error: Level '${level}' - Stack '$stack' has invalid configuration folder path." - fi - - # test if tf files exist in landing zone - test_lz_files=$(check_tf_exists "$symphony_yaml_file" "$level" "$stack") - - if [ $test_lz_files == 'false' ]; then - code=1 - error_message " - error: Level '${level}' - Stack '$stack', no .tf files found in landing zone." - fi - - # test if tfvars files exist in configuration directory - test_config_files=$(check_tfvars_exists "$symphony_yaml_file" "$level" "$stack") - - if [ $test_config_files == 'false' ]; then - code=1 - error_message " - error: Level '${level}' - Stack '$stack', no .tfvars files found in configuration folder." - fi - done - done - - if [ "$code" != "0" ]; then - echo "" - error "" "$symphony_yaml_file contains invalid paths." - return 1 - fi - - success " All paths in $symphony_yaml_file are valid. \n" - return 0 - -} - -function check_landing_zone_path_exists { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - landing_zone_path=$(get_landingzone_path_for_stack "$symphony_yaml_file" "$level_name" "$stack_name") - - if [[ ! -d $landing_zone_path ]]; then - # path does not exist - echo false - return - fi - - # path exists - echo true -} - -function check_configuration_path_exists { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - config_path=$(get_config_path_for_stack $symphony_yaml_file $level_name $stack_name) - - if [[ ! 
-d $config_path ]]; then - # path does not exist - echo false - return - fi - - # path exists - echo true - -} - -function check_tfvars_exists { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - config_path=$(get_config_path_for_stack $symphony_yaml_file $level_name $stack_name) - - local files=(${config_path}*.tfvars) - - if [[ ${#files[@]} -gt 0 ]]; then - echo true - return - fi - - echo false -} -function check_tf_exists { - local symphony_yaml_file=$1 - local level_name=$2 - local stack_name=$3 - - landing_zone_path=$(get_landingzone_path_for_stack "$symphony_yaml_file" "$level_name" "$stack_name") - - local files=(${landing_zone_path}*.tf) - - if [[ ${#files[@]} -gt 0 ]]; then - echo true - return - fi - - echo false -} \ No newline at end of file diff --git a/spec/harness/symphony.yml b/spec/harness/symphony.yml deleted file mode 100644 index 373d74444..000000000 --- a/spec/harness/symphony.yml +++ /dev/null @@ -1,29 +0,0 @@ -environment: prod -integrationTestsPath: /symphony/tests - -repositories: - - name: launchpad_lz - uri: rguthrie-gitlab-ce.eastus.cloudapp.azure.com:reference_app_caf/caf_modules_public.git - branch: master - - name: launchpad_config - uri: rguthrie-gitlab-ce.eastus.cloudapp.azure.com:reference_app_caf/base_config.git - branch: master - - name: solution_lz - uri: rguthrie-gitlab-ce.eastus.cloudapp.azure.com:reference_app_caf/caf_modules_app.git - branch: master - - name: solution_aks_config - uri: rguthrie-gitlab-ce.eastus.cloudapp.azure.com:reference_app_caf/app_config_aks.git - branch: master - - name: argocd_config - uri: rguthrie-gitlab-ce.eastus.cloudapp.azure.com:reference_app_caf/app_config_argocd.git - branch: master - - # All paths are relative -levels: -- level: level0 - type: platform - stacks: - - stack: launchpad - landingZonePath: spec/harness/landingzones/launchpad - configurationPath: spec/harness/configs/level0/launchpad - launchpad: true \ No newline at end of file diff --git a/spec/unit/cd/execute_cd_spec.sh b/spec/unit/cd/execute_cd_spec.sh deleted file mode 100644 index 6b9a47fe3..000000000 --- a/spec/unit/cd/execute_cd_spec.sh +++ /dev/null @@ -1,255 +0,0 @@ -Describe 'cd.sh' - Include scripts/cd.sh - Include scripts/lib/logger.sh - Include scripts/functions.sh - - Describe "execute_cd" - #Function Mocks - - validate_symphony () { - echo "" - } - - escape () { - echo "Escape code: $1" - } - - error() { - # local parent_lineno="$1" - # local message="$2" - # >&2 echo "Error line:${parent_lineno}: message:${message} status :${code}" - # return ${code} - echo "" - } - - get_config_path_for_stack() { - echo "foo" - } - - get_state_file_name_for_stack() { - echo "bar" - } - - get_integration_test_path() { - echo "integration_test_path" - } - - get_all_level_names() { - echo "level1" - } - - get_all_stack_names_for_level() { - echo "foundations" - } - - get_landingzone_path_for_stack() { - echo "caf_modules_public/landingzones/caf_foundations/" - } - - deploy() { - export deploy_called=true - echo "deploy called with: $1" - } - - set_autorest_environment_variables () { - export set_autorest_environment_variables_called=true - } - - run_integration_tests (){ - export run_integration_tests_called=true - echo "run_integration_tests called with: $1" - } - - Context "cd action == run" - setup() { - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="all" - export symphony_yaml_file="spec/harness/symphony.yml" - export 
cd_action="run" - } - BeforeEach 'setup' - - It 'should call deploy and run tests' - When call execute_cd - The output should include '@Starting CD execution' - The variable deploy_called should equal true - The variable set_autorest_environment_variables_called should equal true - The variable run_integration_tests_called should equal true - End - End - - Context "cd action == apply" - setup() { - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="all" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="apply" - } - BeforeEach 'setup' - - It 'should call deploy and not run tests' - When call execute_cd - The output should include '@Starting CD execution' - The variable deploy_called should equal true - The variable set_autorest_environment_variables_called should equal false - The variable run_integration_tests_called should equal false - End - End - - Context "cd action == test" - setup() { - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="all" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="test" - } - BeforeEach 'setup' - - It 'should run tests and not call deploy' - When call execute_cd - The output should include '@Starting CD execution' - The variable deploy_called should equal false - The variable set_autorest_environment_variables_called should equal true - The variable run_integration_tests_called should equal true - End - End - - Context "cd action == run, workspace=test1workspace" - setup() { - export TF_VAR_workspace="test1workspace" - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="all" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="run" - } - BeforeEach 'setup' - - It 'should call deploy with the specified workspace' - When call execute_cd - The output should include 'deploy called with: test1workspace' - The variable deploy_called should equal true - The variable set_autorest_environment_variables_called should equal true - The variable run_integration_tests_called should equal true - End - End - - Context "cd action == apply, workspace=test1workspace" - setup() { - export TF_VAR_workspace="test1workspace" - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="all" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="apply" - } - BeforeEach 'setup' - - It 'should call deploy with the specified workspace' - When call execute_cd - The output should include 'deploy called with: test1workspace' - The variable deploy_called should equal true - The variable set_autorest_environment_variables_called should equal false - The variable run_integration_tests_called should equal false - End - End - - Context "cd action == run, workspace=test1workspace" - setup() { - export base_directory="base_dir/" - export TF_VAR_workspace="test1workspace" - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="all" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="run" - } - BeforeEach 'setup' - - It 'should call run_integration_tests with the correct test path' - When 
call execute_cd - The output should include 'run_integration_tests called with: base_dir/integration_test_path' - End - End - - Context "level=0" - setup() { - export base_directory="base_dir/" - export TF_VAR_workspace="test1workspace" - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="level0" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="run" - } - BeforeEach 'setup' - - It 'should set caf_command to launchpad' - When call execute_cd - The variable caf_command should equal "launchpad" - The output should include '@Starting CD execution' - End - End - - - Context "level=1" - setup() { - export base_directory="base_dir/" - export TF_VAR_workspace="test1workspace" - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="level1" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="run" - } - BeforeEach 'setup' - - It 'should set caf_command to landingzone' - When call execute_cd - The variable caf_command should equal "landingzone" - The output should include '@Starting CD execution' - End - End - - Context "level=1 cd_action=plan" - setup() { - export base_directory="base_dir/" - export TF_VAR_workspace="test1workspace" - export deploy_called=false - export run_integration_tests_called=false - export set_autorest_environment_variables_called=false - - export TF_VAR_level="level1" - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="plan" - } - BeforeEach 'setup' - - It 'should set tf_action to plan' - When call execute_cd - The variable tf_action should equal "plan" - The output should include '@Starting CD execution' - End - End - - End -End \ No newline at end of file diff --git a/spec/unit/cd/join_path_spec.sh b/spec/unit/cd/join_path_spec.sh deleted file mode 100644 index 9ed5a5df9..000000000 --- a/spec/unit/cd/join_path_spec.sh +++ /dev/null @@ -1,41 +0,0 @@ -Describe 'cd.sh' - Include scripts/cd.sh - Include scripts/lib/logger.sh - Include scripts/functions.sh - - Describe "join_path" - - It 'should handle base path with no ending slash' - When call join_path "a/b" "c" - The output should include 'a/b/c' - End - - It 'should handle base path with an ending slash' - When call join_path "a/b/" "c" - The output should include 'a/b/c' - End - - It 'should handle a part with a leading slash and basepath with ending slash' - When call join_path "a/b/" "/c" - The output should include 'a/b/c' - End - - - It 'should handle a part with a leading slash and basepath with no ending slash ' - When call join_path "a/b" "/c" - The output should include 'a/b/c' - End - - It 'should handle a part with no leading slash and basepath with ending slash' - When call join_path "a/b/" "c" - The output should include 'a/b/c' - End - - - It 'should handle a part with no leading slash and basepath with no ending slash ' - When call join_path "a/b" "c" - The output should include 'a/b/c' - End - - End -End \ No newline at end of file diff --git a/spec/unit/cd/set_autorest_environment_variables_spec.sh b/spec/unit/cd/set_autorest_environment_variables_spec.sh deleted file mode 100644 index 5002610c6..000000000 --- a/spec/unit/cd/set_autorest_environment_variables_spec.sh +++ /dev/null @@ -1,33 +0,0 @@ -Describe 'cd.sh' - Include scripts/cd.sh - Include scripts/lib/logger.sh - Include scripts/functions.sh - - Describe 
"set_autorest_environment_variables" - - Context "AZURE_ENVIRONMENT == AzureCloud" - setup() { - export AZURE_ENVIRONMENT="AzureCloud" - } - BeforeEach 'setup' - - It 'should set AZURE_ENVIRONMENT to AzurePublicCloud' - When call set_autorest_environment_variables - The variable AZURE_ENVIRONMENT should equal "AzurePublicCloud" - End - End - - Context "AZURE_ENVIRONMENT == AzureCloud" - setup() { - export AZURE_ENVIRONMENT="AzureUSGovernment" - } - BeforeEach 'setup' - - It 'should set AZURE_ENVIRONMENT to AzureUSGovernmentCloud' - When call set_autorest_environment_variables - The variable AZURE_ENVIRONMENT should equal "AzureUSGovernmentCloud" - End - End - - End -End \ No newline at end of file diff --git a/spec/unit/cd/verify_cd_parameters_spec.sh b/spec/unit/cd/verify_cd_parameters_spec.sh deleted file mode 100644 index c0eb74b34..000000000 --- a/spec/unit/cd/verify_cd_parameters_spec.sh +++ /dev/null @@ -1,147 +0,0 @@ -Describe 'cd.sh' - Include scripts/cd.sh - Include scripts/lib/logger.sh - Include scripts/functions.sh - - Describe "verify_cd_parameters" - #Function Mocks - - validate_symphony () { - echo "" - } - - escape () { - echo "Escape code: $1" - } - - error() { - # local parent_lineno="$1" - # local message="$2" - # >&2 echo "Error line:${parent_lineno}: message:${message} status :${code}" - # return ${code} - echo "here*******" - } - - Context "run action & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="run" - } - BeforeEach 'setup' - - It 'should handle known cd run' - When call verify_cd_parameters - The output should include 'Found valid cd action - terraform run' - The status should eq 0 - End - End - - Context "run action & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="apply" - } - BeforeEach 'setup' - - It 'should handle known cd apply' - When call verify_cd_parameters - The output should include 'Found valid cd action - terraform apply' - The status should eq 0 - End - End - - Context "run action & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="plan" - } - BeforeEach 'setup' - - It 'should handle known cd plan' - When call verify_cd_parameters - The output should include 'Found valid cd action - terraform plan' - The status should eq 0 - End - End - - Context "test action & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="test" - } - BeforeEach 'setup' - - It 'should handle known cd test' - When call verify_cd_parameters - The output should include 'Found valid cd action test' - The status should eq 0 - End - End - - Context "rover deploy -h & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="-h" - } - BeforeEach 'setup' - - It 'should show help usage' - When call verify_cd_parameters - The output should include '@Verifying cd parameters' - The error should include 'Usage:' - The error should include 'rover deploy ' - The status should eq 0 - End - End - - Context "rover cd run -h & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="run" - export PARAMS="-h " - } - BeforeEach 'setup' - - It 'should show help usage' - When call verify_cd_parameters - The output should include '@Verifying cd parameters' - The error should include 'Usage:' - The error should 
include 'rover deploy ' - The status should eq 0 - End - End - - Context "invalid action & valid Symphony Yaml Provided" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export cd_action="bad_action" - } - - BeforeEach 'setup' - - It 'should handle show an error message for invalid cd actions' - When call verify_cd_parameters - The output should include '@Verifying cd parameters' - The error should include 'Invalid cd action bad_action' - The output should include 'Escape code: 1' - End - End - - Context "rover cd only" - setup() { - unset symphony_yaml_file - export cd_action="bad_action" - } - - BeforeEach 'setup' - - It 'show usage if rover cd is called' - When call verify_cd_parameters - The output should include '@Verifying cd parameters' - The error should include 'Invalid cd action bad_action' - The status should eq 1 - End - End - - End -End \ No newline at end of file diff --git a/spec/unit/ci_spec.sh b/spec/unit/ci_spec.sh deleted file mode 100644 index a1f7fe6de..000000000 --- a/spec/unit/ci_spec.sh +++ /dev/null @@ -1,230 +0,0 @@ -Describe 'ci.sh' - Include scripts/ci.sh - Include scripts/functions.sh - - Describe "verify_ci_parameters" - #Function Mocks - error() { - local parent_lineno="$1" - local message="$2" - >&2 echo "Error line:${parent_lineno}: message:${message} status :${code}" - return ${code} - } - - Context "No Symphony Yaml Provided" - setup() { - unset symphony_yaml_file - } - BeforeEach 'setup' - - It 'should return an error that the path to symphony.yml was not provided' - When call verify_ci_parameters - The output should eq '@Verifying ci parameters' - The error should eq 'Error line:1: message:Missing path to symphony.yml. Please provide a path to the file via -sc or --symphony-config status :1' - The status should eq 1 - End - End - - Context "Symphony Yaml Provided, invalid file" - setup() { - export symphony_yaml_file="spec/harness/symphony2.yml" - export base_directory="." - } - BeforeEach 'setup' - - It 'should return an error if the symphony yaml path points to an invalid or missing file' - When call verify_ci_parameters - The output should eq '@Verifying ci parameters' - The error should eq 'Error line:1: message:Invalid path, spec/harness/symphony2.yml file not found. Please provide a valid path to the file via -sc or --symphony-config status :1' - The status should eq 1 - End - End - - - Context "Symphony Yaml Provided, valid file" - Describe "tasks registered" - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." - - # create mock dirs - mkdir -p ./spec/harness/landingzones/launchpad - touch ./spec/harness/landingzones/launchpad/main.tf - - mkdir -p ./spec/harness/configs/level0/launchpad - touch ./spec/harness/configs/level0/launchpad/configuration.tfvars - } - - teardown(){ - rm -rf ./spec/harness/configs - rm -rf ./spec/harness/landingzones - } - - BeforeEach 'setup' - AfterEach 'teardown' - - It 'should return no errors if symphony yaml is valid and ci tasks are registered' - When call verify_ci_parameters - The output should include '@Verifying ci parameters' - The output should include '@ starting validation of symphony yaml. path:' - The error should eq '' - The status should eq 0 - End - End - - Describe "single task execution - success" - validate_symphony() { - echo "" - } - - setup() { - CI_TASK_CONFIG_FILE_LIST=() - REGISTERED_CI_TASKS=() - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." 
- export ci_task_name='task1' - export CI_TASK_DIR='spec/harness/ci_tasks/' - register_ci_tasks - } - - Before 'setup' - - It 'should return no errors if symphony yaml is valid and ci tasks are registered' - When call verify_ci_parameters - The error should include '' - The output should include '@Verifying ci parameters' - The status should eq 0 - End - End - - Describe "single task execution - error" - validate_symphony() { - echo "" - } - - setup() { - CI_TASK_CONFIG_FILE_LIST=() - REGISTERED_CI_TASKS=() - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." - export ci_task_name='task' - export CI_TASK_DIR='spec/harness/ci_tasks/' - register_ci_tasks - } - - Before 'setup' - - It 'should return an error if symphony yaml is valid and ci task name is not registered' - When call verify_ci_parameters - The error should include 'task is not a registered ci command!' - The output should include '@Verifying ci parameters' - The status should eq 1 - End - End - End - - End - - Describe "execute_ci_actions" - - Context "Happy Path Validation" - - get_all_level_names() { - echo "level1" - } - - get_all_stack_names_for_level() { - echo "foundations" - } - - get_landingzone_path_for_stack() { - echo "caf_modules_public/landingzones/caf_foundations/" - } - - run_task() { - echo "run_task arguments: $@"; - return 0 - } - - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." - export TF_VAR_level='all' - } - - BeforeEach 'setup' - - It 'should return no errors when executing all task using the test symphony yaml.' - When call execute_ci_actions - The output should include "@Starting CI tools execution" - The output should include "All CI tasks have run successfully." - The error should eq '' - The status should eq 0 - End - - End - - End - - Describe "single level test - execute_ci_actions" - - Context "Single Level Test - Invalid Level" - - #Function Mocks - error() { - local parent_lineno="$1" - local message="$2" - >&2 echo "Error line:${parent_lineno}: message:${message} status :${code}" - return ${code} - } - - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." - export TF_VAR_level='level1' - } - - BeforeEach 'setup' - - It 'should return an error when executing because the level is invalid.' - When call execute_ci_actions - The output should include "@Starting CI tools execution" - The error should include 'message:No stacks found, check that level level1 exist and has stacks defined in spec/harness/symphony.yml status :1' - End - - End - - End - - Describe "execute_ci_actions - single level test " - - Context "Single Level Test - Valid Level" - - #Function Mocks - error() { - local parent_lineno="$1" - local message="$2" - >&2 echo "Error line:${parent_lineno}: message:${message} status :${code}" - return ${code} - } - - setup() { - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." - export TF_VAR_level='level0' - } - - BeforeEach 'setup' - - It 'should return no errors when executing all task using the test symphony yaml because the level name is valid.' 
- When call execute_ci_actions - The output should include "@Starting CI tools execution" - The error should eq '' - The status should eq 0 - End - - End - - End - -End \ No newline at end of file diff --git a/spec/unit/symphony_spec.sh b/spec/unit/symphony_spec.sh deleted file mode 100644 index 8d59e8c1e..000000000 --- a/spec/unit/symphony_spec.sh +++ /dev/null @@ -1,78 +0,0 @@ -Describe 'symphony_yaml.sh' - Include scripts/symphony_yaml.sh - get_landingzone_path_for_stack() { - echo "./temp_lz_dir/" - } - - get_config_path_for_stack() { - echo "./temp_config_dir/" - } - - setup() { - mkdir "./temp_lz_dir/" - mkdir "./temp_config_dir/" - - touch ./temp_lz_dir/main.tf - touch temp_config_dir/configuration.tfvars - - export symphony_yaml_file="spec/harness/symphony.yml" - export base_directory="." - } - - teardown() { - rm -rf ./temp_lz_dir/ - rm -rf ./temp_config_dir/ - } - - Context "check_landing_zone_path_exists" - - Before 'setup' - After 'teardown' - - It 'should return no errors and test that ./temp_lz_dir/ exists.' - When call check_landing_zone_path_exists $symphony_yaml_file 'lvl' 'stack' - The output should include 'true' - The error should eq '' - The status should eq 0 - End - End - - Context "check_configuration_path_exists" - - Before 'setup' - After 'teardown' - - It 'should return no errors and test that ./temp_lz_dir/ exists.' - When call check_configuration_path_exists $symphony_yaml_file 'lvl' 'stack' - The output should include 'true' - The error should eq '' - The status should eq 0 - End - End - - Context "check_tfvars_exists" - - Before 'setup' - After 'teardown' - - It 'should return no errors and test ./spec/harness/configs/level0/launchpad/configuration.tfvars exists.' - When call check_tfvars_exists $symphony_yaml_file 'level0' 'launchpad' - The output should include 'true' - The error should eq '' - The status should eq 0 - End - End - - Context "check_tf_exists" - - Before 'setup' - After 'teardown' - - It 'should return no errors and test ./spec/harness/landingzones/launchpad/main.tf exists.' - When call check_tf_exists $symphony_yaml_file 'level0' 'launchpad' - The output should include 'true' - The error should eq '' - The status should eq 0 - End - End -End \ No newline at end of file diff --git a/spec/unit/task_spec.sh b/spec/unit/task_spec.sh deleted file mode 100644 index 00b06c2d7..000000000 --- a/spec/unit/task_spec.sh +++ /dev/null @@ -1,58 +0,0 @@ -Describe 'task.sh' - Include scripts/task.sh - Describe "get_list_of_task" - #Function Mocks - error() { - local parent_lineno="$1" - local message="$2" - >&2 echo "Error line:${parent_lineno}: message:${message} status :${code}" - return ${code} - } - - Context "Invalid CI Task Dir Provided" - - It 'should return an error that the path to symphony.yml is not provided' - When call get_list_of_task './bogus_ci_dir/' - The error should eq 'Error line:1: message:Invalid CI Directory path, ./bogus_ci_dir/ not found. 
status :1' - The status should eq 1 - End - End - - Context "Detect 2 tasks" - It 'should return no errors if task.yml files exist in the provided directory path' - When call get_list_of_task 'spec/harness/ci_tasks/' - The output should eq 'spec/harness/ci_tasks/task1.yml spec/harness/ci_tasks/task2.yml spec/harness/ci_tasks/task3.yml' - The status should eq 0 - End - End - End - - Describe "format_task_parameters" - Context "Invalid Json" - - It 'should return an error that the JSON is invalid' - When call format_task_parameters "Invalid Json" - The error should eq 'parse error: Invalid numeric literal at line 1, column 8' - The output should eq '' - End - End - - Context "Valid Json without prefix" - - It 'should return valid parameters' - When call format_task_parameters $(get_task_parameters_json spec/harness/ci_tasks/task1.yml) - The error should eq '' - The output should eq 'config1=value1' - End - End - - Context "Valid Json w/ prefix" - - It 'should return valid parameters with prefix' - When call format_task_parameters $(get_task_parameters_json spec/harness/ci_tasks/task2.yml) - The error should eq '' - The output should eq '--config2=value2' - End - End - End -End \ No newline at end of file
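For context on the scripts/functions.sh change above: when .devcontainer/docker-compose.yml is absent, verify_rover_version now derives the expected rover version from the image tag declared in .devcontainer/devcontainer.json. The fragment below is a minimal standalone sketch of that fallback, assuming the same file location used in the patch; the script wrapper, variable name, and echo line are illustrative and not part of the patch itself.

#!/bin/bash
# Sketch only: mirrors the devcontainer.json branch added to verify_rover_version.
# Assumes the repository is mounted at /tf/caf, as in the rover container.
set -euo pipefail

devcontainer_json="/tf/caf/.devcontainer/devcontainer.json"

# devcontainer.json may contain // comments (JSONC), which jq cannot parse,
# so drop any line containing // before reading the "image" property, then
# keep only the tag portion after the last ':'.
required_version=$(sed '/\/\//d' "$devcontainer_json" | jq -r .image | awk -F ':' '{print $NF}' || true)

echo "required rover version: ${required_version}"

Stripping the comment lines first is what lets plain jq read the file; without that step jq rejects devcontainer.json as invalid JSON.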