# Workflow file as shown for run "retrigger ci" #4 (copied from the GitHub
# Actions UI; the surrounding page chrome has been removed so the file is
# valid YAML).

name: "Delta Flink"
# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's loader handles it,
# so suppress yamllint `truthy` here rather than quoting.
on: [push, pull_request]
# Cancel previous runs when new commits are pushed to the same PR/ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  # Point SBT to our cache directories for consistency with the cache step below.
  SBT_OPTS: "-Dsbt.coursier.home-dir=/home/runner/.cache/coursier -Dsbt.ivy.home=/home/runner/.ivy2"
jobs:
  test:
    name: "DF"
    runs-on: ubuntu-24.04
    steps:
      # Log hardware specs so slow runs can be correlated with runner capacity.
      - name: Show runner specs
        run: |
          echo "=== GitHub Runner Specs ==="
          echo "CPU cores: $(nproc)"
          echo "CPU info: $(lscpu | grep 'Model name' | cut -d':' -f2 | xargs)"
          echo "Total RAM: $(free -h | grep '^Mem:' | awk '{print $2}')"
          echo "Available RAM: $(free -h | grep '^Mem:' | awk '{print $7}')"
          echo "Disk space: $(df -h / | tail -1 | awk '{print $2 " total, " $4 " available"}')"
          echo "Runner OS: ${{ runner.os }}"
          echo "Runner arch: ${{ runner.arch }}"
      - name: Checkout code
        uses: actions/checkout@v4
      # Run unit tests with JDK 17. These unit tests depend on Spark, and Spark 4.0+ is JDK 17.
      - name: install java
        uses: actions/setup-java@v4
        with:
          distribution: "zulu"
          java-version: "17"
      - name: Cache SBT and dependencies
        id: cache-sbt
        uses: actions/cache@v4
        with:
          path: |
            ~/.sbt
            ~/.ivy2/cache
            ~/.coursier/cache
            ~/.cache/coursier
          # Key on the build definition so the cache refreshes when dependencies
          # change; a purely static key would serve stale artifacts forever.
          key: sbt-flink-${{ hashFiles('**/build.sbt', 'project/**.sbt', 'project/build.properties') }}
          # Fall back to the most recent previous cache on a partial match so a
          # dependency bump still starts from a warm cache.
          restore-keys: |
            sbt-flink-
      - name: Check cache status
        run: |
          if [ "${{ steps.cache-sbt.outputs.cache-hit }}" == "true" ]; then
            echo "✅ Cache HIT - using cached dependencies"
          else
            echo "❌ Cache MISS - will download dependencies"
          fi
      - name: Run unit tests
        run: |
          build/sbt flinkGroup/test