diff --git a/.github/REUSABLE_WORKFLOWS.md b/.github/REUSABLE_WORKFLOWS.md
index 7faf28d3..82317010 100644
--- a/.github/REUSABLE_WORKFLOWS.md
+++ b/.github/REUSABLE_WORKFLOWS.md
@@ -5,7 +5,7 @@
 The workflows in `Framework-R-D/phlex/` may be invoked as follows:
 
 1. Automatically as part of CI checks on a PR submitted to `Framework-R-D/phlex`, at PR creation time and thereafter on pushes to the PR branch. This should work whether your PR branch is situated in the primary repository or a fork.
-1. Via triggering comments on the PR (`@phlexbot `).
+1. Via triggering comments on the PR (`@<repository-name>bot `).
 1. Via the "actions" tab on the project's GitHub web page.
 
 Additionally, you can configure your own fork of Phlex to run CI checks on local PRs, and on its default branch, following the instructions below.
@@ -34,7 +34,7 @@ However, to enable the automatic fixing features (e.g., for `cmake-format-fix` o
 1. **Enable Workflows:** By default, GitHub Actions are disabled on forks. You must manually enable them by going to the `Actions` tab of your forked repository and clicking the "I understand my workflows, go ahead and enable them" button.
 1. **Create the `WORKFLOW_PAT` Secret:** The auto-fix workflows require a Personal Access Token (PAT) with write permissions to commit changes back to your PR branch. Follow the instructions below to create a PAT and add it as a secret named `WORKFLOW_PAT` **to your forked repository's settings**.
 
-Once you have done this, you can trigger the auto-fix workflows by commenting on a pull request in your fork (e.g., `@phlexbot format`).
+Once you have done this, you can trigger the auto-fix workflows by commenting on a pull request in your fork (e.g., `@<repository-name>bot format`).
 
### Creating a Personal Access Token (PAT) @@ -56,7 +56,7 @@ To use a workflow, you call it from a workflow file in your own repository's `.g ```yaml jobs: some_job: - uses: Framework-R-D/phlex/.github/workflows/.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/.yaml@ with: # ... inputs for the workflow ... secrets: @@ -69,6 +69,25 @@ You should follow the instructions in the previous section to create the `WORKFL For development purposes, you may choose to use `@main` at your own risk to get the latest changes. +#### Emulating Trigger Types and Relevance Checks + +When calling a reusable workflow, it's often desirable to emulate the behavior of the calling workflow's trigger. For example, if your workflow is triggered by a manual `workflow_dispatch`, you likely want the reusable workflow to skip its relevance detection and check all files. Conversely, if triggered by a `pull_request`, you want detection enabled. + +You can achieve this by passing the appropriate value to the `skip-relevance-check` input: + +```yaml + with: + skip-relevance-check: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'issue_comment' }} +``` + +Additionally, to ensure the reusable workflow can access the correct code in an extra-repository context, always pass the `ref` and `repo`: + +```yaml + with: + ref: ${{ github.event.pull_request.head.sha || github.sha }} + repo: ${{ github.repository }} +``` + --- ## Available Workflows and Their Inputs @@ -82,7 +101,7 @@ Builds and tests your project using CMake. ```yaml jobs: build_and_test: - uses: Framework-R-D/phlex/.github/workflows/cmake-build.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/cmake-build.yaml@ with: # Optional: A list of build combinations to run (e.g., "gcc/asan clang/tsan") build-combinations: 'all -clang/valgrind' @@ -111,7 +130,7 @@ Checks CMake files for formatting issues using `gersemi`. 
```yaml jobs: check_cmake_format: - uses: Framework-R-D/phlex/.github/workflows/cmake-format-check.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/cmake-format-check.yaml@ ``` #### All Inputs @@ -140,7 +159,7 @@ jobs: github.event.issue.pull_request && (github.event.comment.author_association == 'COLLABORATOR' || github.event.comment.author_association == 'OWNER') && startsWith(github.event.comment.body, format('@{0}bot format', github.event.repository.name)) - uses: Framework-R-D/phlex/.github/workflows/cmake-format-fix.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/cmake-format-fix.yaml@ with: # The ref and repo of the PR need to be retrieved and passed ref: ${{ steps.get_pr_info.outputs.ref }} @@ -166,7 +185,7 @@ Checks Python code for formatting and type errors using `ruff` and `mypy`. ```yaml jobs: check_python: - uses: Framework-R-D/phlex/.github/workflows/python-check.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/python-check.yaml@ ``` #### All Inputs @@ -195,7 +214,7 @@ jobs: github.event.issue.pull_request && (github.event.comment.author_association == 'COLLABORATOR' || github.event.comment.author_association == 'OWNER') && startsWith(github.event.comment.body, format('@{0}bot python-fix', github.event.repository.name)) - uses: Framework-R-D/phlex/.github/workflows/python-fix.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/python-fix.yaml@ with: # The ref and repo of the PR need to be retrieved and passed ref: ${{ steps.get_pr_info.outputs.ref }} @@ -219,7 +238,7 @@ Checks Markdown files for formatting issues using `markdownlint`. 
```yaml jobs: check_markdown: - uses: Framework-R-D/phlex/.github/workflows/markdown-check.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/markdown-check.yaml@ ``` #### All Inputs @@ -253,7 +272,7 @@ jobs: startsWith(github.event.comment.body, format('@{0}bot format', github.event.repository.name)) || startsWith(github.event.comment.body, format('@{0}bot markdown-fix', github.event.repository.name)) ) - uses: Framework-R-D/phlex/.github/workflows/markdown-fix.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/markdown-fix.yaml@ with: # The ref and repo of the PR need to be retrieved and passed ref: ${{ steps.get_pr_info.outputs.ref }} @@ -277,7 +296,7 @@ Checks GitHub Actions workflow files for errors and best practices using `action ```yaml jobs: check_actions: - uses: Framework-R-D/phlex/.github/workflows/actionlint-check.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/actionlint-check.yaml@ ``` #### All Inputs @@ -296,7 +315,7 @@ Performs static analysis on the codebase using GitHub CodeQL to identify potenti ```yaml jobs: analyze: - uses: Framework-R-D/phlex/.github/workflows/codeql-analysis.yaml@cef968c52aab432b836bb28119a9661c82c8b0d1 + uses: Framework-R-D/phlex/.github/workflows/codeql-analysis.yaml@ ``` #### All Inputs @@ -308,6 +327,44 @@ jobs: - `pr-head-repo` (string, optional): The full name of the PR head repository. - `pr-base-repo` (string, optional): The full name of the PR base repository. +### 5. `jsonnet-format-check.yaml` + +Checks Jsonnet files for formatting issues using `jsonnetfmt`. 
+
+#### Usage Example
+
+```yaml
+jobs:
+  check_jsonnet:
+    uses: Framework-R-D/phlex/.github/workflows/jsonnet-format-check.yaml@<commit-sha>
+    with:
+      # Optional: bypass detection and check all files (useful for manual triggers)
+      skip-relevance-check: ${{ github.event_name == 'workflow_dispatch' }}
+```
+
+#### All Inputs
+
+- `checkout-path` (string, optional): Path to check out code to.
+- `skip-relevance-check` (boolean, optional, default: `false`): Bypass the check that only runs if Jsonnet files have changed.
+- `ref` (string, optional): The branch or ref to check out.
+- `repo` (string, optional): The repository to check out from.
+- `pr-base-sha` (string, optional): Base SHA of the PR for relevance check.
+- `pr-head-sha` (string, optional): Head SHA of the PR for relevance check.
+
+### 6. `jsonnet-format-fix.yaml`
+
+Automatically formats Jsonnet files using `jsonnetfmt` and commits the changes. Typically triggered by an `issue_comment`.
+
+#### Usage Example
+
+*Similar to `cmake-format-fix.yaml`, but triggered by a command like `@<repository-name>bot jsonnet-format-fix`.*
+
+#### All Inputs
+
+- `checkout-path` (string, optional): Path to check out code to.
+- `ref` (string, **required**): The branch or ref to check out.
+- `repo` (string, **required**): The repository to check out from.
+
 ### Other Workflows
 
-The repository also provides `clang-format-check.yaml`, `clang-format-fix.yaml`, `clang-tidy-check.yaml`, and `clang-tidy-fix.yaml`, which can be used in a similar manner.
+The repository also provides `clang-format-check.yaml`, `clang-format-fix.yaml`, `clang-tidy-check.yaml`, and `clang-tidy-fix.yaml`. However, these workflows are currently **not** available for reuse via `workflow_call` as they are specifically intended for use on this repository and its forks.
diff --git a/.github/actions/detect-relevant-changes/action.yaml b/.github/actions/detect-relevant-changes/action.yaml index 07ab8dbf..30968874 100644 --- a/.github/actions/detect-relevant-changes/action.yaml +++ b/.github/actions/detect-relevant-changes/action.yaml @@ -54,7 +54,11 @@ runs: DEFAULT_TYPE_PATTERNS[cmake]=$'CMakeLists.txt\n*.cmake' DEFAULT_TYPE_PATTERNS[python]=$'*.py' DEFAULT_TYPE_PATTERNS[md]=$'*.md' - + DEFAULT_TYPE_PATTERNS[jsonnet]=$'*.jsonnet\n*.libsonnet' + # Types that do not support .in variants for relevance detection + declare -A NO_IN_VARIANT_TYPES + NO_IN_VARIANT_TYPES[jsonnet]="1" + parse_list() { local input="$1" printf '%s' "$input" | tr ',' '\n' | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//' -e '/^$/d' @@ -134,7 +138,11 @@ runs: while IFS= read -r pattern; do pattern=${pattern#./} [ -z "$pattern" ] && continue - add_pattern_variant "$pattern" + if [ -n "${NO_IN_VARIANT_TYPES[$type]:-}" ]; then + PATTERN_SET["$pattern"]=1 + else + add_pattern_variant "$pattern" + fi done <<< "$patterns" done fi diff --git a/.github/workflows/jsonnet-format-check.yaml b/.github/workflows/jsonnet-format-check.yaml new file mode 100644 index 00000000..5319ee03 --- /dev/null +++ b/.github/workflows/jsonnet-format-check.yaml @@ -0,0 +1,166 @@ +name: Jsonnet Format Check +run-name: "${{ github.actor }} checking jsonnet format" + +permissions: + contents: read + pull-requests: read + +on: + pull_request: + branches: [ main, develop ] + workflow_dispatch: + inputs: + ref: + description: "The branch, ref, or SHA to checkout. Defaults to the repository's default branch." 
+ required: false + type: string + workflow_call: + inputs: + checkout-path: + description: "Path to check out code to" + required: false + type: string + skip-relevance-check: + description: "Bypass relevance check" + required: false + type: boolean + default: false + ref: + description: "The branch, ref, or SHA to checkout" + required: false + type: string + repo: + description: "The repository to checkout from" + required: false + type: string + pr-base-sha: + description: "Base SHA of the PR for relevance check" + required: false + type: string + pr-head-sha: + description: "Head SHA of the PR for relevance check" + required: false + type: string + +env: + local_checkout_path: ${{ (github.event_name == 'workflow_call' && inputs.checkout-path) || format('{0}-src', github.event.repository.name) }} + +jobs: + pre-check: + runs-on: ubuntu-latest + outputs: + is_act: ${{ steps.detect_act.outputs.is_act }} + ref: ${{ (github.event_name == 'workflow_call' && inputs.ref) || (github.event_name == 'workflow_dispatch' && (github.event.inputs.ref || github.ref)) || github.sha }} + repo: ${{ (github.event_name == 'workflow_call' && inputs.repo) || github.repository }} + steps: + - name: Detect act environment + id: detect_act + uses: Framework-R-D/phlex/.github/actions/detect-act-env@main + + detect-changes: + needs: pre-check + if: > + needs.pre-check.outputs.is_act != 'true' && + ( + github.event_name == 'pull_request' || + github.event_name == 'push' || + ( + github.event_name == 'workflow_call' && + inputs.skip-relevance-check != 'true' && + github.event.inputs == null && + github.event.comment == null + ) + ) + runs-on: ubuntu-latest + permissions: + contents: read + packages: read + outputs: + has_changes: ${{ steps.filter.outputs.matched }} + steps: + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + path: ${{ env.local_checkout_path }} + ref: ${{ needs.pre-check.outputs.ref }} + repository: 
${{ needs.pre-check.outputs.repo }} + + - name: Detect Jsonnet formatting changes + id: filter + uses: Framework-R-D/phlex/.github/actions/detect-relevant-changes@main + with: + repo-path: ${{ env.local_checkout_path }} + base-ref: ${{ (github.event_name == 'workflow_call' && inputs.pr-base-sha) || github.event.pull_request.base.sha || github.event.before }} + head-ref: ${{ (github.event_name == 'workflow_call' && inputs.pr-head-sha) || github.event.pull_request.head.sha || github.sha }} + file-type: jsonnet + + - name: Report detection outcome + run: | + if [ "${{ steps.filter.outputs.matched }}" != "true" ]; then + echo "::notice::No Jsonnet-related changes detected; formatting check will be skipped." + else + echo "::group::Jsonnet-related files" + printf '%s\n' "${{ steps.filter.outputs.matched_files }}" + echo "::endgroup::" + fi + + jsonnet-format-check: + needs: [pre-check, detect-changes] + if: > + needs.detect-changes.result == 'skipped' || + ( + needs.detect-changes.result == 'success' && + needs.detect-changes.outputs.has_changes == 'true' + ) + runs-on: ubuntu-latest + container: + image: public.ecr.aws/bitnami/jsonnet:latest + + steps: + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ needs.pre-check.outputs.ref }} + repository: ${{ needs.pre-check.outputs.repo }} + path: ${{ env.local_checkout_path }} + + - name: Check Jsonnet formatting + id: lint + working-directory: ${{ env.local_checkout_path }} + run: | + find . \( -name "*.jsonnet" -o -name "*.libsonnet" \) -print0 | xargs -0 -r -I {} \ + bash -c 'jsonnetfmt --test "{}" || (echo "FAILED: {}" && diff -u <(jsonnetfmt "{}") "{}" && exit 1)' + continue-on-error: true + + - name: Evaluate Jsonnet formatting result + if: always() + run: | + if [ "${{ steps.lint.outcome }}" = 'success' ]; then + echo "✅ Jsonnet formatting check passed." + else + echo "::error::Jsonnet formatting issues found. 
Please review the output above for details." + echo "::error::Run 'jsonnetfmt -i ' locally or comment '@${{ github.event.repository.name }}bot format' on the PR to auto-fix." + exit 1 + fi + + jsonnet-format-check-skipped: + needs: [pre-check, detect-changes] + if: > + needs.pre-check.outputs.is_act != 'true' && + ( + github.event_name == 'pull_request' || + github.event_name == 'push' || + ( + github.event_name == 'workflow_call' && + inputs.skip-relevance-check != 'true' && + github.event.inputs == null && + github.event.comment == null + ) + ) && + (needs.detect-changes.result == 'success' && needs.detect-changes.outputs.has_changes != 'true') + runs-on: ubuntu-latest + + steps: + - name: No relevant Jsonnet changes detected + run: echo "::notice::No Jsonnet-related changes detected; jsonnet-format check skipped." diff --git a/.github/workflows/jsonnet-format-fix.yaml b/.github/workflows/jsonnet-format-fix.yaml new file mode 100644 index 00000000..e4fb3a05 --- /dev/null +++ b/.github/workflows/jsonnet-format-fix.yaml @@ -0,0 +1,94 @@ +name: Jsonnet Format Fix +run-name: "${{ github.actor }} fixing jsonnet format" + +on: + issue_comment: + types: + - created + workflow_dispatch: + inputs: + ref: + description: "The branch, ref, or SHA to checkout. Defaults to the repository's default branch." 
+ required: false + type: string + workflow_call: + inputs: + checkout-path: + description: "Path to check out code to" + required: false + type: string + ref: + description: "The branch or ref to checkout" + required: true + type: string + repo: + description: "The repository to checkout from" + required: true + type: string + +permissions: + pull-requests: write + contents: write + +env: + local_checkout_path: ${{ (github.event_name == 'workflow_call' && inputs.checkout-path) || format('{0}-src', github.event.repository.name) }} + +jobs: + pre-check: + runs-on: ubuntu-latest + name: Parse command + if: > + github.event_name == 'workflow_dispatch' || + github.event_name == 'workflow_call' || + ( + github.event_name == 'issue_comment' && + github.event.issue.pull_request && + contains(fromJSON('["OWNER", "COLLABORATOR"]'), github.event.comment.author_association) && + ( + startsWith(github.event.comment.body, format('@{0}bot format', github.event.repository.name)) || + startsWith(github.event.comment.body, format('@{0}bot jsonnet-format-fix', github.event.repository.name)) + ) + ) + outputs: + ref: ${{ (github.event_name == 'workflow_call' && inputs.ref) || (github.event_name == 'workflow_dispatch' && (github.event.inputs.ref || github.ref)) || steps.get_pr.outputs.ref }} + repo: ${{ (github.event_name == 'workflow_call' && inputs.repo) || (github.event_name == 'workflow_dispatch' && github.repository) || steps.get_pr.outputs.repo }} + + steps: + - name: Get PR Info + if: github.event_name == 'issue_comment' + id: get_pr + uses: Framework-R-D/phlex/.github/actions/get-pr-info@main + + apply_jsonnet_formatting: + runs-on: ubuntu-latest + name: Apply Jsonnet formatting + needs: pre-check + if: needs.pre-check.result == 'success' + container: + image: public.ecr.aws/bitnami/jsonnet:latest + options: --user root + + steps: + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + path: ${{ env.local_checkout_path }} + 
ref: ${{ needs.pre-check.outputs.ref }} + repository: ${{ needs.pre-check.outputs.repo }} + token: ${{ secrets.WORKFLOW_PAT }} + + - name: Apply Jsonnet formatting + id: lint + working-directory: ${{ env.local_checkout_path }} + run: | + find . \( -name "*.jsonnet" -o -name "*.libsonnet" \) -print0 | xargs -0 -r jsonnetfmt -i + continue-on-error: true + + - name: Handle fix commit + uses: Framework-R-D/phlex/.github/actions/handle-fix-commit@main + with: + tool: jsonnet-format + working-directory: ${{ env.local_checkout_path }} + token: ${{ secrets.WORKFLOW_PAT }} + pr-info-ref: ${{ needs.pre-check.outputs.ref }} + pr-info-repo: ${{ needs.pre-check.outputs.repo }} diff --git a/CLANG_TIDY_CONFIGURATION.md b/CLANG_TIDY_CONFIGURATION.md index e2a589f9..8f31028b 100644 --- a/CLANG_TIDY_CONFIGURATION.md +++ b/CLANG_TIDY_CONFIGURATION.md @@ -198,7 +198,7 @@ These targets: The clang-tidy workflows complement the existing formatting workflows: -- **Format checks:** `@phlexbot format` - Fixes C++, CMake, and Markdown formatting +- **Format checks:** `@phlexbot format` - Fixes C++, CMake, Jsonnet, and Markdown formatting - **Tidy checks:** `@phlexbot tidy-fix` - Fixes Core Guidelines violations Both can be run independently or together as needed. 
diff --git a/test/benchmarks/benchmark-01.jsonnet b/test/benchmarks/benchmark-01.jsonnet index 3859967d..4f816e94 100644 --- a/test/benchmarks/benchmark-01.jsonnet +++ b/test/benchmarks/benchmark-01.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { a_creator: { diff --git a/test/benchmarks/benchmark-02.jsonnet b/test/benchmarks/benchmark-02.jsonnet index 968db98d..5b119219 100644 --- a/test/benchmarks/benchmark-02.jsonnet +++ b/test/benchmarks/benchmark-02.jsonnet @@ -2,22 +2,22 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { a1_creator: { cpp: 'last_index', - product_name: "a1" + product_name: 'a1', }, a2_creator: { cpp: 'last_index', - product_name: "a2" + product_name: 'a2', }, }, } diff --git a/test/benchmarks/benchmark-03.jsonnet b/test/benchmarks/benchmark-03.jsonnet index bba3585c..a66d7f6d 100644 --- a/test/benchmarks/benchmark-03.jsonnet +++ b/test/benchmarks/benchmark-03.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { read_id: { diff --git a/test/benchmarks/benchmark-04.jsonnet b/test/benchmarks/benchmark-04.jsonnet index 5d5a3702..304aed17 100644 --- a/test/benchmarks/benchmark-04.jsonnet +++ b/test/benchmarks/benchmark-04.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { a_creator: { @@ -16,7 +16,7 @@ }, read_index: { 
cpp: 'read_index', - consumes: { product: 'a', layer: "event" } + consumes: { product: 'a', layer: 'event' }, }, }, } diff --git a/test/benchmarks/benchmark-05.jsonnet b/test/benchmarks/benchmark-05.jsonnet index 495995e1..e4762965 100644 --- a/test/benchmarks/benchmark-05.jsonnet +++ b/test/benchmarks/benchmark-05.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { b_creator: { @@ -21,7 +21,7 @@ }, d: { cpp: 'verify_difference', - expected: 0 + expected: 0, }, }, } diff --git a/test/benchmarks/benchmark-06.jsonnet b/test/benchmarks/benchmark-06.jsonnet index ee6c9a50..cf310509 100644 --- a/test/benchmarks/benchmark-06.jsonnet +++ b/test/benchmarks/benchmark-06.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { a_creator: { diff --git a/test/benchmarks/benchmark-07.jsonnet b/test/benchmarks/benchmark-07.jsonnet index 3b6eef46..183e6c87 100644 --- a/test/benchmarks/benchmark-07.jsonnet +++ b/test/benchmarks/benchmark-07.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { even_filter: { @@ -27,7 +27,7 @@ }, d: { cpp: 'verify_difference', - expected: 0 + expected: 0, }, }, } diff --git a/test/benchmarks/benchmark-08.jsonnet b/test/benchmarks/benchmark-08.jsonnet index a3f30291..17784167 100644 --- a/test/benchmarks/benchmark-08.jsonnet +++ b/test/benchmarks/benchmark-08.jsonnet @@ -4,13 +4,13 @@ local max_number = 100000; driver: { cpp: 'generate_layers', layers: { - event: { total: max_number } - } + event: 
{ total: max_number }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { a_creator: { @@ -19,17 +19,17 @@ local max_number = 100000; }, even_filter: { cpp: 'accept_even_numbers', - consumes: { product: 'a', layer: 'event' } + consumes: { product: 'a', layer: 'event' }, }, fibonacci_filter: { cpp: 'accept_fibonacci_numbers', - consumes: { product: 'a', layer: "event" }, + consumes: { product: 'a', layer: 'event' }, max_number: max_number, }, d: { cpp: 'verify_even_fibonacci_numbers', experimental_when: ['even_filter:accept_even_numbers', 'fibonacci_filter:accept'], - consumes: { product: 'a', layer: "event" }, + consumes: { product: 'a', layer: 'event' }, max_number: max_number, }, }, diff --git a/test/benchmarks/benchmark-09.jsonnet b/test/benchmarks/benchmark-09.jsonnet index b70f69cb..1a109684 100644 --- a/test/benchmarks/benchmark-09.jsonnet +++ b/test/benchmarks/benchmark-09.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 100000 } - } + event: { total: 100000 }, + }, }, sources: { provider: { - cpp: 'benchmarks_provider' - } + cpp: 'benchmarks_provider', + }, }, modules: { a_creator: { @@ -19,12 +19,12 @@ }, even_filter: { cpp: 'accept_even_numbers', - consumes: { product: 'a', layer: "event" } + consumes: { product: 'a', layer: 'event' }, }, d: { cpp: 'read_index', experimental_when: ['even_filter:accept_even_numbers'], - consumes: { product: 'b', layer: "event" } + consumes: { product: 'b', layer: 'event' }, }, }, } diff --git a/test/form/form_test.jsonnet b/test/form/form_test.jsonnet index 9527a1d5..3b144763 100644 --- a/test/form/form_test.jsonnet +++ b/test/form/form_test.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { total: 10 } - } + event: { total: 10 }, + }, }, sources: { provider: { - cpp: 'ij_source' - } + cpp: 'ij_source', + }, }, modules: { add: { @@ -16,7 +16,7 @@ }, form_output: { cpp: 'form_module', - 
products: ["sum"], + products: ['sum'], }, }, } diff --git a/test/max-parallelism/check_parallelism_cli.jsonnet b/test/max-parallelism/check_parallelism_cli.jsonnet index 071aee35..001af1e9 100644 --- a/test/max-parallelism/check_parallelism_cli.jsonnet +++ b/test/max-parallelism/check_parallelism_cli.jsonnet @@ -1,4 +1,4 @@ -local base = import "check_parallelism_default.jsonnet"; +local base = import 'check_parallelism_default.jsonnet'; base { modules+: { diff --git a/test/max-parallelism/check_parallelism_cli_over_config.jsonnet b/test/max-parallelism/check_parallelism_cli_over_config.jsonnet index f052adc4..b99899e8 100644 --- a/test/max-parallelism/check_parallelism_cli_over_config.jsonnet +++ b/test/max-parallelism/check_parallelism_cli_over_config.jsonnet @@ -1,4 +1,4 @@ -local base = import "check_parallelism_default.jsonnet"; +local base = import 'check_parallelism_default.jsonnet'; base { max_concurrency: 7, diff --git a/test/max-parallelism/check_parallelism_config.jsonnet b/test/max-parallelism/check_parallelism_config.jsonnet index a52e9940..a13b4232 100644 --- a/test/max-parallelism/check_parallelism_config.jsonnet +++ b/test/max-parallelism/check_parallelism_config.jsonnet @@ -1,4 +1,4 @@ -local base = import "check_parallelism_default.jsonnet"; +local base = import 'check_parallelism_default.jsonnet'; base { local concurrency = 7, diff --git a/test/max-parallelism/check_parallelism_default.jsonnet.in b/test/max-parallelism/check_parallelism_default.jsonnet.in index 1abf6ff7..f77c7eda 100644 --- a/test/max-parallelism/check_parallelism_default.jsonnet.in +++ b/test/max-parallelism/check_parallelism_default.jsonnet.in @@ -5,12 +5,12 @@ sources: { parallelism_provider: { cpp: 'provide_parallelism', - } + }, }, modules: { verify: { cpp: 'check_parallelism', - expected_parallelism: @NPROC@, + expected_parallelism: '@NPROC@', }, }, } diff --git a/test/mock-workflow/G4Stage1.libsonnet b/test/mock-workflow/G4Stage1.libsonnet index df22de14..4c8f25f4 100644 
--- a/test/mock-workflow/G4Stage1.libsonnet +++ b/test/mock-workflow/G4Stage1.libsonnet @@ -1,11 +1,11 @@ -local ev = import "event_product.libsonnet"; local generators = import 'SinglesGen.libsonnet'; +local ev = import 'event_product.libsonnet'; { largeant: { cpp: 'largeant', - duration_usec: 156, # Typical: 15662051 - inputs: [ev.event_product(f + "/MCTruths") for f in std.objectFields(generators)], - outputs: ["ParticleAncestryMap", "Assns", "SimEnergyDeposits", "AuxDetHits", "MCParticles"], - } + duration_usec: 156, // Typical: 15662051 + inputs: [ev.event_product(f + '/MCTruths') for f in std.objectFields(generators)], + outputs: ['ParticleAncestryMap', 'Assns', 'SimEnergyDeposits', 'AuxDetHits', 'MCParticles'], + }, } diff --git a/test/mock-workflow/G4Stage2.libsonnet b/test/mock-workflow/G4Stage2.libsonnet index 535e90a7..cae58242 100644 --- a/test/mock-workflow/G4Stage2.libsonnet +++ b/test/mock-workflow/G4Stage2.libsonnet @@ -1,17 +1,17 @@ -local ev = import "event_product.libsonnet"; local g4stage1 = import 'G4Stage1.libsonnet'; +local ev = import 'event_product.libsonnet'; { IonAndScint: { cpp: 'ion_and_scint', - duration_usec: 546, # Typical: 5457973 - inputs: [ev.event_product(f + "/SimEnergyDeposits") for f in std.objectFields(g4stage1)], - outputs: ["SimEnergyDeposits", "SimEnergyDeposits_priorSCE"], + duration_usec: 546, // Typical: 5457973 + inputs: [ev.event_product(f + '/SimEnergyDeposits') for f in std.objectFields(g4stage1)], + outputs: ['SimEnergyDeposits', 'SimEnergyDeposits_priorSCE'], }, PDFastSim: { cpp: 'pd_fast_sim', - duration_usec: 69, # Typical: 69681950 + duration_usec: 69, // Typical: 69681950 inputs: [ev.event_product('SimEnergyDeposits_priorSCE')], outputs: ['SimPhotonLites', 'OpDetBacktrackerRecords'], - } + }, } diff --git a/test/mock-workflow/SinglesGen.libsonnet b/test/mock-workflow/SinglesGen.libsonnet index de065307..1fc50518 100644 --- a/test/mock-workflow/SinglesGen.libsonnet +++ b/test/mock-workflow/SinglesGen.libsonnet 
@@ -2,39 +2,39 @@ local ev = import 'event_product.libsonnet'; { rn222: { - cpp: "MC_truth_algorithm", + cpp: 'MC_truth_algorithm', duration_usec: 39, - inputs: [ev.event_product("id")], - outputs: ["MCTruths"] + inputs: [ev.event_product('id')], + outputs: ['MCTruths'], }, ar39: { - cpp: "MC_truth_algorithm", + cpp: 'MC_truth_algorithm', duration_usec: 12410, - inputs: [ev.event_product("id")], - outputs: ["MCTruths"] + inputs: [ev.event_product('id')], + outputs: ['MCTruths'], }, cosmicgenerator: { - cpp: "MC_truth_algorithm", - duration_usec: 492, # Typical: 4926215 - inputs: [ev.event_product("id")], - outputs: ["MCTruths"] + cpp: 'MC_truth_algorithm', + duration_usec: 492, // Typical: 4926215 + inputs: [ev.event_product('id')], + outputs: ['MCTruths'], }, kr85: { - cpp: "MC_truth_algorithm", + cpp: 'MC_truth_algorithm', duration_usec: 1643, - inputs: [ev.event_product("id")], - outputs: ["MCTruths"] + inputs: [ev.event_product('id')], + outputs: ['MCTruths'], }, generator: { - cpp: "three_tuple_algorithm", + cpp: 'three_tuple_algorithm', duration_usec: 69616, - inputs: [ev.event_product("id")], - outputs: ["MCTruths", "BeamEvents", "beamsims"] + inputs: [ev.event_product('id')], + outputs: ['MCTruths', 'BeamEvents', 'beamsims'], }, ar42: { - cpp: "MC_truth_algorithm", + cpp: 'MC_truth_algorithm', duration_usec: 148, - inputs: [ev.event_product("id")], - outputs: ["MCTruths"] + inputs: [ev.event_product('id')], + outputs: ['MCTruths'], }, } diff --git a/test/mock-workflow/event_product.libsonnet b/test/mock-workflow/event_product.libsonnet index a3f0cfc4..ee8edf83 100644 --- a/test/mock-workflow/event_product.libsonnet +++ b/test/mock-workflow/event_product.libsonnet @@ -2,6 +2,6 @@ event_product(product):: { product: product, - layer: "event" - } + layer: 'event', + }, } diff --git a/test/mock-workflow/mock-workflow.jsonnet b/test/mock-workflow/mock-workflow.jsonnet index c4c15ada..9a241a9e 100644 --- a/test/mock-workflow/mock-workflow.jsonnet +++ 
b/test/mock-workflow/mock-workflow.jsonnet @@ -1,18 +1,18 @@ -local singlesgen = import 'SinglesGen.libsonnet'; local g4stage1 = import 'G4Stage1.libsonnet'; local g4stage2 = import 'G4Stage2.libsonnet'; +local singlesgen = import 'SinglesGen.libsonnet'; { driver: { cpp: 'generate_layers', layers: { - event: { total: 1 } - } + event: { total: 1 }, + }, }, sources: { provider: { - cpp: 'id_provider' - } + cpp: 'id_provider', + }, }, modules: singlesgen + g4stage1 + g4stage2, } diff --git a/test/plugins/add.jsonnet b/test/plugins/add.jsonnet index 08c3637e..b13fb310 100644 --- a/test/plugins/add.jsonnet +++ b/test/plugins/add.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { parent: "job", total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { - cpp: 'ij_source' - } + cpp: 'ij_source', + }, }, modules: { add: { diff --git a/test/python/pyadd.jsonnet b/test/python/pyadd.jsonnet index c54c8eab..ec08d271 100644 --- a/test/python/pyadd.jsonnet +++ b/test/python/pyadd.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { parent: 'job', total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { cpp: 'cppsource4py', - } + }, }, modules: { pyadd: { diff --git a/test/python/pyconfig.jsonnet b/test/python/pyconfig.jsonnet index 2effc8c4..e2debf32 100644 --- a/test/python/pyconfig.jsonnet +++ b/test/python/pyconfig.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { parent: 'job', total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { cpp: 'cppsource4py', - } + }, }, modules: { pyconfig: { diff --git a/test/python/pyfailure.jsonnet b/test/python/pyfailure.jsonnet index 63b6c608..9b849f4f 100644 --- a/test/python/pyfailure.jsonnet +++ b/test/python/pyfailure.jsonnet @@ -2,18 +2,18 @@ 
driver: { cpp: 'generate_layers', layers: { - event: { parent: 'job', total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { cpp: 'cppsource4py', - } + }, }, modules: { pyadd: { py: 'adder', - #input: ['i', 'j'], # commented out to cause a failure + //input: ['i', 'j'], # commented out to cause a failure output: ['sum'], }, }, diff --git a/test/python/pyreduce.jsonnet b/test/python/pyreduce.jsonnet index af5c8778..ddabf987 100644 --- a/test/python/pyreduce.jsonnet +++ b/test/python/pyreduce.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { parent: 'job', total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { cpp: 'cppsource4py', - } + }, }, modules: { pyreduce: { diff --git a/test/python/pysyspath.jsonnet.in b/test/python/pysyspath.jsonnet.in index 4330b780..22c0c13e 100644 --- a/test/python/pysyspath.jsonnet.in +++ b/test/python/pysyspath.jsonnet.in @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { parent: 'job', total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { cpp: 'cppsource4py', - } + }, }, modules: { pysyspath: { diff --git a/test/python/pyvec.jsonnet b/test/python/pyvec.jsonnet index 29e3b9a1..ffc775e5 100644 --- a/test/python/pyvec.jsonnet +++ b/test/python/pyvec.jsonnet @@ -2,13 +2,13 @@ driver: { cpp: 'generate_layers', layers: { - event: { parent: 'job', total: 10, starting_number: 1 } - } + event: { parent: 'job', total: 10, starting_number: 1 }, + }, }, sources: { provider: { cpp: 'cppsource4py', - } + }, }, modules: { pysum: {