diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 74747d3fe15..f64a14137d4 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -1,3 +1,3 @@
# From https://github.com/microsoft/vscode-dev-containers/blob/master/containers/go/.devcontainer/Dockerfile
-ARG VARIANT="17-jdk-bookworm"
+ARG VARIANT="21-jdk-bookworm"
FROM mcr.microsoft.com/vscode/devcontainers/java:${VARIANT}
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index d167be89720..d9a309d3661 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -5,7 +5,7 @@
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a version of Java
- "VARIANT": "17-jdk-bookworm",
+ "VARIANT": "21-jdk-bookworm",
}
},
"containerEnv": {
diff --git a/.editorconfig b/.editorconfig
index 23e7176794a..7b8947ec3c6 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -115,7 +115,7 @@ ij_java_for_statement_wrap = off
ij_java_generate_final_locals = false
ij_java_generate_final_parameters = false
ij_java_if_brace_force = never
-ij_java_imports_layout = *,|,javax.**,java.**,|,$*
+ij_java_imports_layout = *,|,javax.**,jakarta.**,java.**,|,$*
ij_java_indent_case_from_switch = true
ij_java_insert_inner_class_imports = false
ij_java_insert_override_annotation = true
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 00000000000..a5f5cdf5aaf
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,35 @@
+### 🔧 Type of changes
+- [ ] new bid adapter
+- [ ] bid adapter update
+- [ ] new feature
+- [ ] new analytics adapter
+- [ ] new module
+- [ ] module update
+- [ ] bugfix
+- [ ] documentation
+- [ ] configuration
+- [ ] dependency update
+- [ ] tech debt (test coverage, refactorings, etc.)
+
+### ✨ What's the context?
+What's the context for the changes?
+
+### 🧠 Rationale behind the change
+Why did you choose to make these changes? Were there any trade-offs you had to consider?
+
+### 🔎 New Bid Adapter Checklist
+- [ ] verify email contact works
+- [ ] NO fully dynamic hostnames
+- [ ] geographic host parameters are NOT required
+- [ ] direct use of HTTP is prohibited - *implement an existing Bidder interface that will do all the work*
+- [ ] if the ORTB is just forwarded to the endpoint, use the generic adapter - *define the new adapter as the alias of the generic adapter*
+- [ ] cover an adapter configuration with an integration test
+
+### 🧪 Test plan
+How do you know the changes are safe to ship to production?
+
+### 🏎 Quality check
+- [ ] Are your changes following [our code style guidelines](https://github.com/prebid/prebid-server-java/blob/master/docs/developers/code-style.md)?
+- [ ] Are there any breaking changes in your code?
+- [ ] Does your test coverage exceed 90%?
+- [ ] Are there any erroneous console logs, debuggers or leftover code in your changes?
diff --git a/.github/workflows/code-path-changes.yml b/.github/workflows/code-path-changes.yml
new file mode 100644
index 00000000000..f818d867441
--- /dev/null
+++ b/.github/workflows/code-path-changes.yml
@@ -0,0 +1,37 @@
+name: Notify Code Path Changes
+
+on:
+ pull_request_target:
+ types: [ opened, synchronize ]
+ paths:
+ - '**'
+
+permissions:
+ contents: read
+
+env:
+ OAUTH2_CLIENT_ID: ${{ secrets.OAUTH2_CLIENT_ID }}
+ OAUTH2_CLIENT_SECRET: ${{ secrets.OAUTH2_CLIENT_SECRET }}
+ OAUTH2_REFRESH_TOKEN: ${{ secrets.OAUTH2_REFRESH_TOKEN }}
+ GITHUB_REPOSITORY: ${{ github.repository }}
+ GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+jobs:
+ notify:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v5
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v6
+ with:
+ node-version: '18'
+
+ - name: Install dependencies
+ run: npm install axios nodemailer
+
+ - name: Run Notification Script
+ run: |
+ node .github/workflows/scripts/send-notification-on-change.js
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
new file mode 100644
index 00000000000..a6852ae7c92
--- /dev/null
+++ b/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,60 @@
+name: CodeQL
+
+on:
+ pull_request:
+ branches: [ 'master' ]
+ schedule:
+ - cron: '0 3 * * 1'
+
+permissions:
+ security-events: write
+ packages: read
+ actions: read
+ contents: read
+
+jobs:
+ analyze:
+ name: Analyze (${{ matrix.language }})
+ runs-on: ubuntu-latest
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include:
+ - language: actions
+ build-mode: none
+ - language: java-kotlin
+ build-mode: manual
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v5
+
+ - name: Set up JDK
+ uses: actions/setup-java@v5
+ with:
+ distribution: 'temurin'
+ java-version: 21
+
+ - name: Cache Maven packages
+ uses: actions/cache@v5
+ with:
+ path: ~/.m2/repository
+ key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
+ restore-keys: |
+ ${{ runner.os }}-maven-
+
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v4
+ with:
+ languages: ${{ matrix.language }}
+ build-mode: ${{ matrix.build-mode }}
+
+ - name: Build with Maven
+ if: matrix.build-mode == 'manual'
+ run: mvn -B package --file extra/pom.xml
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v4
+ with:
+ category: '/language:${{ matrix.language }}'
diff --git a/.github/workflows/cross-repo-issue.yml b/.github/workflows/cross-repo-issue.yml
index c2288da271a..5d2e512d4c6 100644
--- a/.github/workflows/cross-repo-issue.yml
+++ b/.github/workflows/cross-repo-issue.yml
@@ -2,9 +2,12 @@ name: Cross-repo Issue Creation
on:
pull_request_target:
- types: [closed]
+ types: [ closed ]
branches:
- - "master"
+ - 'master'
+
+permissions:
+ contents: read
jobs:
cross-repo:
@@ -12,7 +15,7 @@ jobs:
steps:
- name: Generate token
id: generate_token
- uses: tibdex/github-app-token@v1
+ uses: tibdex/github-app-token@v2.1.0
with:
app_id: ${{ secrets.XREPO_APP_ID }}
private_key: ${{ secrets.XREPO_PEM }}
@@ -23,9 +26,10 @@ jobs:
github.event.pull_request.merged
env:
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
+ PR_TITLE: ${{ github.event.pull_request.title }}
run: |
echo -e "A PR was merged over on PBS-Java\n\n- [https://github.com/prebid/prebid-server-java/pull/${{github.event.number}}](https://github.com/prebid/prebid-server-java/pull/${{github.event.number}})\n- timestamp: ${{ github.event.pull_request.merged_at}}" > msg
export msg=$(cat msg)
- gh issue create --repo prebid/prebid-server --title "Port PR from PBS-Java: ${{ github.event.pull_request.title }}" \
+ gh issue create --repo prebid/prebid-server --title "Port PR from PBS-Java: $PR_TITLE" \
--body "$msg" \
--label auto
diff --git a/.github/workflows/docker-image-publish.yml b/.github/workflows/docker-image-publish.yml
index 63d1961388d..7f993ade73d 100644
--- a/.github/workflows/docker-image-publish.yml
+++ b/.github/workflows/docker-image-publish.yml
@@ -1,10 +1,13 @@
name: Publish Docker image for new tag/release
on:
- workflow_run:
- workflows: [Publish release]
- types:
- - completed
+ push:
+ tags:
+ - '*'
+
+permissions:
+ contents: read
+ packages: write
env:
REGISTRY: ghcr.io
@@ -14,47 +17,57 @@ jobs:
build:
name: Publish Docker image for new tag/release
runs-on: ubuntu-latest
- permissions:
- contents: read
- packages: write
strategy:
matrix:
- java: [ 17 ]
- dockerfile-path: [Dockerfile, extra/Dockerfile]
+ java: [ 21 ]
+ dockerfile-path: [ Dockerfile, Dockerfile-modules ]
include:
- dockerfile-path: Dockerfile
build-cmd: mvn clean package -Dcheckstyle.skip -Dmaven.test.skip=true
package-name: ghcr.io/${{ github.repository }}
- - dockerfile-path: extra/Dockerfile
+
+ - dockerfile-path: Dockerfile-modules
build-cmd: mvn clean package --file extra/pom.xml -Dcheckstyle.skip -Dmaven.test.skip=true
package-name: ghcr.io/${{ github.repository }}-bundle
steps:
+ - name: Check out Repository
+ uses: actions/checkout@v5
+
- name: Set up JDK
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v5
with:
distribution: 'temurin'
cache: 'maven'
java-version: ${{ matrix.java }}
+
- name: Build .jar via Maven
run: ${{ matrix.build-cmd }}
- - name: Checkout repository
- uses: actions/checkout@v4
+
- name: Log in to the Container registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
+
- name: Extract metadata (tags, labels) for Docker Image
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ matrix.package-name }}
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
- name: Build and push Docker image
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
with:
context: .
file: ${{ matrix.dockerfile-path }}
push: true
+ platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/issue_prioritization.yml b/.github/workflows/issue_prioritization.yml
index 784fe02656b..7b4df73b80b 100644
--- a/.github/workflows/issue_prioritization.yml
+++ b/.github/workflows/issue_prioritization.yml
@@ -1,16 +1,18 @@
name: Issue tracking
+
on:
issues:
types:
- opened
- pinned
+
jobs:
track_issue:
runs-on: ubuntu-latest
steps:
- name: Generate token
id: generate_token
- uses: tibdex/github-app-token@36464acb844fc53b9b8b2401da68844f6b05ebb0
+ uses: tibdex/github-app-token@v2.1.0
with:
app_id: ${{ secrets.PBS_PROJECT_APP_ID }}
private_key: ${{ secrets.PBS_PROJECT_APP_PEM }}
diff --git a/.github/workflows/pr-functional-tests.yml b/.github/workflows/pr-functional-tests.yml
index 610c6693193..d512022413a 100644
--- a/.github/workflows/pr-functional-tests.yml
+++ b/.github/workflows/pr-functional-tests.yml
@@ -11,23 +11,51 @@ on:
types:
- created
+permissions:
+ contents: read
+ actions: read
+ checks: write
+
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
- java: [ 17 ]
+ java: [ 21 ]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up JDK
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v5
with:
distribution: 'temurin'
cache: 'maven'
java-version: ${{ matrix.java }}
- name: Build with Maven
- run: mvn -B verify -DskipUnitTests=true -DskipModuleFunctionalTests=true -Dtests.max-container-count=5 -DdockerfileName=Dockerfile --file extra/pom.xml
+ id: build
+ run: |
+ mvn -B verify \
+ -DskipUnitTests=true \
+ -DskipModuleFunctionalTests=true \
+ -Dtests.max-container-count=5 \
+ -DdockerfileName=Dockerfile \
+ -Dcheckstyle.skip \
+ --file extra/pom.xml
+
+ - name: Emitting run result of functional test
+ if: always()
+ uses: dorny/test-reporter@v2.5.0
+ with:
+ name: 'Functional tests'
+ working-directory: 'target/failsafe-reports'
+ path: 'TEST-*.xml'
+ reporter: java-junit
+ use-actions-summary: 'true'
+ list-suites: 'failed'
+ list-tests: 'failed'
+ fail-on-error: true
+ fail-on-empty: true
+ token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/pr-java-ci.yml b/.github/workflows/pr-java-ci.yml
index 79a904c3636..d69d222592f 100644
--- a/.github/workflows/pr-java-ci.yml
+++ b/.github/workflows/pr-java-ci.yml
@@ -11,22 +11,28 @@ on:
types:
- created
+permissions:
+ contents: read
+ actions: read
+ checks: write
+
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
- java: [ 17 ]
+ java: [ 21 ]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up JDK
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v5
with:
distribution: 'temurin'
cache: 'maven'
+ cache-dependency-path: extra/pom.xml
java-version: ${{ matrix.java }}
- name: Build with Maven
diff --git a/.github/workflows/pr-module-functional-tests.yml b/.github/workflows/pr-module-functional-tests.yml
index d8f1e925a07..c3b04858677 100644
--- a/.github/workflows/pr-module-functional-tests.yml
+++ b/.github/workflows/pr-module-functional-tests.yml
@@ -11,26 +11,55 @@ on:
types:
- created
+permissions:
+ contents: read
+ actions: read
+ checks: write
+
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
- java: [ 17 ]
+ java: [ 21 ]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up JDK
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v5
with:
distribution: 'temurin'
cache: 'maven'
java-version: ${{ matrix.java }}
- name: Build with Maven
- run: mvn package -DskipUnitTests=true --file extra/pom.xml
+ run: mvn package -Dcheckstyle.skip -DskipUnitTests=true --file extra/pom.xml
- name: Run module tests
- run: mvn -B verify -DskipUnitTests=true -DskipFunctionalTests=true -DskipModuleFunctionalTests=false -Dtests.max-container-count=5 -DdockerfileName=Dockerfile-modules --file extra/pom.xml
+ id: build
+ run: |
+ mvn -B verify \
+ -DskipUnitTests=true \
+ -DskipFunctionalTests=true \
+ -DskipModuleFunctionalTests=false \
+ -Dtests.max-container-count=5 \
+ -DdockerfileName=Dockerfile-modules \
+ -Dcheckstyle.skip \
+ --file extra/pom.xml
+
+ - name: Emitting run result of functional test
+ if: always()
+ uses: dorny/test-reporter@v2.5.0
+ with:
+ name: 'Module functional tests'
+ working-directory: 'target/failsafe-reports'
+ path: 'TEST-*.xml'
+ reporter: java-junit
+ use-actions-summary: 'true'
+ list-suites: 'failed'
+ list-tests: 'failed'
+ fail-on-error: true
+ fail-on-empty: true
+ token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/release-asset-publish.yml b/.github/workflows/release-asset-publish.yml
index 1de13751c3a..bfa938bebe9 100644
--- a/.github/workflows/release-asset-publish.yml
+++ b/.github/workflows/release-asset-publish.yml
@@ -2,7 +2,7 @@ name: Publish release .jar
on:
workflow_run:
- workflows: [Publish release]
+ workflows: [ Publish release ]
types:
- completed
@@ -12,11 +12,11 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java: [ 17 ]
+ java: [ 21 ]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up JDK
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v5
with:
distribution: 'temurin'
cache: 'maven'
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
index b34d4827eae..75ea23441de 100644
--- a/.github/workflows/release-drafter.yml
+++ b/.github/workflows/release-drafter.yml
@@ -2,27 +2,25 @@ name: Publish release
on:
push:
- branches:
- - master
+ tags:
+ - '*'
+
+permissions:
+ contents: read
jobs:
update_release_draft:
name: Publish release with notes
+ permissions:
+ contents: write
runs-on: ubuntu-latest
- if: "contains(github.event.head_commit.message, 'Prebid Server prepare release ')"
steps:
- - name: Extract tag from commit message
- run: |
- target_tag=${COMMIT_MSG#"Prebid Server prepare release "}
- echo "TARGET_TAG=$target_tag" >> $GITHUB_ENV
- env:
- COMMIT_MSG: ${{ github.event.head_commit.message }}
- name: Create and publish release
- uses: release-drafter/release-drafter@v5
+ uses: release-drafter/release-drafter@v6
with:
config-name: release-drafter-config.yml
publish: true
- name: "v${{ env.TARGET_TAG }}"
- tag: ${{ env.TARGET_TAG }}
+ name: 'v${{ github.ref_name }}'
+ tag: ${{ github.ref_name }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/scripts/codepath-notification b/.github/workflows/scripts/codepath-notification
new file mode 100644
index 00000000000..371c86fc652
--- /dev/null
+++ b/.github/workflows/scripts/codepath-notification
@@ -0,0 +1,26 @@
+# when a changed file path matches the regex, send an alert email
+# structure of the file is:
+#
+# javascriptRegex : email address
+#
+# For example, in PBS Java, there are many paths that can belong to bid adapter:
+#
+# /src/main/java/org/prebid/server/bidder/BIDDER
+# /src/main/resources/static/bidder-params/BIDDER.json
+# /src/main/resources/bidder-config/BIDDER.yaml
+# /src/main/java/org/prebid/server/proto/openrtb/ext/request/BIDDER
+# /src/test/resources/org/prebid/server/it/openrtb2/BIDDER
+# /src/test/java/org/prebid/server/it/BIDDERTest.java
+# /src/test/java/org/prebid/server/bidder/BIDDER
+# /src/main/java/org/prebid/server/spring/config/bidder/BIDDERConfiguration.java
+#
+# The aim is to find a minimal set of regex patterns that matches any file in these paths
+
+/ix|Ix|ix.json|ix.yaml: pdu-supply-prebid@indexexchange.com
+appnexus|Appnexus: prebid@microsoft.com
+pubmatic|Pubmatic: header-bidding@pubmatic.com
+openx|OpenX: prebid@openx.com
+medianet|Medianet: prebid@media.net
+thetradedesk|TheTradeDesk: Prebid-Maintainers@thetradedesk.com
+gumgum|GumGum: prebid@gumgum.com
+kargo|Kargo: kraken@kargo.com
diff --git a/.github/workflows/scripts/send-notification-on-change.js b/.github/workflows/scripts/send-notification-on-change.js
new file mode 100644
index 00000000000..f4e4fdcd3ca
--- /dev/null
+++ b/.github/workflows/scripts/send-notification-on-change.js
@@ -0,0 +1,139 @@
+// send-notification-on-change.js
+//
+// called by the code-path-changes.yml workflow, this script queries GitHub for
+// the changes in the current PR, checks the config file for whether any of those
+// file paths are set to alert an email address, and sends email to multiple
+// parties if needed
+
+const fs = require('fs');
+const path = require('path');
+const axios = require('axios');
+const nodemailer = require('nodemailer');
+
+async function getAccessToken(clientId, clientSecret, refreshToken) {
+ try {
+ const response = await axios.post('https://oauth2.googleapis.com/token', {
+ client_id: clientId,
+ client_secret: clientSecret,
+ refresh_token: refreshToken,
+ grant_type: 'refresh_token',
+ });
+ return response.data.access_token;
+ } catch (error) {
+ console.error('Failed to fetch access token:', error.response?.data || error.message);
+ process.exit(1);
+ }
+}
+
+(async () => {
+ const configFilePath = path.join(__dirname, 'codepath-notification');
+ const repo = process.env.GITHUB_REPOSITORY;
+ const prNumber = process.env.GITHUB_PR_NUMBER;
+ const token = process.env.GITHUB_TOKEN;
+
+ // Generate OAuth2 access token
+ const clientId = process.env.OAUTH2_CLIENT_ID;
+ const clientSecret = process.env.OAUTH2_CLIENT_SECRET;
+ const refreshToken = process.env.OAUTH2_REFRESH_TOKEN;
+
+ // validate params
+ if (!repo || !prNumber || !token || !clientId || !clientSecret || !refreshToken) {
+ console.error('Missing required environment variables.');
+ process.exit(1);
+ }
+
+ // the whole process is in a big try/catch. e.g. if the config file doesn't exist, github is down, etc.
+ try {
+ // Read and process the configuration file
+ const configFileContent = fs.readFileSync(configFilePath, 'utf-8');
+ const configRules = configFileContent
+ .split('\n')
+ .filter(line => line.trim() !== '' && !line.trim().startsWith('#')) // Ignore empty lines and comments
+ .map(line => {
+ const [regex, email] = line.split(':').map(part => part.trim());
+ return { regex: new RegExp(regex), email };
+ });
+
+ // Fetch changed files from github
+ const [owner, repoName] = repo.split('/');
+ const apiUrl = `https://api.github.com/repos/${owner}/${repoName}/pulls/${prNumber}/files`;
+ const response = await axios.get(apiUrl, {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ Accept: 'application/vnd.github.v3+json',
+ },
+ });
+
+ const changedFiles = response.data.map(file => file.filename);
+ console.log('Changed files:', changedFiles);
+
+ // match file pathnames that are in the config and group them by email address
+ const matchesByEmail = {};
+ changedFiles.forEach(file => {
+ configRules.forEach(rule => {
+ if (rule.regex.test(file)) {
+ if (!matchesByEmail[rule.email]) {
+ matchesByEmail[rule.email] = [];
+ }
+ matchesByEmail[rule.email].push(file);
+ }
+ });
+ });
+
+ // Exit successfully if no matches were found
+ if (Object.keys(matchesByEmail).length === 0) {
+ console.log('No matches found. Exiting successfully.');
+ process.exit(0);
+ }
+
+ console.log('Grouped matches by email:', matchesByEmail);
+
+ // get ready to email the changes
+ const accessToken = await getAccessToken(clientId, clientSecret, refreshToken);
+
+ // Configure Nodemailer with OAuth2
+ // service: 'Gmail',
+ const transporter = nodemailer.createTransport({
+ host: "smtp.gmail.com",
+ port: 465,
+ secure: true,
+ auth: {
+ type: 'OAuth2',
+ user: 'info@prebid.org',
+ clientId: clientId,
+ clientSecret: clientSecret,
+ refreshToken: refreshToken,
+ accessToken: accessToken
+ },
+ });
+
+ // Send one email per recipient
+ for (const [email, files] of Object.entries(matchesByEmail)) {
+ const emailBody = `
+ ${email},
+
+ Files owned by you have been changed in open source ${repo}. The pull request is #${prNumber}. These are the files you own that have been modified:
+
+ ${files.map(file => `- ${file}
+`).join('')}
+
+ `;
+
+ try {
+ await transporter.sendMail({
+ from: `"Prebid Info" <info@prebid.org>`,
+ to: email,
+ subject: `Files have been changed in open source ${repo}`,
+ html: emailBody,
+ });
+
+ console.log(`Email sent successfully to ${email}`);
+ console.log(`${emailBody}`);
+ } catch (error) {
+ console.error(`Failed to send email to ${email}:`, error.message);
+ }
+ }
+ } catch (error) {
+ console.error('Error:', error.message);
+ process.exit(1);
+ }
+})();
diff --git a/.github/workflows/trivy-security-check.yml b/.github/workflows/trivy-security-check.yml
index 044b7e39af6..b73eda3b40d 100644
--- a/.github/workflows/trivy-security-check.yml
+++ b/.github/workflows/trivy-security-check.yml
@@ -1,27 +1,35 @@
-name: Security Check
+name: Trivy Security Scan
on:
pull_request:
- branches: [master]
+ branches: [ 'master' ]
+ schedule:
+ - cron: '0 3 * * 1'
+
+permissions:
+ contents: read
jobs:
build:
name: Trivy security check
+ permissions:
+ security-events: write
runs-on: ubuntu-latest
steps:
- name: Checkout Code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Run Trivy vulnerability scanner
- uses: aquasecurity/trivy-action@master
+ uses: aquasecurity/trivy-action@0.33.1
with:
scan-type: 'fs'
+ scan-ref: '.'
ignore-unfixed: true
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
- name: Upload Trivy scan results to GitHub Security tab
- uses: github/codeql-action/upload-sarif@v2
+ uses: github/codeql-action/upload-sarif@v4
with:
sarif_file: 'trivy-results.sarif'
diff --git a/.gitignore b/.gitignore
index 3c057d9bda4..5f0817bd269 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,5 +13,4 @@ target/
.DS_Store
-.allure/
src/main/proto/
diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar
deleted file mode 100644
index bf82ff01c6c..00000000000
Binary files a/.mvn/wrapper/maven-wrapper.jar and /dev/null differ
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index dc3affce3dd..d58dfb70bab 100644
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -6,7 +6,7 @@
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
@@ -14,5 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.1/maven-wrapper-3.1.1.jar
+wrapperVersion=3.3.2
+distributionType=only-script
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
diff --git a/Dockerfile b/Dockerfile
index d69d5346506..7de0126d535 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM amazoncorretto:17
+FROM amazoncorretto:21.0.8-al2023
WORKDIR /app/prebid-server
diff --git a/Dockerfile-modules b/Dockerfile-modules
index a9cbfe71b31..1626999164a 100644
--- a/Dockerfile-modules
+++ b/Dockerfile-modules
@@ -1,4 +1,4 @@
-FROM amazoncorretto:17
+FROM amazoncorretto:21.0.8-al2023
WORKDIR /app/prebid-server
diff --git a/README.md b/README.md
index 44d1ffbd92b..b5aa6539aae 100644
--- a/README.md
+++ b/README.md
@@ -42,6 +42,7 @@ Follow next steps to create JAR file which can be deployed locally.
- Install prerequsites
- Java SDK: Oracle's or Corretto. Let us know if there's a distribution PBS-Java doesn't work with.
+ - Java SDK Version: 21
- Maven
- Clone the project:
@@ -73,8 +74,8 @@ For more information how to build the server follow [documentation](docs/build.m
## Configuration
-The source code includes an example configuration file `sample/prebid-config.yaml`.
-Also, check the account settings file `sample/sample-app-settings.yaml`.
+The source code includes an example configuration file `sample/configs/prebid-config.yaml`.
+Also, check the account settings file `sample/configs/sample-app-settings.yaml`.
For more information how to configure the server follow [documentation](docs/config.md). There are many settings you'll want to consider such as which bidders you're going to enable, privacy defaults, admin endpoints, etc.
@@ -83,7 +84,7 @@ For more information how to configure the server follow [documentation](docs/con
Run your local server with the command:
```bash
-java -jar target/prebid-server.jar --spring.config.additional-location=sample/prebid-config.yaml
+java -jar target/prebid-server.jar --spring.config.additional-location=sample/configs/prebid-config.yaml
```
For more options how to start the server, please follow [documentation](docs/run.md).
@@ -100,12 +101,30 @@ There are a couple of 'hello world' test requests described in sample/requests/R
## Running Docker image
-Starting from PBS Java v2.9, you can download prebuilt Docker images from [GitHub Packages](https://github.com/orgs/prebid/packages?repo_name=prebid-server-java) page,
-and use them instead of plain .jar files. This prebuilt images are delivered with or without extra modules.
+Starting from PBS Java v3.11.0, you can download prebuilt Docker images from the [GitHub Packages](https://github.com/orgs/prebid/packages?repo_name=prebid-server-java) page,
+and use them instead of plain .jar files. These prebuilt images are delivered in 2 flavors:
+- https://github.com/prebid/prebid-server-java/pkgs/container/prebid-server-java is a bare PBS and doesn't contain modules.
+- https://github.com/prebid/prebid-server-java/pkgs/container/prebid-server-java-bundle is a "bundle" that contains PBS and all the modules.
-In order to run such image correctly, you should attach PBS config file. Easiest way is to mount config file into container,
+To run PBS from the image correctly, you should provide the PBS config file. The easiest way is to mount the config file into the container,
using [--mount or --volume (-v) Docker CLI arguments](https://docs.docker.com/engine/reference/commandline/run/).
-Keep in mind, that config file should be mounted into specific location: ```/app/prebid-server/``` or ```/app/prebid-server/conf/```.
+Keep in mind that the config file should be mounted into a specific location: ```/app/prebid-server/conf/``` or ```/app/prebid-server/```.
+
+PBS follows the regular Spring Boot config load hierarchy and type.
+For simple configuration, a single `application.yaml` mounted to `/app/prebid-server/conf/` will be enough.
+Please consult [Spring Externalized Configuration](https://docs.spring.io/spring-boot/reference/features/external-config.html) for all possible ways to configure PBS.
+
+You can also supply command-line parameters through the `JAVA_OPTS` environment variable, which will be appended to the `java` command before the `-jar ...` parameter.
+Please pay attention to line breaks and escape them if needed.
+
+Example execution using sample configuration:
+```shell
+docker run --rm -v ./sample:/app/prebid-server/sample:ro -p 8060:8060 -p 8080:8080 ghcr.io/prebid/prebid-server-java:latest --spring.config.additional-location=sample/configs/prebid-config.yaml
+```
+or
+```shell
+docker run --rm -v ./sample:/app/prebid-server/sample:ro -p 8060:8060 -p 8080:8080 -e JAVA_OPTS=-Dspring.config.additional-location=sample/configs/prebid-config.yaml ghcr.io/prebid/prebid-server-java:latest
+```
# Documentation
diff --git a/checkstyle.xml b/checkstyle.xml
index aac9ec01cfe..aa8274c29a7 100644
--- a/checkstyle.xml
+++ b/checkstyle.xml
@@ -68,6 +68,7 @@
autovalue.shaded.com.google,
org.inferred.freebuilder.shaded.com.google,
org.apache.commons.lang"/>
+
@@ -75,7 +76,7 @@
-
+
diff --git a/docs/admin-endpoints.md b/docs/admin-endpoints.md
new file mode 100644
index 00000000000..b3176a4379c
--- /dev/null
+++ b/docs/admin-endpoints.md
@@ -0,0 +1,209 @@
+# Admin endpoints
+
+Prebid Server Java offers a set of admin endpoints for managing and monitoring the server's health, configurations, and
+metrics. Below is a detailed description of each endpoint, including HTTP methods, paths, parameters, and responses.
+
+## General settings
+
+Each endpoint can be enabled or disabled via the `admin-endpoints.<endpoint-name>.enabled` toggle. Defaults to
+`false`.
+
+Each endpoint can be configured to serve either on the application port (configured via the `server.http.port` setting) or
+on the admin port (configured via the `admin.port` setting) by changing the `admin-endpoints.<endpoint-name>.on-application-port`
+setting.
+By default, all admin endpoints reside on the admin port.
+
+Each endpoint can be configured to serve on a certain path by setting `admin-endpoints.<endpoint-name>.path`.
+
+Each endpoint can be configured to require basic authorization or not by changing the
+`admin-endpoints.<endpoint-name>.protected` setting,
+which defaults to `true`. Allowed credentials are configured globally for all admin endpoints with the
+`admin-endpoints.credentials.<username>`
+setting.
+
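+For example, a minimal sketch of a configuration that enables the version endpoint on the admin port (the port and path values here are illustrative, not defaults):
+
+```yaml
+admin:
+  port: 8060
+admin-endpoints:
+  version:
+    enabled: true
+    path: /version
+    on-application-port: false
+    protected: false
+```
+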
+## Endpoints
+
+1. Version info
+
+- Name: version
+- Endpoint: Configured via `admin-endpoints.version.path` setting
+- Methods:
+ - `GET`:
+ - Description: Returns the version information for the Prebid Server Java instance.
+ - Parameters: None
+ - Responses:
+ - 200 OK: JSON containing version details
+ ```json
+ {
+ "version": "x.x.x",
+ "revision": "commit-hash"
+ }
+ ```
+
+2. Currency rates
+
+- Name: currency-rates
+- Methods:
+ - `GET`:
+ - Description: Returns the latest information about currency rates used by server instance.
+ - Parameters: None
+ - Responses:
+ - 200 OK: JSON containing currency rates information
+ ```json
+ {
+ "active": "true",
+ "source": "http://currency-source",
+ "fetchingIntervalNs": 200,
+ "lastUpdated": "02/01/2018 - 13:45:30 UTC"
+ ... Rates ...
+ }
+ ```
+
+3. Cache notification endpoint
+
+- Name: storedrequest
+- Methods:
+ - `POST`:
+ - Description: Updates stored requests/imps data stored in server instance cache.
+ - Parameters:
+ - body:
+ ```json
+ {
+ "requests": {
+ "<stored-request-id>": "<stored request JSON>",
+ ... Requests data ...
+ },
+ "imps": {
+ "<stored-imp-id>": "<stored imp JSON>",
+ ... Imps data ...
+ }
+ }
+ ```
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+ - 405 METHOD NOT ALLOWED
+ - `DELETE`:
+ - Description: Invalidates stored requests/imps data stored in server instance cache.
+ - Parameters:
+ - body:
+ ```json
+ {
+ "requests": ["<stored-request-id>", ... Request names ...],
+ "imps": ["<stored-imp-id>", ... Imp names ...]
+ }
+ ```
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+ - 405 METHOD NOT ALLOWED
+
+4. Amp cache notification endpoint
+
+- Name: storedrequest-amp
+- Methods:
+ - `POST`:
+ - Description: Updates stored requests/imps data for amp, stored in server instance cache.
+ - Parameters:
+ - body:
+ ```json
+ {
+ "requests": {
+ "<stored-request-id>": "<stored request JSON>",
+ ... Requests data ...
+ },
+ "imps": {
+ "<stored-imp-id>": "<stored imp JSON>",
+ ... Imps data ...
+ }
+ }
+ ```
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+ - 405 METHOD NOT ALLOWED
+ - `DELETE`:
+ - Description: Invalidates stored requests/imps data for amp, stored in server instance cache.
+ - Parameters:
+ - body:
+ ```json
+ {
+ "requests": ["<stored-request-id>", ... Request names ...],
+ "imps": ["<stored-imp-id>", ... Imp names ...]
+ }
+ ```
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+ - 405 METHOD NOT ALLOWED
+
+5. Account cache notification endpoint
+
+- Name: cache-invalidation
+- Methods:
+ - any:
+ - Description: Invalidates cached data for a provided account in server instance cache.
+ - Parameters:
+ - `account`: Account id.
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+
+
+6. Http interaction logging endpoint
+
+- Name: logging-httpinteraction
+- Methods:
+ - any:
+ - Description: Changes request logging specification in server instance.
+ - Parameters:
+ - `endpoint`: Endpoint. Should be either: `auction` or `amp`.
+ - `statusCode`: Status code for logging spec.
+ - `account`: Account id.
+ - `bidder`: Bidder code.
+ - `limit`: Limit of requests for specification to be valid.
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+- Additional settings:
+ - `logging.http-interaction.max-limit` - max limit for logging specification limit.
+
+7. Logging level control endpoint
+
+- Name: logging-changelevel
+- Methods:
+ - any:
+ - Description: Changes request logging level for specified amount of time in server instance.
+ - Parameters:
+ - `level`: Logging level. Should be one of: `all`, `trace`, `debug`, `info`, `warn`, `error`, `off`.
+ - `duration`: Duration of logging level (in millis) before reset to original one.
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+- Additional settings:
+ - `logging.change-level.max-duration-ms` - max duration of changed logger level.
+
+8. Tracer log endpoint
+
+- Name: tracelog
+- Methods:
+ - any:
+ - Description: Adds trace logging specification for specified amount of time in server instance.
+ - Parameters:
+ - `account`: Account id.
+ - `bidderCode`: Bidder code.
+ - `level`: Log level. Should be one of: `info`, `warn`, `trace`, `error`, `fatal`, `debug`.
+ - `duration`: Duration of logging specification (in seconds).
+ - Responses:
+ - 200 OK
+ - 400 BAD REQUEST
+
+9. Collected metrics endpoint
+
+- Name: collected-metrics
+- Methods:
+ - any:
+ - Description: Returns the metrics collected by the server instance.
+ - Parameters: None
+ - Responses:
+ - 200 OK: JSON containing metrics data.
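+
+As a usage sketch, assuming the logging level control endpoint is enabled at path `/logging/changelevel` and served on admin port 8060 (both values are illustrative and configurable as described above):
+
+```shell
+curl "http://localhost:8060/logging/changelevel?level=debug&duration=60000"
+```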
diff --git a/docs/application-settings.md b/docs/application-settings.md
index 4999cd293a5..bf89c1c3d83 100644
--- a/docs/application-settings.md
+++ b/docs/application-settings.md
@@ -11,6 +11,8 @@ There are two ways to configure application settings: database and file. This do
- `auction.video-cache-ttl`- how long (in seconds) video creative will be available via the external Cache Service.
- `auction.truncate-target-attr` - Maximum targeting attributes size. Values between 1 and 255.
- `auction.default-integration` - Default integration to assume.
+- `auction.debug-allow` - enables debug output in the auction response. Default `true`.
+- `auction.impression-limit` - the maximum number of impressions allowed for the auction; impressions that exceed this limit will be dropped. 0 means no limit.
- `auction.bid-validations.banner-creative-max-size` - Overrides creative max size validation for banners. Valid values
are:
- "skip": don't do anything about creative max size for this publisher
@@ -18,8 +20,29 @@ There are two ways to configure application settings: database and file. This do
operational warning.
- "enforce": if a bidder returns a creative that's larger in height or width than any of the allowed sizes, reject
the bid and log an operational warning.
+- `auction.bidadjustments` - configuration JSON for default bid adjustments
+- `auction.bidadjustments.mediatype.{banner, video-instream, video-outstream, audio, native, *}.{<bidder>, *}.{<deal-id>, *}[]` - array of bid adjustments to be applied to any bid of the provided media type, bidder and deal id (`*` means ANY); see the example after this list
+- `auction.bidadjustments.mediatype.*.*.*[].adjtype` - type of the bid adjustment (cpm, multiplier, static)
+- `auction.bidadjustments.mediatype.*.*.*[].value` - value of the bid adjustment
+- `auction.bidadjustments.mediatype.*.*.*[].currency` - currency of the bid adjustment
- `auction.events.enabled` - enables events for account if true
-- `auction.debug-allow` - enables debug output in the auction response. Default `true`.
+- `auction.bid-rounding` - bid rounding options are:
+ - **down** - rounding down to the lower price bucket
+ - **up** - rounding up to the higher price bucket
+ - **timesplit** - 50% of the time rounding down to the lower PB and 50% of the time rounding up to the higher price bucket
+ - **true** - if the price >= 50% of the range, rounding up to the higher price bucket, otherwise rounding down
+- `auction.price-floors.enabled` - enables price floors for account if true. Defaults to true.
+- `auction.price-floors.fetch.enabled`- enables data fetch for price floors for account if true. Defaults to false.
+- `auction.price-floors.fetch.url` - url to fetch price floors data from.
+- `auction.price-floors.fetch.timeout-ms` - timeout for fetching price floors data. Defaults to 5000.
+- `auction.price-floors.fetch.max-file-size-kb` - maximum size of price floors data to be fetched. Defaults to 200.
+- `auction.price-floors.fetch.max-rules` - maximum number of rules per model group. Defaults to 0.
+- `auction.price-floors.fetch.max-age-sec` - maximum time that fetched price floors data remains in cache. Defaults to 86400.
+- `auction.price-floors.fetch.period-sec` - time between two consecutive fetches. Defaults to 3600.
+- `auction.price-floors.enforce-floors-rate` - what percentage of the time a defined floor is enforced. Default is 100.
+- `auction.price-floors.adjust-for-bid-adjustment` - boolean for whether to use the bidAdjustment function to adjust the floor per bidder. Defaults to true.
+- `auction.price-floors.enforce-deal-floors` - boolean for whether to enforce floors on deals. Defaults to true.
+- `auction.price-floors.use-dynamic-data` - boolean that can be used as an emergency override to start ignoring dynamic floors data if something goes wrong. Defaults to true.
- `auction.targeting.includewinners` - whether to include targeting for the winning bids in response. Default `false`.
- `auction.targeting.includebidderkeys` - whether to include targeting for the best bid from each bidder in response. Default `false`.
- `auction.targeting.includeformat` - whether to include the “hb_format” targeting key. Default `false`.
@@ -30,39 +53,63 @@ Keep in mind following restrictions:
- this prefix value may be overridden by correspond property from bid request
- prefix length is limited by `auction.truncate-target-attr`
- if custom prefix may produce keywords that exceed `auction.truncate-target-attr`, prefix value will drop to default `hb`
-- `privacy.ccpa.enabled` - enables gdpr verifications if true. Has higher priority than configuration in application.yaml.
-- `privacy.ccpa.channel-enabled.web` - overrides `ccpa.enforce` property behaviour for web requests type.
-- `privacy.ccpa.channel-enabled.amp` - overrides `ccpa.enforce` property behaviour for amp requests type.
-- `privacy.ccpa.channel-enabled.app` - overrides `ccpa.enforce` property behaviour for app requests type.
-- `privacy.ccpa.channel-enabled.video` - overrides `ccpa.enforce` property behaviour for video requests type.
+- `auction.preferredmediatype.<bidder>` - the media type that will be left in requests to a bidder that doesn't support multi-format. Other media types will be removed. Acceptable values: `banner`, `video`, `audio`, `native`.
+- `auction.privacysandbox.cookiedeprecation.enabled` - boolean that turns on setting and reading of the Chrome Privacy Sandbox testing label header. Defaults to false.
+- `auction.privacysandbox.cookiedeprecation.ttlsec` - if the above setting is true, how long to set the receive-cookie-deprecation cookie's expiration
+- `auction.cache.enabled` - enables bids caching for account if true. Defaults to true.
- `privacy.gdpr.enabled` - enables gdpr verifications if true. Has higher priority than configuration in
application.yaml.
+- `privacy.gdpr.eea-countries` - overrides the host-level list of 2-letter country codes where TCF processing is applied
- `privacy.gdpr.channel-enabled.web` - overrides `privacy.gdpr.enabled` property behaviour for web requests type.
- `privacy.gdpr.channel-enabled.amp` - overrides `privacy.gdpr.enabled` property behaviour for amp requests type.
- `privacy.gdpr.channel-enabled.app` - overrides `privacy.gdpr.enabled` property behaviour for app requests type.
- `privacy.gdpr.channel-enabled.video` - overrides `privacy.gdpr.enabled` property behaviour for video requests
type.
+- `privacy.gdpr.channel-enabled.dooh` - overrides `privacy.gdpr.enabled` property behaviour for dooh requests
+ type.
- `privacy.gdpr.purposes.[p1-p10].enforce-purpose` - define type of enforcement confirmation: `no`/`basic`/`full`.
Default `full`
- `privacy.gdpr.purposes.[p1-p10].enforce-vendors` - if equals to `true`, user must give consent to use vendors.
Purposes will be omitted. Default `true`
- `privacy.gdpr.purposes.[p1-p10].vendor-exceptions[]` - bidder names that will be treated opposite
to `pN.enforce-vendors` value.
-- `privacy.gdpr.special-features.[f1-f2].enforce`- if equals to `true`, special feature will be enforced for purpose.
+- `privacy.gdpr.purposes.p4.eid.activity_transition` - defaults to `true`. If `true` and transmitEids is not specified, but transmitUfpd is specified, then the logic of transmitUfpd is used. This is to avoid breaking changes to existing configurations. The default value of the flag will be changed in a future release.
+- `privacy.gdpr.purposes.p4.eid.require_consent` - if equals to `true`, transmitting EIDs require P4 legal basis unless excepted.
+- `privacy.gdpr.purposes.p4.eid.exceptions` - list of EID sources that are excepted from P4 enforcement and will be transmitted if any P2-P10 is consented.
+- `privacy.gdpr.special-features.[sf1-sf2].enforce`- if equals to `true`, special feature will be enforced for purpose.
Default `true`
-- `privacy.gdpr.special-features.[f1-f2].vendor-exceptions` - bidder names that will be treated opposite
+- `privacy.gdpr.special-features.[sf1-sf2].vendor-exceptions` - bidder names that will be treated opposite
to `sfN.enforce` value.
- `privacy.gdpr.purpose-one-treatment-interpretation` - option that allows to skip the Purpose one enforcement workflow.
Values: ignore, no-access-allowed, access-allowed.
-- `metrics.verbosity-level` - defines verbosity level of metrics for this account, overrides `metrics.accounts` application settings configuration.
+- `privacy.gdpr.basic-enforcement-vendors` - bypass vendor-level checks for these biddercodes.
+- `privacy.ccpa.enabled` - enables ccpa verifications if true. Has higher priority than configuration in application.yaml.
+- `privacy.ccpa.channel-enabled.web` - overrides `ccpa.enforce` property behaviour for web requests type.
+- `privacy.ccpa.channel-enabled.amp` - overrides `ccpa.enforce` property behaviour for amp requests type.
+- `privacy.ccpa.channel-enabled.app` - overrides `ccpa.enforce` property behaviour for app requests type.
+- `privacy.ccpa.channel-enabled.video` - overrides `ccpa.enforce` property behaviour for video requests type.
+- `privacy.ccpa.channel-enabled.dooh` - overrides `ccpa.enforce` property behaviour for dooh requests type.
+- `privacy.dsa.default.dsarequired` - inject this dsarequired value for this account. See https://github.com/InteractiveAdvertisingBureau/openrtb/blob/main/extensions/community_extensions/dsa_transparency.md for details.
+- `privacy.dsa.default.pubrender` - inject this pubrender value for this account. See https://github.com/InteractiveAdvertisingBureau/openrtb/blob/main/extensions/community_extensions/dsa_transparency.md for details.
+- `privacy.dsa.default.datatopub` - inject this datatopub value for this account. See https://github.com/InteractiveAdvertisingBureau/openrtb/blob/main/extensions/community_extensions/dsa_transparency.md for details.
+- `privacy.dsa.default.transparency[].domain` - inject this domain value for this account. See https://github.com/InteractiveAdvertisingBureau/openrtb/blob/main/extensions/community_extensions/dsa_transparency.md for details.
+- `privacy.dsa.default.transparency[].dsaparams` - inject this dsaparams value for this account. See https://github.com/InteractiveAdvertisingBureau/openrtb/blob/main/extensions/community_extensions/dsa_transparency.md for details.
+- `privacy.dsa.gdpr-only` - When true, DSA default injection only happens when in GDPR scope. Defaults to false, meaning all the time.
+- `privacy.allowactivities` - configuration for Activity Infrastructure. For further details, see: https://docs.prebid.org/prebid-server/features/pbs-activitycontrols.html
+- `privacy.modules` - configuration for Privacy Modules. Each privacy module has its own configuration.
+- `analytics.allow-client-details` - when true, this boolean setting allows responses to transmit the server-side analytics tags to support client-side analytics adapters. Defaults to false.
- `analytics.auction-events.` - defines which channels are supported by analytics for this account
- `analytics.modules..*` - space for `module-name` analytics module specific configuration, may be of any shape
-- `cookie-sync.default-timeout-ms` - overrides host level config
+- `analytics.modules.<adapter-name>.*` - a space for specific data for the analytics adapter, which may include an `enabled` property to control whether the adapter should be triggered, along with other adapter-specific properties. These will be merged under `ext.prebid.analytics.<adapter-name>` in the request.
+- `metrics.verbosity-level` - defines verbosity level of metrics for this account, overrides `metrics.accounts` application settings configuration.
- `cookie-sync.default-limit` - if the "limit" isn't specified in the `/cookie_sync` request, this is what to use
-- `cookie-sync.pri` - a list of prioritized bidder codes
- `cookie-sync.max-limit` - if the "limit" is specified in the `/cookie_sync` request, it can't be greater than this
value
+- `cookie-sync.pri` - a list of prioritized bidder codes
- `cookie-sync.coop-sync.default` - if the "coopSync" value isn't specified in the `/cookie_sync` request, use this
+- `hooks` - configuration for Prebid Server Modules. For further details, see: https://docs.prebid.org/prebid-server/pbs-modules/index.html#2-define-an-execution-plan
+- `hooks.admin.module-execution` - a key-value map, where a key is a module name and a value is a boolean, that defines whether the module's hooks should or should not always be executed; if the module is not specified, it is executed by default when present in the execution plan
+- `settings.geo-lookup` - enables geo lookup for account if true. Defaults to false.
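+
+As an illustration of the `auction.bidadjustments` JSON described in the list above (the bidder name and the wildcard deal id are hypothetical):
+
+```json
+{
+  "mediatype": {
+    "banner": {
+      "somebidder": {
+        "*": [{ "adjtype": "multiplier", "value": 0.9 }]
+      }
+    },
+    "*": {
+      "*": {
+        "*": [{ "adjtype": "cpm", "value": 0.1, "currency": "USD" }]
+      }
+    }
+  }
+}
+```
+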
Here are the definitions of the "purposes" that can be defined in the GDPR setting configurations:
@@ -226,6 +273,59 @@ Here's an example YAML file containing account-specific settings:
default: true
```
+## Setting Account Configuration in S3
+
+This is identical to the account configuration in a file system, with the main difference that your file system is
+[AWS S3](https://aws.amazon.com/de/s3/) or any S3 compatible storage, such as [MinIO](https://min.io/).
+
+
+The general idea is that you'll place all the account-specific settings in a separate YAML file and point to that file.
+
+```yaml
+settings:
+ s3:
+ accessKeyId: # optional
+ secretAccessKey: #optional
+ endpoint: # http://s3.storage.com
+ bucket: # prebid-application-settings
+ region: # if not provided AWS_GLOBAL will be used. Example value: 'eu-central-1'
+ accounts-dir: accounts
+ stored-imps-dir: stored-impressions
+ stored-requests-dir: stored-requests
+ stored-responses-dir: stored-responses
+
+ # recommended to configure an in memory cache, but this is optional
+ in-memory-cache:
+ # example settings, tailor to your needs
+ cache-size: 100000
+ ttl-seconds: 1200 # 20 minutes
+ # recommended to configure
+ s3-update:
+ refresh-rate: 900000 # Refresh every 15 minutes
+ timeout: 5000
+```
+
+If `accessKeyId` and `secretAccessKey` are not specified in the Prebid Server configuration then AWS credentials will be looked up in this order:
+- Java System Properties - `aws.accessKeyId` and `aws.secretAccessKey`
+- Environment Variables - `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+- Web Identity Token credentials from system properties or environment variables
+- Credential profiles file at the default location (`~/.aws/credentials`) shared by all AWS SDKs and the AWS CLI
+- Credentials delivered through the Amazon EC2 container service if "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI" environment variable is set and security manager has permission to access the variable,
+- Instance profile credentials delivered through the Amazon EC2 metadata service
+
+### File format
+
+We recommend using the `json` format for your account configuration. A minimal configuration may look like this.
+
+```json
+{
+ "id" : "979c7116-1f5a-43d4-9a87-5da3ccc4f52c",
+ "status" : "active"
+}
+```
+
+This pairs nicely if you have a default configuration defined in your prebid server config under `settings.default-account-config`.
+
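+As a sketch, assuming `settings.default-account-config` accepts the account JSON as a string (the values shown are illustrative):
+
+```yaml
+settings:
+  default-account-config: >
+    {
+      "auction": {
+        "debug-allow": true
+      }
+    }
+```
+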
## Setting Account Configuration in the Database
In database approach account properties are stored in database table(s).
diff --git a/docs/build-aws.md b/docs/build-aws.md
index c6e64d93630..2dd19ed6811 100644
--- a/docs/build-aws.md
+++ b/docs/build-aws.md
@@ -1,3 +1,6 @@
+## Deploying through _Prebid Server Deployment on AWS_ Solution
+Prebid Server can be automatically deployed into an AWS account using the [Prebid Server Deployment on AWS](https://aws.amazon.com/solutions/implementations/prebid-server-deployment-on-aws/) Solution. Users retain full control over bidding decision logic and transaction data for real-time ad monetization, within their own AWS environment. It also offers enterprise-grade scalability to handle a variety of requests and enhances data protection using the robust security capabilities of the AWS Cloud. It is [open-source](https://github.com/aws-solutions/prebid-server-deployment-on-aws) and includes a comprehensive [Implementation Guide](https://docs.aws.amazon.com/pdfs/solutions/latest/prebid-server-deployment-on-aws/prebid-server-deployment-on-aws.pdf) and the accompanying [AWS CloudFormation template](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?templateURL=https://solutions-reference.s3.amazonaws.com/prebid-server-deployment-on-aws/latest/prebid-server-deployment-on-aws.template&redirectId=SolutionWeb) for a one-click launch.
+
## Creating project ZIP package and deploying it to AWS Elastic Beanstalk
Follow next steps to create zip which can be deployed to AWS Elastic Beanstalk.
@@ -44,7 +47,7 @@ where
If you follow same naming convention, your `run.sh` script should be similar to:
```
-exec java -jar prebid-server.jar -Dlogging.config=prebid-logging.xml --spring.config.additional-location=sample/prebid-config.yaml
+exec java -jar prebid-server.jar -Dlogging.config=prebid-logging.xml --spring.config.additional-location=sample/configs/prebid-config.yaml
```
Make run.sh executable using the next command:
diff --git a/docs/build.md b/docs/build.md
index 67b0b8af26e..ed2c18b7e0a 100644
--- a/docs/build.md
+++ b/docs/build.md
@@ -1,9 +1,15 @@
# Build project
To build the project, you will need at least
-[Java 11](https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz)
+[Java 21](https://whichjdk.com/)
and [Maven](https://maven.apache.org/) installed.
+If for whatever reason this Java reference becomes stale,
+you can always get the current project Java version from the `pom.xml` property
+```xml
+...
+```
+
To verify the installed Java run in console:
```bash
@@ -13,9 +19,9 @@ java -version
which should show something like (yours may be different):
```
-openjdk version "11.0.2" 2019-01-15
-OpenJDK Runtime Environment 18.9 (build 11.0.2+9)
-OpenJDK 64-Bit Server VM 18.9 (build 11.0.2+9, mixed mode)
+openjdk version "21.0.5" 2024-10-15 LTS
+OpenJDK Runtime Environment Corretto-21.0.5.11.1 (build 21.0.5+11-LTS)
+OpenJDK 64-Bit Server VM Corretto-21.0.5.11.1 (build 21.0.5+11-LTS, mixed mode, sharing)
```
Follow next steps to create JAR which can be deployed locally.
diff --git a/docs/config-app.md b/docs/config-app.md
index 79cbf9f85ef..a661f5a74a2 100644
--- a/docs/config-app.md
+++ b/docs/config-app.md
@@ -14,18 +14,24 @@ This section can be extended against standard [Spring configuration](https://doc
This parameter exists to allow to change the location of the directory Vert.x will create because it will and there is no way to make it not.
- `vertx.init-timeout-ms` - time to wait for asynchronous initialization steps completion before considering them stuck. When exceeded - exception is thrown and Prebid Server stops.
- `vertx.enable-per-client-endpoint-metrics` - enables HTTP client metrics per destination endpoint (`host:port`)
+- `vertx.round-robin-inet-address` - enables round-robin selection of the IP address to use
## Server
- `server.max-headers-size` - set the maximum length of all headers.
- `server.ssl` - enable SSL/TLS support.
- `server.jks-path` - path to the java keystore (if ssl is enabled).
- `server.jks-password` - password for the keystore (if ssl is enabled).
+- `server.cpu-load-monitoring.measurement-interval-ms` - the CPU load monitoring interval (milliseconds)
## HTTP Server
-- `http.max-headers-size` - set the maximum length of all headers, deprecated(use server.max-headers-size instead).
-- `http.ssl` - enable SSL/TLS support, deprecated(use server.ssl instead).
-- `http.jks-path` - path to the java keystore (if ssl is enabled), deprecated(use server.jks-path instead).
-- `http.jks-password` - password for the keystore (if ssl is enabled), deprecated(use server.jks-password instead).
+- `server.max-headers-size` - set the maximum length of all headers, deprecated(use server.max-headers-size instead).
+- `server.ssl` - enable SSL/TLS support, deprecated(use server.ssl instead).
+- `server.jks-path` - path to the java keystore (if ssl is enabled), deprecated(use server.jks-path instead).
+- `server.jks-password` - password for the keystore (if ssl is enabled), deprecated(use server.jks-password instead).
+- `server.max-initial-line-length` - set the maximum length of the initial line
+- `server.idle-timeout` - set the maximum time idle connections could exist before being reaped
+- `server.enable-quickack` - enables the TCP_QUICKACK option - only with linux native transport.
+- `server.enable-reuseport` - set the value of reuse port
- `server.http.server-instances` - how many http server instances should be created.
This parameter affects how many CPU cores will be utilized by the application. Rough assumption - one http server instance will keep 1 CPU core busy.
- `server.http.enabled` - if set to `true` enables http server
@@ -61,6 +67,10 @@ Removes and downloads file again if depending service cant process probably corr
- `.remote-file-syncer.tmp-filepath` - full path to the temporary file.
- `.remote-file-syncer.retry-count` - how many times try to download.
- `.remote-file-syncer.retry-interval-ms` - how long to wait between failed retries.
+- `.remote-file-syncer.retry.delay-millis` - initial time of how long to wait between failed retries.
+- `.remote-file-syncer.retry.max-delay-millis` - maximum allowed value for `delay-millis`.
+- `.remote-file-syncer.retry.factor` - factor applied to the `delay-millis` value after each failed retry.
+- `.remote-file-syncer.retry.jitter` - jitter (multiplicative) for `delay-millis` parameter.
- `.remote-file-syncer.timeout-ms` - default operation timeout for obtaining database file.
- `.remote-file-syncer.update-interval-ms` - time interval between updates of the usable file.
- `.remote-file-syncer.http-client.connect-timeout-ms` - set the connect timeout.
@@ -75,9 +85,8 @@ Removes and downloads file again if depending service cant process probably corr
- `default-request.file.path` - path to a JSON file containing the default request
## Auction (OpenRTB)
-- `auction.blacklisted-accounts` - comma separated list of blacklisted account IDs.
-- `auction.blacklisted-apps` - comma separated list of blacklisted applications IDs, requests from which should not be processed.
-- `auction.max-timeout-ms` - maximum operation timeout for OpenRTB Auction requests. Deprecated.
+- `auction.blocklisted-accounts` - comma separated list of blocklisted account IDs.
+- `auction.blocklisted-apps` - comma separated list of blocklisted applications IDs, requests from which should not be processed.
- `auction.biddertmax.min` - minimum operation timeout for OpenRTB Auction requests.
- `auction.biddertmax.max` - maximum operation timeout for OpenRTB Auction requests.
- `auction.biddertmax.percent` - adjustment factor for `request.tmax` for bidders.
@@ -88,10 +97,12 @@ Removes and downloads file again if depending service cant process probably corr
- `auction.cache.expected-request-time-ms` - approximate value in milliseconds for Cache Service interacting.
- `auction.cache.only-winning-bids` - if equals to `true` only the winning bids would be cached. Has lower priority than request-specific flags.
- `auction.generate-bid-id` - whether to generate seatbid[].bid[].ext.prebid.bidid in the OpenRTB response.
+- `auction.enforce-random-bid-id` - whether to enforce generating a robust random seatbid[].bid[].id in the OpenRTB response if the initial value is less than 17 characters.
- `auction.validations.banner-creative-max-size` - enables creative max size validation for banners. Possible values: `skip`, `enforce`, `warn`. Default is `skip`.
- `auction.validations.secure-markup` - enables secure markup validation. Possible values: `skip`, `enforce`, `warn`. Default is `skip`.
- `auction.host-schain-node` - defines global schain node that will be appended to `request.source.ext.schain.nodes` passed to bidders
- `auction.category-mapping-enabled` - if equals to `true` the category mapping feature will be active while auction.
+- `auction.strict-app-site-dooh` - if set to `true`, it will reject requests that contain more than one of app/site/dooh. Defaults to `false`.
## Event
- `event.default-timeout-ms` - timeout for event notifications
@@ -103,14 +114,15 @@ Removes and downloads file again if depending service cant process probably corr
- `auction.timeout-notification.log-sampling-rate` - instructs apply sampling when logging bidder timeout notification results
## Video
-- `auction.video.stored-required` - flag forces to merge with stored request
-- `auction.blacklisted-accounts` - comma separated list of blacklisted account IDs.
+- `video.stored-request-required` - flag that forces the video request to be merged with a stored request
- `video.stored-requests-timeout-ms` - timeout for stored requests fetching.
-- `auction.ad-server-currency` - default currency for video auction, if its value was not specified in request. Important note: PBS uses ISO-4217 codes for the representation of currencies.
+- `auction.blocklisted-accounts` - comma separated list of blocklisted account IDs.
- `auction.video.escape-log-cache-regex` - regex to remove from cache debug log xml.
+- `auction.ad-server-currency` - default currency for video auction, if its value was not specified in request. Important note: PBS uses ISO-4217 codes for the representation of currencies.
## Setuid
- `setuid.default-timeout-ms` - default operation timeout for requests to `/setuid` endpoint.
+- `setuid.number-of-uid-cookies` - the maximum number of UID cookies that can be returned in the `/setuid` endpoint response. Defaults to `1` if not specified.
## Cookie Sync
- `cookie-sync.default-timeout-ms` - default operation timeout for requests to `/cookie_sync` endpoint.
@@ -122,6 +134,7 @@ Removes and downloads file again if depending service cant process probably corr
## Vtrack
- `vtrack.allow-unknown-bidder` - flag that allows servicing requests with bidders who were not configured in Prebid Server.
- `vtrack.modify-vast-for-unknown-bidder` - flag that allows modifying the VAST value and adding the impression tag to it, for bidders who were not configured in Prebid Server.
+- `vtrack.default-timeout-ms` - a default timeout in ms for the vtrack request
## Adapters
- `adapters.*` - the section for bidder specific configuration options.
@@ -143,6 +156,7 @@ There are several typical keys:
- `adapters..usersync.type` - usersync type (i.e. redirect, iframe).
- `adapters..usersync.support-cors` - flag signals if CORS supported by usersync.
- `adapters..debug.allow` - enables debug output in the auction response for the given bidder. Default `true`.
+- `adapters..tmax-deduction-ms` - adjusts the tmax sent to the bidder by deducting the provided value (ms). Default `0 ms` - no deduction.
In addition, each bidder could have arbitrary aliases configured that will look and act very much the same as the bidder itself.
Aliases are configured by adding child configuration object at `adapters..aliases..`, aliases
@@ -156,9 +170,8 @@ Also, each bidder could have its own bidder-specific options.
## Logging
- `logging.http-interaction.max-limit` - maximum value for the number of interactions to log in one take.
-
-## Logging
- `logging.change-level.max-duration-ms` - maximum duration (in milliseconds) for which logging level could be changed.
+- `logging.sampling-rate` - a percentage of messages that are logged
## Currency Converter
- `currency-converter.external-rates.enabled` - if equals to `true` the currency conversion service will be enabled to fetch updated rates and convert bid currencies from external source. Also enables `/currency-rates` endpoint on admin port.
@@ -202,32 +215,17 @@ Also, each bidder could have its own bidder-specific options.
- `admin-endpoints.tracelog.enabled` - if equals to `true` the endpoint will be available.
- `admin-endpoints.tracelog.path` - the server context path where the endpoint will be accessible.
- `admin-endpoints.tracelog.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
-- `admin-endpoints.tracelog.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
-
-- `admin-endpoints.deals-status.enabled` - if equals to `true` the endpoint will be available.
-- `admin-endpoints.deals-status.path` - the server context path where the endpoint will be accessible.
-- `admin-endpoints.deals-status.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
-- `admin-endpoints.deals-status.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
-
-- `admin-endpoints.lineitem-status.enabled` - if equals to `true` the endpoint will be available.
-- `admin-endpoints.lineitem-status.path` - the server context path where the endpoint will be accessible.
-- `admin-endpoints.lineitem-status.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
-- `admin-endpoints.lineitem-status.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
-
-- `admin-endpoints.e2eadmin.enabled` - if equals to `true` the endpoint will be available.
-- `admin-endpoints.e2eadmin.path` - the server context path where the endpoint will be accessible.
-- `admin-endpoints.e2eadmin.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
-- `admin-endpoints.e2eadmin.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
+- `admin-endpoints.tracelog.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
- `admin-endpoints.collected-metrics.enabled` - if equals to `true` the endpoint will be available.
- `admin-endpoints.collected-metrics.path` - the server context path where the endpoint will be accessible.
- `admin-endpoints.collected-metrics.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
- `admin-endpoints.collected-metrics.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
-- `admin-endpoints.force-deals-update.enabled` - if equals to `true` the endpoint will be available.
-- `admin-endpoints.force-deals-update.path` - the server context path where the endpoint will be accessible.
-- `admin-endpoints.force-deals-update.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
-- `admin-endpoints.force-deals-update.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
+- `admin-endpoints.logging-changelevel.enabled` - if equals to `true` the endpoint will be available.
+- `admin-endpoints.logging-changelevel.path` - the server context path where the endpoint will be accessible
+- `admin-endpoints.logging-changelevel.on-application-port` - when equals to `false` endpoint will be bound to `admin.port`.
+- `admin-endpoints.logging-changelevel.protected` - when equals to `true` endpoint will be protected by basic authentication configured in `admin-endpoints.credentials`
- `admin-endpoints.credentials` - user and password for access to admin endpoints if `admin-endpoints.[NAME].protected` is true`.
@@ -237,6 +235,12 @@ Also, each bidder could have its own bidder-specific options.
So far metrics cannot be submitted simultaneously to many backends. Currently we support `graphite` and `influxdb`.
Also, for debug purposes you can use `console` as metrics backend.
+For `logback` backend type available next options:
+- `metrics.logback.enabled` - if equals to `true` then logback reporter will be started.
+- `metrics.logback.name` - name of logger element in the logback configuration file.
+- `metrics.logback.interval` - interval in seconds between successive sending metrics.
+
+
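+A minimal sketch of the logback reporter configuration; the logger name and interval are illustrative:
+
+```yaml
+metrics:
+  logback:
+    enabled: true
+    name: metrics   # logger name from the logback configuration file
+    interval: 60    # seconds between successive metric submissions
+```
+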
For `graphite` backend type available next options:
- `metrics.graphite.enabled` - if equals to `true` then `graphite` will be used to submit metrics.
- `metrics.graphite.prefix` - the prefix of all metric names.
@@ -274,10 +278,20 @@ See [metrics documentation](metrics.md) for complete list of metrics submitted a
- `metrics.accounts.basic-verbosity` - a list of accounts for which only basic metrics will be submitted.
- `metrics.accounts.detailed-verbosity` - a list of accounts for which all metrics will be submitted.
+For `JVM` metrics available next options:
+- `metrics.jmx.enabled` - if equals to `true` then `jvm.gc` and `jvm.memory` metrics will be submitted.
+
## Cache
- `cache.scheme` - set the external Cache Service protocol: `http`, `https`, etc.
- `cache.host` - set the external Cache Service destination in format `host:port`.
- `cache.path` - set the external Cache Service path, for example `/cache`.
+- `cache.internal.scheme` - set the internal Cache Service protocol: `http`, `https`, etc. The internal scheme takes priority over the external one when provided.
+- `cache.internal.host` - set the internal Cache Service destination in format `host:port`. The internal host takes priority over the external one when provided.
+- `cache.internal.path` - set the internal Cache Service path, for example `/cache`. The internal path takes priority over the external one when provided.
+- `storage.pbc.enabled` - if set to `true`, allows storing modules' data in third-party storage.
+- `storage.pbc.path` - set the external Cache Service path for module caching, for example `/pbc-storage`.
+- `cache.api-key-secured` - if set to `true`, Prebid Server adds a special API key header to Prebid Cache requests.
+- `pbc.api.key` - set the external Cache Service API key for secured calls.
- `cache.query` - appends to the cache path as query string params (used for legacy Auction requests).
- `cache.banner-ttl-seconds` - how long (in seconds) banner will be available via the external Cache Service.
- `cache.video-ttl-seconds` - how long (in seconds) video creative will be available via the external Cache Service.
@@ -285,6 +299,8 @@ See [metrics documentation](metrics.md) for complete list of metrics submitted a
for particular publisher account. Overrides `cache.banner-ttl-seconds` property.
- `cache.account..video-ttl-seconds` - how long (in seconds) video creative will be available in Cache Service
for particular publisher account. Overrides `cache.video-ttl-seconds` property.
+- `cache.default-ttl-seconds.{banner, video, audio, native}` - a default value for how long (in seconds) a creative of the specific type will be available in Cache Service.
+- `cache.append-trace-info-to-cache-id` - if set to `true`, causes the account ID and datacenter to be appended to the cache UUID: _ACCOUNT-DATACENTER-remainderOfUUID_. Implies that the cache UUID will be generated by the Prebid Server.
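+
+A sketch of how the external and internal Cache Service settings above could be combined; hosts and TTLs are illustrative:
+
+```yaml
+cache:
+  scheme: https
+  host: prebid-cache.example.com:443
+  path: /cache
+  internal:
+    scheme: http                       # takes priority over the external scheme when provided
+    host: prebid-cache.internal:8080
+    path: /cache
+  api-key-secured: false
+  banner-ttl-seconds: 300
+  video-ttl-seconds: 1500
+```
+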
## Application settings (account configuration, stored ad unit configurations, stored requests)
Preconfigured application settings can be obtained from multiple data sources consequently:
@@ -294,6 +310,10 @@ Preconfigured application settings can be obtained from multiple data sources co
Warning! Application will not start in case of no one data source is defined and you'll get an exception in logs.
+For request validation mode available next options:
+- `settings.fail-on-unknown-bidders` - fail with a validation error or just emit a warning for unknown bidders.
+- `settings.fail-on-disabled-bidders` - fail with a validation error or just emit a warning for disabled bidders.
+
For filesystem data source available next options:
- `settings.filesystem.settings-filename` - location of file settings.
- `settings.filesystem.stored-requests-dir` - directory with stored requests.
@@ -309,8 +329,10 @@ For database data source available next options:
- `settings.database.user` - database user.
- `settings.database.password` - database password.
- `settings.database.pool-size` - set the initial/min/max pool size of database connections.
+- `settings.database.idle-connection-timeout` - set the idle timeout in seconds. Zero means no timeout. Determines whether a connection is closed and returned to the pool when no data is sent or received within the timeout.
+- `settings.database.enable-prepared-statement-caching` - enable caching of prepared statements so that they can be reused. Defaults to `false`. Please be wary of DB server limitations, as the cache is maintained per database connection.
+- `settings.database.max-prepared-statement-cache-size` - set the maximum size of the prepared statement cache. Defaults to `256`. Only takes effect when `settings.database.enable-prepared-statement-caching` is set to `true`. Please note that the cache size is multiplied by `settings.database.pool-size`.
- `settings.database.account-query` - the SQL query to fetch account.
-- `settings.database.provider-class` - type of connection pool to be used: `hikari` or `c3p0`.
- `settings.database.stored-requests-query` - the SQL query to fetch stored requests.
- `settings.database.amp-stored-requests-query` - the SQL query to fetch AMP stored requests.
- `settings.database.stored-responses-query` - the SQL query to fetch stored responses.
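+
+A hedged sketch of the connection-pool and prepared-statement options above; the user, password and sizes are illustrative placeholders:
+
+```yaml
+settings:
+  database:
+    user: prebid
+    password: secret
+    pool-size: 20
+    idle-connection-timeout: 300            # seconds; 0 disables the idle timeout
+    enable-prepared-statement-caching: true
+    max-prepared-statement-cache-size: 256  # maintained per database connection
+```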
@@ -324,9 +346,10 @@ For HTTP data source available next options:
- `settings.http.amp-endpoint` - the url to fetch AMP stored requests.
- `settings.http.video-endpoint` - the url to fetch video stored requests.
- `settings.http.category-endpoint` - the url to fetch categories for long form video.
+- `settings.http.rfc3986-compatible` - if equals to `true` the url will be built according to RFC 3986, `false` by default.
For account processing rules available next options:
-- `settings.enforce-valid-account` - if equals to `true` then request without account id will be rejected with 401.
+- `settings.enforce-valid-account` - if equals to `true` then request without account id will be rejected with 401.
- `settings.generate-storedrequest-bidrequest-id` - overrides `bidrequest.id` in amp or app stored request with generated UUID if true. Default value is false. This flag can be overridden by setting `bidrequest.id` as `{{UUID}}` placeholder directly in stored request.
It is possible to specify default account configuration values that will be assumed if account config have them
@@ -353,6 +376,7 @@ See [application settings](application-settings.md) for full reference of availa
For caching available next options:
- `settings.in-memory-cache.ttl-seconds` - how long (in seconds) data will be available in LRU cache.
- `settings.in-memory-cache.cache-size` - the size of LRU cache.
+- `settings.in-memory-cache.jitter-seconds` - jitter (in seconds) for `settings.in-memory-cache.ttl-seconds` parameter.
- `settings.in-memory-cache.notification-endpoints-enabled` - if equals to `true` two additional endpoints will be
available: [/storedrequests/openrtb2](endpoints/storedrequests/openrtb2.md) and [/storedrequests/amp](endpoints/storedrequests/amp.md).
- `settings.in-memory-cache.account-invalidation-enabled` - if equals to `true` additional admin protected endpoints will be
@@ -361,14 +385,36 @@ available: `/cache/invalidate?account={accountId}` which remove account from the
- `settings.in-memory-cache.http-update.amp-endpoint` - the url to fetch AMP stored request updates.
- `settings.in-memory-cache.http-update.refresh-rate` - refresh period in ms for stored request updates.
- `settings.in-memory-cache.http-update.timeout` - timeout for obtaining stored request updates.
-- `settings.in-memory-cache.jdbc-update.init-query` - initial query for fetching all stored requests at the startup.
-- `settings.in-memory-cache.jdbc-update.update-query` - a query for periodical update of stored requests, that should
-contain 'WHERE last_updated > ?' to fetch only the records that were updated since previous check.
-- `settings.in-memory-cache.jdbc-update.amp-init-query` - initial query for fetching all AMP stored requests at the startup.
-- `settings.in-memory-cache.jdbc-update.amp-update-query` - a query for periodical update of AMP stored requests, that should
-contain 'WHERE last_updated > ?' to fetch only the records that were updated since previous check.
-- `settings.in-memory-cache.jdbc-update.refresh-rate` - refresh period in ms for stored request updates.
-- `settings.in-memory-cache.jdbc-update.timeout` - timeout for obtaining stored request updates.
+- `settings.in-memory-cache.database-update.init-query` - initial query for fetching all stored requests at the startup.
+- `settings.in-memory-cache.database-update.update-query` - a query for periodic update of stored requests, which should
+contain 'WHERE last_updated > ?' for MySQL and 'WHERE last_updated > $1' for PostgreSQL to fetch only the records that were updated since the previous check.
+- `settings.in-memory-cache.database-update.amp-init-query` - initial query for fetching all AMP stored requests at the startup.
+- `settings.in-memory-cache.database-update.amp-update-query` - a query for periodic update of AMP stored requests, which should
+contain 'WHERE last_updated > ?' for MySQL and 'WHERE last_updated > $1' for PostgreSQL to fetch only the records that were updated since the previous check.
+- `settings.in-memory-cache.database-update.refresh-rate` - refresh period in ms for stored request updates.
+- `settings.in-memory-cache.database-update.timeout` - timeout for obtaining stored request updates.
+
+For S3 storage configuration available next options (see the example below):
+- `settings.in-memory-cache.s3-update.refresh-rate` - refresh period in ms for stored request updates in S3
+- `settings.s3.access-key-id` - an access key (optional)
+- `settings.s3.secret-access-key` - a secret access key (optional)
+- `settings.s3.region` - a region, AWS_GLOBAL by default
+- `settings.s3.endpoint` - an endpoint
+- `settings.s3.bucket` - a bucket name
+- `settings.s3.force-path-style` - forces the S3 client to use path-style addressing for buckets.
+- `settings.s3.accounts-dir` - a directory with stored accounts
+- `settings.s3.stored-imps-dir` - a directory with stored imps
+- `settings.s3.stored-requests-dir` - a directory with stored requests
+- `settings.s3.stored-responses-dir` - a directory with stored responses
+
+If `settings.s3.access-key-id` and `settings.s3.secret-access-key` are not specified in the Prebid Server configuration then AWS credentials will be looked up in this order:
+- Java System Properties - `aws.accessKeyId` and `aws.secretAccessKey`
+- Environment Variables - `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`
+- Web Identity Token credentials from system properties or environment variables
+- Credential profiles file at the default location (`~/.aws/credentials`) shared by all AWS SDKs and the AWS CLI
+- Credentials delivered through the Amazon EC2 container service if "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI" environment variable is set and security manager has permission to access the variable,
+- Instance profile credentials delivered through the Amazon EC2 metadata service
+
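+A minimal sketch of an S3-backed settings source; the region, bucket, directories and refresh rate are illustrative. `access-key-id`/`secret-access-key` are omitted here, so the AWS credentials lookup order described above applies:
+
+```yaml
+settings:
+  s3:
+    region: us-east-1
+    endpoint: https://s3.us-east-1.amazonaws.com
+    bucket: my-prebid-settings
+    accounts-dir: accounts
+    stored-requests-dir: stored-requests
+    stored-imps-dir: stored-imps
+    stored-responses-dir: stored-responses
+  in-memory-cache:
+    s3-update:
+      refresh-rate: 900000   # ms between stored request refreshes from S3
+```
+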
For targeting available next options:
- `settings.targeting.truncate-attr-chars` - set the max length for names of targeting keywords (0 means no truncation).
@@ -402,6 +448,7 @@ If not defined in config all other Health Checkers would be disabled and endpoin
- `gdpr.eea-countries` - comma separated list of countries in European Economic Area (EEA).
- `gdpr.default-value` - determines GDPR in scope default value (if no information in request and no geolocation data).
- `gdpr.host-vendor-id` - the organization running a cluster of Prebid Servers.
+- `datacenter-region` - the datacenter region of a cluster of Prebid Servers
- `gdpr.enabled` - gdpr feature switch. Default `true`.
- `gdpr.purposes.pN.enforce-purpose` - define type of enforcement confirmation: `no`/`basic`/`full`. Default `full`
- `gdpr.purposes.pN.enforce-vendors` - if equals to `true`, user must give consent to use vendors. Purposes will be omitted. Default `true`
@@ -431,8 +478,30 @@ If not defined in config all other Health Checkers would be disabled and endpoin
- `geolocation.type` - set the geo location service provider, can be `maxmind` or custom provided by hosting company.
- `geolocation.maxmind` - section for [MaxMind](https://www.maxmind.com) configuration as geo location service provider.
- `geolocation.maxmind.remote-file-syncer` - use RemoteFileSyncer component for downloading/updating MaxMind database file. See [RemoteFileSyncer](#remote-file-syncer) section for its configuration.
+- `geolocation.configurations[]` - a list of geo-lookup configurations used when `geolocation.type` is `configuration` (see the example below)
+- `geolocation.configurations[].address-pattern` - an address pattern for matching an IP to look up
+- `geolocation.configurations[].geo-info.continent` - a continent to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.country` - a country to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.region` - a region to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.region-code` - a region code to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.city` - a city to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.metro-google` - a Google metro code to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.metro-nielsen` - a Nielsen metro code to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.zip` - a zip code to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.connection-speed` - a connection speed to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.lat` - a latitude to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.lon` - a longitude to return on the `configuration` geo-lookup
+- `geolocation.configurations[].geo-info.time-zone` - a time zone to return on the `configuration` geo-lookup
+
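+A sketch of a static `configuration`-type geo lookup; the address pattern and returned geo fields are illustrative:
+
+```yaml
+geolocation:
+  enabled: true
+  type: configuration
+  configurations:
+    - address-pattern: "10.10."     # matched against the IP address to look up
+      geo-info:
+        continent: eu
+        country: de
+        city: Berlin
+        time-zone: Europe/Berlin
+```
+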
+## IPv6
+- `ipv6.always-mask-right` - a bit mask for masking an IPv6 address of the device
+- `ipv6.anon-left-mask-bits` - a bit mask for anonymizing an IPv6 address of the device
+- `ipv6.private-networks` - a list of known private/local networks to skip masking of an IP address of the device
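+
+A hedged sketch of the IPv6 options above; the values and network list are purely illustrative:
+
+```yaml
+ipv6:
+  always-mask-right: 64       # bit mask for masking the device IPv6 address
+  anon-left-mask-bits: 56     # bit mask used when anonymizing the device IPv6 address
+  private-networks:           # known private/local networks skipped from masking
+    - ::1/128
+    - fc00::/7
+```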
## Analytics
+- `analytics.global.adapters` - Names of analytics adapters that will work for each request, except those disabled at the account level.
+
+For the `pubstack` analytics adapter
- `analytics.pubstack.enabled` - if equals to `true` the Pubstack analytics module will be enabled. Default value is `false`.
- `analytics.pubstack.endpoint` - url for reporting events and fetching configuration.
- `analytics.pubstack.scopeid` - defined the scope provided by the Pubstack Support Team.
@@ -442,40 +511,29 @@ If not defined in config all other Health Checkers would be disabled and endpoin
- `analytics.pubstack.buffers.count` - threshold in events count for buffer to send events
- `analytics.pubstack.buffers.report-ttl-ms` - max period between two reports.
-## Programmatic Guaranteed Delivery
-- `deals.planner.plan-endpoint` - planner endpoint to get plans from.
-- `deals.planner.update-period` - cron expression to start job for requesting Line Item metadata updates from the Planner.
-- `deals.planner.plan-advance-period` - cron expression to start job for advancing Line Items to the next plan.
-- `deals.planner.retry-period-sec` - how long (in seconds) to wait before re-sending a request to the Planner that previously failed with 5xx HTTP error code.
-- `deals.planner.timeout-ms` - default operation timeout for requests to planner's endpoints.
-- `deals.planner.register-endpoint` - register endpoint to get plans from.
-- `deals.planner.register-period-sec` - time period (in seconds) to send register request to the Planner.
-- `deals.planner.username` - username for planner BasicAuth.
-- `deals.planner.password` - password for planner BasicAuth.
-- `deals.delivery-stats.delivery-period` - cron expression to start job for sending delivery progress to planner.
-- `deals.delivery-stats.cached-reports-number` - how many reports to cache while planner is unresponsive.
-- `deals.delivery-stats.timeout-ms` - default operation timeout for requests to delivery progress endpoints.
-- `deals.delivery-stats.username` - username for delivery progress BasicAuth.
-- `deals.delivery-stats.password` - password for delivery progress BasicAuth.
-- `deals.delivery-stats.line-items-per-report` - max number of line items in each report to split for batching. Default is 25.
-- `deals.delivery-stats.reports-interval-ms` - interval in ms between consecutive reports. Default is 0.
-- `deals.delivery-stats.batches-interval-ms` - interval in ms between consecutive batches. Default is 1000.
-- `deals.delivery-stats.request-compression-enabled` - enables request gzip compression when set to true.
-- `deals.delivery-progress.line-item-status-ttl-sec` - how long to store line item's metrics after it was expired.
-- `deals.delivery-progress.cached-plans-number` - how many plans to store in metrics per line item.
-- `deals.delivery-progress.report-reset-period`- cron expression to start job for closing current delivery progress and starting new one.
-- `deals.delivery-progress-report.competitors-number`- number of line items top competitors to send in delivery progress report.
-- `deals.user-data.user-details-endpoint` - user Data Store endpoint to get user details from.
-- `deals.user-data.win-event-endpoint` - user Data Store endpoint to which win events should be sent.
-- `deals.user-data.timeout` - time to wait (in milliseconds) for User Data Service response.
-- `deals.user-data.user-ids` - list of Rules for determining user identifiers to send to User Data Store.
-- `deals.max-deals-per-bidder` - maximum number of deals to send to each bidder.
-- `deals.alert-proxy.enabled` - enable alert proxy service if `true`.
-- `deals.alert-proxy.url` - alert service endpoint to send alerts to.
-- `deals.alert-proxy.timeout-sec` - default operation timeout for requests to alert service endpoint.
-- `deals.alert-proxy.username` - username for alert proxy BasicAuth.
-- `deals.alert-proxy.password` - password for alert proxy BasicAuth.
-- `deals.alert-proxy.alert-types` - key value pair of alert type and sampling factor to send high priority alert.
+For the `greenbids` analytics adapter
+- `analytics.greenbids.enabled` - if equals to `true` the Greenbids analytics module will be enabled. Default value is `false`.
+- `analytics.greenbids.analytics-server-version` - a server version to add to the event
+- `analytics.greenbids.analytics-server` - url for reporting events
+- `analytics.greenbids.timeout-ms` - timeout in milliseconds for report requests.
+- `analytics.greenbids.exploratory-sampling-split` - a sampling rate for report requests
+- `analytics.greenbids.default-sampling-rate` - a default sampling rate for report requests
+
+For the `agma` analytics adapter (see the example below):
+- `analytics.agma.enabled` - if equals to `true` the Agma analytics module will be enabled. Default value is `false`.
+- `analytics.agma.endpoint.url` - url for reporting events
+- `analytics.agma.endpoint.timeout-ms` - timeout in milliseconds for report requests.
+- `analytics.agma.endpoint.gzip` - if equals to `true` the Agma analytics module enables gzip encoding. Default value is `false`.
+- `analytics.agma.buffers.size-bytes` - threshold in bytes for buffer to send events.
+- `analytics.agma.buffers.count` - threshold in events count for buffer to send events.
+- `analytics.agma.buffers.timeout-ms` - max period between two reports.
+- `analytics.agma.accounts[].code` - an account code to send with an event
+- `analytics.agma.accounts[].publisher-id` - a publisher id to match an event to send
+- `analytics.agma.accounts[].site-app-id` - a site or app id to match an event to send
+
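+A sketch of an agma adapter configuration; the endpoint, buffer thresholds and account entries are illustrative:
+
+```yaml
+analytics:
+  agma:
+    enabled: true
+    endpoint:
+      url: https://analytics.example.com/events
+      timeout-ms: 2000
+      gzip: true
+    buffers:
+      size-bytes: 100000
+      count: 100
+      timeout-ms: 60000
+    accounts:
+      - code: agma-code-1
+        publisher-id: "1001"
+        site-app-id: example-site-id
+```
+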
+## Modules
+- `hooks.admin.module-execution` - a key-value map where the key is a module name and the value is a boolean defining whether the module's hooks should always be executed; if a module is not specified, it is executed by default when present in the execution plan
+- `settings.modules.require-config-to-invoke` - when enabled, requires a runtime config to exist for a module.
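+
+A sketch of these two options together; `some-module` is a hypothetical module name used only for illustration:
+
+```yaml
+hooks:
+  admin:
+    module-execution:
+      some-module: true   # always execute this module's hooks
+settings:
+  modules:
+    require-config-to-invoke: false
+```
+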
## Debugging
- `debug.override-token` - special string token for overriding Prebid Server account and/or adapter debug information presence in the auction response.
@@ -483,3 +541,20 @@ If not defined in config all other Health Checkers would be disabled and endpoin
To override (force enable) account and/or bidder adapter debug setting, a client must include `x-pbs-debug-override`
HTTP header in the auction call containing same token as in the `debug.override-token` property. This will make Prebid
Server ignore account `auction.debug-allow` and/or `adapters..debug.allow` properties.
+
+## Privacy Sandbox
+- `auction.privacysandbox.topicsdomain` - the list of Sec-Browsing-Topics for the Privacy Sandbox
+
+## AMP
+- `amp.custom-targeting` - a list of bidders that support custom targeting
+
+## Hooks
+- `hooks.host-execution-plan` - a host execution plan for modules
+- `hooks.default-account-execution-plan` - a default account execution plan
+
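+A hedged sketch of a host execution plan, assuming the JSON execution-plan format used by Prebid Server modules; the endpoint, stage, module and hook codes are illustrative:
+
+```yaml
+hooks:
+  host-execution-plan: >
+    {
+      "endpoints": {
+        "/openrtb2/auction": {
+          "stages": {
+            "processed-auction-request": {
+              "groups": [
+                {
+                  "timeout": 100,
+                  "hook-sequence": [
+                    {"module-code": "some-module", "hook-impl-code": "some-hook"}
+                  ]
+                }
+              ]
+            }
+          }
+        }
+      }
+    }
+```
+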
+## Price Floors Debug
+- `price-floors.enabled` - enables price floors for the account if `true`. Defaults to `true`.
+- `price-floors.min-max-age-sec` - the minimum time-to-live (in seconds) for fetched price floors data in the cache.
+- `price-floors.min-period-sec` - the minimum refresh period (in seconds) for fetching price floors data.
+- `price-floors.min-timeout-ms` - a min timeout in ms for fetching price floors data.
+- `price-floors.max-timeout-ms` - a max timeout in ms for fetching price floors data.
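+
+A sketch of the price-floors options above with illustrative values:
+
+```yaml
+price-floors:
+  enabled: true
+  min-max-age-sec: 60
+  min-period-sec: 300
+  min-timeout-ms: 10
+  max-timeout-ms: 100
+```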
diff --git a/docs/deals.md b/docs/deals.md
deleted file mode 100644
index fca8c585e26..00000000000
--- a/docs/deals.md
+++ /dev/null
@@ -1,152 +0,0 @@
-# Deals
-
-## Planner and Register services
-
-### Planner service
-
-Periodically request Line Item metadata from the Planner. Line Item metadata includes:
-1. Line Item details
-2. Targeting
-3. Frequency caps
-4. Delivery schedule
-
-### Register service
-
-Each Prebid Server instance register itself with the General Planner with a health index
-(QoS indicator based on its internal counters like circuit breaker trip counters, timeouts, etc.)
-and KPI like ad requests per second.
-
-Also allows planner send command to PBS admin endpoint to stored request caches and tracelogs.
-
-### Planner and register service configuration
-
-```yaml
-planner:
- register-endpoint:
- plan-endpoint:
- update-period: "0 */1 * * * *"
- register-period-sec: 60
- timeout-ms: 8000
- username:
- password:
-```
-
-## Deals stats service
-
-Supports sending reports to delivery stats serving with following metrics:
-
-1. Number of client requests seen since start-up
-2. For each Line Item
-- Number of tokens spent so far at each token class within active and expired plans
-- Number of times the account made requests (this will be the same across all LineItem for the account)
-- Number of win notifications
-- Number of times the domain part of the target matched
-- Number of times impressions matched whole target
-- Number of times impressions matched the target but was frequency capped
-- Number of times impressions matched the target but the fcap lookup failed
-- Number of times LineItem was sent to the bidder
-- Number of times LineItem was sent to the bidder as the top match
-- Number of times LineItem came back from the bidder
-- Number of times the LineItem response was invalidated
-- Number of times the LineItem was sent to the client
-- Number of times the LineItem was sent to the client as the top match
-- Array of top 10 competing LineItems sent to client
-
-### Deals stats service configuration
-
-```yaml
-delivery-stats:
- endpoint:
- delivery-period: "0 */1 * * * *"
- cached-reports-number: 20
- line-item-status-ttl-sec: 3600
- timeout-ms: 8000
- username:
- password:
-```
-
-## Alert service
-
-Sends out alerts when PBS cannot talk to general planner and other critical situations. Alerts are simply JSON messages
-over HTTP sent to a central proxy server.
-
-```yaml
- alert-proxy:
- enabled: truew
- timeout-sec: 10
- url:
- username:
- password:
- alert-types:
- :
- pbs-planner-empty-response-error: 15
-```
-
-## GeoLocation service
-
-This service currently has 1 implementation:
-- MaxMind
-
-In order to support targeting by geographical attributes the service will provide the following information:
-
-1. `continent` - Continent code
-2. `region` - Region code using ISO-3166-2
-3. `metro` - Nielsen DMAs
-4. `city` - city using provider specific encoding
-5. `lat` - latitude from -90.0 to +90.0, where negative is south
-6. `lon` - longitude from -180.0 to +180.0, where negative is west
-
-### GeoLocation service configuration for MaxMind
-
-```yaml
-geolocation:
- enabled: true
- type: maxmind
- maxmind:
- remote-file-syncer:
- download-url:
- save-filepath:
- tmp-filepath:
- retry-count: 3
- retry-interval-ms: 3000
- timeout-ms: 300000
- update-interval-ms: 0
- http-client:
- connect-timeout-ms: 2500
- max-redirects: 3
-```
-
-## User Service
-
-This service is responsible for:
-- Requesting user targeting segments and frequency capping status from the User Data Store
-- Reporting to User Data Store when users finally see ads to aid in correctly enforcing frequency caps
-
-### User service configuration
-
-```yaml
- user-data:
- win-event-endpoint:
- user-details-endpoint:
- timeout: 1000
- user-ids:
- - location: rubicon
- source: uid
- type: khaos
-```
-1. khaos, adnxs - types of the ids that will be specified in requests to User Data Store
-2. source - source of the id, the only supported value so far is “uids” which stands for uids cookie
-3. location - where exactly in the source to look for id
-
-## Device Info Service
-
-DeviceInfoService returns device-related attributes based on User-Agent for use in targeting:
-- deviceClass: desktop, tablet, phone, ctv
-- os: windows, ios, android, osx, unix, chromeos
-- osVersion
-- browser: chrome, firefox, edge, safari
-- browserVersion
-
-## See also
-
-- [Configuration](config.md)
diff --git a/docs/developers/bid-adapter-porting-guide.md b/docs/developers/bid-adapter-porting-guide.md
new file mode 100644
index 00000000000..ba0c900fdfb
--- /dev/null
+++ b/docs/developers/bid-adapter-porting-guide.md
@@ -0,0 +1,72 @@
+# Porting Guide
+
+## Overview
+
+First, thank you for taking on the migration of an adapter from Go to Java. But really, the best way to think of it is not as a straight port. Instead, we recommend treating this task as a re-implementation. It will take a few adapters before you fully get the hang of it, and that's okay; everyone goes through a learning curve.
+
+Keep in mind that the PBS-Go team is more lenient about what they allow in adapters compared to the PBS-Java team.
+
+## Pull Request Requirements
+
+We would appreciate it if your porting PR title follows these patterns:
+
+- `Port <BidderName>: New Adapter` – For porting a completely new adapter to the project (e.g., `Port Kobler: New Adapter`).
+- `Port <BidderName>: <Feature>` – For porting a specific update to an existing adapter (e.g., `Port OpenX: Native Support`).
+- `Port <AliasName>: New alias for <BidderName>` – For porting an alias of an existing adapter to the project (e.g., `Port Artechnology: New alias of StartHub`).
+
+Additionally, we kindly ask that you:
+
+- Link any existing GitHub issues that your PR resolves. This ensures the issue will be automatically closed when your PR is merged.
+- Add the label `do not port` to your PR.
+
+## Porting Requirements
+
+1. **Feature Parity**: A Java adapter should have the same functionality as the Go adapter.
+2. **Java Adapter Code Should:**
+ - Follow the code style of the PBS-Java repository (see [the code style page](code-style.md)).
+ - Be well-written Java code: clear, readable, optimized, and following best practices.
+ - Maintain a structure similar to existing adapters (see below).
+3. **The adapter should be covered with tests:**
+ - Unit tests for implementation details.
+ - A simple integration test to ensure the adapter is reachable, can send requests to the bidder, and that its configuration works.
+
+### What does "having a similar structure to existing adapters" mean?
+
+The PBS-Java codebase has evolved over time. While existing adapters may not be perfect and could contain legacy issues (e.g., using outdated Java syntax), they still serve as a valuable reference for learning, inspiration, and even reuse.
+
+Each adapter is unique, but most share common patterns. For example, nearly every adapter includes:
+
+1. **A `makeHttpRequests(...)` method**
+ - Iterates over the `imps` in the bid request:
+ - Parses `imp[].ext.prebid.bidder` (i.e., bidder static parameters).
+ - Modifies the `imp`.
+ - Collects errors encountered during `imp` processing.
+ - Prepares outgoing request(s):
+ - Constructs headers.
+ - Builds the request URL.
+ - Modifies the incoming bid request based on the updated `imps`.
+
+2. **A `makeBids(...)` method**
+ - Parses the `BidResponse`.
+ - Iterates over `seatBids` and `bids`.
+ - Creates a list of `BidderBid` objects.
+
+### Ensuring Structural Consistency
+
+To maintain consistency across adapters:
+- Fit the Go adapter functionality into the Java adapter structure.
+- Use the same or similar method and variable names where applicable.
+- Reuse existing solutions for common functionality (e.g., use `BidderUtil`, `HttpUtil` classes).
+- Ensure unit tests follow a similar structure, with comparable test cases and code patterns.
+
+## Specific Rules and Tips for Porting
+
+1. Begin by determining how the Go adapter's functionality fits into the Java adapter structure.
+2. Go adapters deserialize JSON objects in-place, while Java adapters work with pre-deserialized objects. As a result, many errors thrown in the Go version do not apply in Java.
+3. **No hardcoded URLs.** If an adapter has a "test URL," it must be defined in the YAML file. See `org.prebid.server.spring.config.bidder.NextMillenniumConfiguration.NextMillenniumConfigurationProperties` for an example of how to handle special YAML entries.
+4. The structure of Go and Java bidder configuration files differs—do not copy and paste directly. Pay attention to details such as macros in the endpoint and redirect/iframe URLs.
+5. **Prohibited in bidder adapters:**
+ - Blocking code.
+ - Fully dynamic hostnames in URLs.
+ - Non-thread-safe code (bidder adapters should not store state internally).
+6. If an adapter has no special logic, consider using an alias to `Generic` instead. In this case, there still needs to be an integration test for this bidder, e.g. `src/test/java/org/prebid/server/it/BidderNameTest.java`.
diff --git a/docs/developers/code-reviews.md b/docs/developers/code-reviews.md
index 78728fef18a..ba7fb0ee526 100644
--- a/docs/developers/code-reviews.md
+++ b/docs/developers/code-reviews.md
@@ -3,33 +3,21 @@
## Standards
Anyone is free to review and comment on any [open pull requests](https://github.com/prebid/prebid-server-java/pulls).
-All pull requests must be reviewed and approved by at least one [core member](https://github.com/orgs/prebid/teams/core/members) before merge.
-
-Very small pull requests may be merged with just one review if they:
-
-1. Do not change the public API.
-2. Have low risk of bugs, in the opinion of the reviewer.
-3. Introduce no new features, or impact the code architecture.
-
-Larger pull requests must meet at least one of the following two additional requirements.
-
-1. Have a second approval from a core member
-2. Be open for 5 business days with no new changes requested.
+1. PRs that touch only adapters and modules can be approved by one reviewer before merge.
+2. PRs that touch PBS-core must be reviewed and approved by at least two 'core' reviewers before merge.
## Process
-New pull requests should be [assigned](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/)
-to a core member for review within 3 business days of being opened.
-That person should either approve the changes or request changes within 4 business days of being assigned.
-If they're too busy, they should assign it to someone else who can review it within that timeframe.
+New pull requests must be [assigned](https://help.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/)
+to a reviewer within 5 business days of being opened. That person must either approve the changes or request changes within 5 business days of being assigned.
+
+If a reviewer is too busy, they should re-assign it to someone else as soon as possible so that person has enough time to take over the review and still meet the 5-day goal. Please tag the new reviewer in the PR. If you don't know who to assign it to, use the #prebid-server-java-dev Slack channel to ask for help in re-assigning.
-If the changes are small, that member can merge the PR once the changes are complete. Otherwise, they should
-assign the pull request to another member for a second review.
+If a reviewer is going to be unavailable for more than a few days, they should update the notes column of the duty spreadsheet or drop a note about their availability into the Slack channel.
-The pull request can then be merged whenever the second reviewer approves, or if 5 business days pass with no farther
-changes requested by anybody, whichever comes first.
+After the review, if the PR touches PBS-core, it must be assigned to a second reviewer.
-## Priorities
+## Review Priorities
Code reviews should focus on things which cannot be validated by machines.
@@ -43,3 +31,10 @@ explaining it. Are there better ways to achieve those goals?
- Does the code use any global, mutable state? [Inject dependencies](https://en.wikipedia.org/wiki/Dependency_injection) instead!
- Can the code be organized into smaller, more modular pieces?
- Is there dead code which can be deleted? Or TODO comments which should be resolved?
+- Look for code used by other adapters. Encourage adapter submitter to utilize common code.
+- Specific bid adapter rules:
+ - The email contact must work and be a group, not an individual.
+ - Host endpoints cannot be fully dynamic. i.e. they can utilize "https://REGION.example.com", but not "https://HOST".
+ - They cannot _require_ a "region" parameter. Region may be an optional parameter, but must have a default.
+ - Direct use of HTTP is prohibited - *implement an existing Bidder interface that will do all the work*
+ - If the ORTB is just forwarded to the endpoint, use the generic adapter - *define the new adapter as the alias of the generic adapter*
diff --git a/docs/developers/code-style.md b/docs/developers/code-style.md
index 14704d20799..de42811030f 100644
--- a/docs/developers/code-style.md
+++ b/docs/developers/code-style.md
@@ -28,7 +28,7 @@ in `pom.xml` directly.
It is recommended to define version of library to separate property in `pom.xml`:
-```
+```xml
2.6.2
@@ -48,7 +48,7 @@ It is recommended to define version of library to separate property in `pom.xml`
Do not use wildcard in imports because they hide what exactly is required by the class.
-```
+```java
// bad
import java.util.*;
@@ -61,7 +61,7 @@ import java.util.Map;
Prefer to use `camelCase` naming convention for variables and methods.
-```
+```java
// bad
String account_id = "id";
@@ -71,7 +71,7 @@ String accountId = "id";
Name of variable should be self-explanatory:
-```
+```java
// bad
String s = resolveParamA();
@@ -83,7 +83,7 @@ This helps other developers flesh your code out better without additional questi
For `Map`s it is recommended to use `To` between key and value designation:
-```
+```java
// bad
Map map = getData();
@@ -97,7 +97,7 @@ Make data transfer object(DTO) classes immutable with static constructor.
This can be achieved by using Lombok and `@Value(staticConstructor="of")`. When constructor uses multiple(more than 4) arguments, use builder instead(`@Builder`).
If dto must be modified somewhere, use builders annotation `toBuilder=true` parameter and rebuild instance by calling `toBuilder()` method.
-```
+```java
// bad
public class MyDto {
@@ -138,7 +138,7 @@ final MyDto updatedDto = myDto.toBuilder().value("newValue").build();
Although Java supports the `var` keyword at the time of writing this documentation, the maintainers have chosen not to utilize it within the PBS codebase.
Instead, write full variable type.
-```
+```java
// bad
final var result = getResult();
@@ -150,7 +150,7 @@ final Data result = getResult();
Enclosing parenthesis should be placed on expression end.
-```
+```java
// bad
methodCall(
long list of arguments
@@ -163,7 +163,7 @@ methodCall(
This also applies for nested expressions.
-```
+```java
// bad
methodCall(
nestedCall(
@@ -181,7 +181,7 @@ methodCall(
Please, place methods inside a class in call order.
-```
+```java
// bad
public interface Test {
@@ -249,7 +249,7 @@ Define interface first method, then all methods that it is calling, then second
Not strict, but methods with long parameters list, that cannot be placed on single line,
should add empty line before body definition.
-```
+```java
// bad
public static void method(
parameters definitions) {
@@ -266,7 +266,7 @@ public static void method(
Use collection literals where it is possible to define and initialize collections.
-```
+```java
// bad
final List foo = new ArrayList();
foo.add("foo");
@@ -278,7 +278,7 @@ final List foo = List.of("foo", "bar");
Also, use special methods of Collections class for empty or single-value one-line collection creation. This makes developer intention clear and code less error-prone.
-```
+```java
// bad
return List.of();
@@ -296,7 +296,7 @@ return Collections.singletonList("foo");
It is recommended to declare variable as `final`- not strict but rather project convention to keep the code safe.
-```
+```java
// bad
String value = "value";
@@ -308,7 +308,7 @@ final String value = "value";
Results of long ternary operators should be on separate lines:
-```
+```java
// bad
boolean result = someVeryVeryLongConditionThatForcesLineWrap ? firstResult
: secondResult;
@@ -321,7 +321,7 @@ boolean result = someVeryVeryLongConditionThatForcesLineWrap
Not so strict, but short ternary operations should be on one line:
-```
+```java
// bad
boolean result = someShortCondition
? firstResult
@@ -335,7 +335,7 @@ boolean result = someShortCondition ? firstResult : secondResult;
Do not rely on operator precedence in boolean logic, use parenthesis instead. This will make code simpler and less error-prone.
-```
+```java
// bad
final boolean result = a && b || c;
@@ -347,7 +347,7 @@ final boolean result = (a && b) || c;
Try to avoid hard-readable multiple nested method calls:
-```
+```java
// bad
int resolvedValue = resolveValue(fetchExternalJson(url, httpClient), populateAdditionalKeys(mainKeys, keyResolver));
@@ -361,7 +361,7 @@ int resolvedValue = resolveValue(externalJson, additionalKeys);
Try not to retrieve same data more than once:
-```
+```java
// bad
if (getData() != null) {
final Data resolvedData = resolveData(getData());
@@ -380,7 +380,7 @@ if (data != null) {
If you're dealing with incoming data, please be sure to check if the nested object is not null before chaining.
-```
+```java
// bad
final ExtRequestTargeting targeting = bidRequest.getExt().getPrebid().getTargeting();
@@ -400,7 +400,7 @@ We are trying to get rid of long chains of null checks, which are described in s
Don't leave commented code (don't think about the future).
-```
+```java
// bad
// String iWillUseThisLater = "never";
```
@@ -426,7 +426,7 @@ The code should be covered over 90%.
The common way for writing tests has to comply with `given-when-then` style.
-```
+```java
// given
final BidRequest bidRequest = BidRequest.builder().id("").build();
@@ -451,7 +451,7 @@ The team decided to use name `target` for class instance under test.
Unit tests should be as granular as possible. Try to split unit tests into smaller ones until this is impossible to do.
-```
+```java
// bad
@Test
public void testFooBar() {
@@ -487,7 +487,7 @@ public void testBar() {
This also applies to cases where same method is tested with different arguments inside single unit test.
Note: This represents the replacement we have selected for parameterized testing.
-```
+```java
// bad
@Test
public void testFooFirstSecond() {
@@ -527,7 +527,7 @@ It is also recommended to structure test method names with this scheme:
name of method that is being tested, word `should`, what a method should return.
If a method should return something based on a certain condition, add word `when` and description of a condition.
-```
+```java
// bad
@Test
public void doSomethingTest() {
@@ -547,7 +547,7 @@ public void processDataShouldReturnResultWhenInputIsData() {
Place data used in test as close as possible to test code. This will make tests easier to read, review and understand.
-```
+```java
// bad
@Test
public void testFoo() {
@@ -576,7 +576,7 @@ This point also implies the next one.
Since we are trying to improve test simplicity and readability and place test data close to tests, we decided to avoid usage of top level constants where it is possible.
Instead, just inline constant values.
-```
+```java
// bad
public class TestClass {
@@ -609,7 +609,7 @@ public class TestClass {
Don't use real information in tests, like existing endpoint URLs, account IDs, etc.
-```
+```java
// bad
String ENDPOINT_URL = "https://prebid.org";
diff --git a/docs/developers/functional-tests.md b/docs/developers/functional-tests.md
index fd216eb89c5..523466fb0b0 100644
--- a/docs/developers/functional-tests.md
+++ b/docs/developers/functional-tests.md
@@ -70,7 +70,7 @@ Functional tests need to have name template **.\*Spec.groovy**
**Properties:**
`launchContainers` - responsible for starting the MockServer and the MySQLContainer container. Default value is false to not launch containers for unit tests.
-`tests.max-container-count` - maximum number of simultaneously running PBS containers. Default value is 2.
+`tests.max-container-count` - maximum number of simultaneously running PBS containers. Default value is 5.
`skipFunctionalTests` - allow to skip funtional tests. Default value is false.
`skipUnitTests` - allow to skip unit tests. Default value is false.
@@ -131,7 +131,16 @@ Container for mocking different calls from PBS: prebid cache, bidders, currency
Container for Mysql database.
-- Use `org/prebid/server/functional/db_schema.sql` script for scheme.
+- Use `org/prebid/server/functional/db_mysql_schema.sql` script for the schema.
+- DataBase: `prebid`
+- Username: `prebid`
+- Password: `prebid`
+
+#### PostgreSQLContainer
+
+Container for PostgreSQL database.
+
+- Use `org/prebid/server/functional/db_psql_schema.sql` script for the schema.
- DataBase: `prebid`
- Username: `prebid`
- Password: `prebid`
diff --git a/docs/metrics.md b/docs/metrics.md
index 41dd45cc916..c07e0660598 100644
--- a/docs/metrics.md
+++ b/docs/metrics.md
@@ -11,7 +11,7 @@ Other available metrics not mentioned here can found at
where:
- `[IP]` should be equal to IP address of bound network interface on cluster node for Prebid Server (for example: `0.0.0.0`)
-- `[PORT]` should be equal to `http.port` configuration property
+- `[PORT]` should be equal to `server.http.port` configuration property
### HTTP client metrics
- `vertx.http.clients.connections.{min,max,mean,p95,p99}` - how long connections live
@@ -37,6 +37,7 @@ where `[DATASOURCE]` is a data source name, `DEFAULT_DS` by defaul.
## General auction metrics
- `app_requests` - number of requests received from applications
+- `debug_requests` - number of requests received (when debug mode is enabled)
- `no_cookie_requests` - number of requests without `uids` cookie or with one that didn't contain at least one live UID
- `request_time` - timer tracking how long did it take for Prebid Server to serve a request
- `imps_requested` - number if impressions requested
@@ -44,7 +45,9 @@ where `[DATASOURCE]` is a data source name, `DEFAULT_DS` by defaul.
- `imps_video` - number of video impressions
- `imps_native` - number of native impressions
- `imps_audio` - number of audio impressions
-- `requests.(ok|badinput|err|networkerr|blacklisted_account|blacklisted_app).(openrtb2-web|openrtb-app|amp|legacy)` - number of requests broken down by status and type
+- `disabled_bidder` - number of disabled bidders received within requests
+- `unknown_bidder` - number of unknown bidders received within requests
+- `requests.(ok|badinput|err|networkerr|blocklisted_account|blocklisted_app).(openrtb2-web|openrtb-app|amp|legacy)` - number of requests broken down by status and type
- `bidder-cardinality..requests` - number of requests targeting `` of bidders
- `connection_accept_errors` - number of errors occurred while establishing HTTP connection
- `db_query_time` - timer tracking how long did it take for database client to obtain the result for a query
@@ -89,7 +92,10 @@ Following metrics are collected and submitted if account is configured with `bas
Following metrics are collected and submitted if account is configured with `detailed` verbosity:
- `account..requests.type.(openrtb2-web,openrtb-app,amp,legacy)` - number of requests received from account with `` broken down by type of incoming request
-- `account..requests.rejected` - number of rejected requests caused by incorrect `accountId`
+- `account..debug_requests` - number of requests received from account with `` when debug mode is enabled
+- `account..requests.rejection` - number of rejected requests caused by incorrect `accountId`
+- `account..requests.disabled_bidder` - number of disabled bidders received within requests from account with ``
+- `account..requests.unknown_bidder` - number of unknown bidder names received within requests from account with ``
- `account..adapter..request_time` - timer tracking how long did it take to make a request to `` when incoming request was from ``
- `account..adapter..bids_received` - number of bids received from `` when incoming request was from ``
- `account..adapter..requests.(gotbids|nobid)` - number of requests made to `` broken down by result status when incoming request was from ``
@@ -98,11 +104,13 @@ Following metrics are collected and submitted if account is configured with `det
- `prebid_cache.requests.ok` - timer tracking how long did successful cache requests take
- `prebid_cache.requests.err` - timer tracking how long did failed cache requests take
- `prebid_cache.creative_size.` - histogram tracking creative sizes for specific type
+- `prebid_cache.creative_ttl.` - histogram tracking creative TTL for specific type
## Prebid Cache per-account metrics
- `account..prebid_cache.requests.ok` - timer tracking how long did successful cache requests take when incoming request was from ``
- `account..prebid_cache.requests.err` - timer tracking how long did failed cache requests take when incoming request was from ``
- `account..prebid_cache.creative_size.` - histogram tracking creative sizes for specific type when incoming request was from ``
+- `account..prebid_cache.creative_ttl.` - histogram tracking creative TTL for specific type when incoming request was from ``
## /cookie_sync endpoint metrics
- `cookie_sync_requests` - number of requests received
@@ -132,30 +140,16 @@ Following metrics are collected and submitted if account is configured with `det
- `analytics.<reporter-name>.(auction|amp|video|cookie_sync|event|setuid).ok` - number of successfully processed event requests
- `analytics.<reporter-name>.(auction|amp|video|cookie_sync|event|setuid).timeout` - number of event requests that failed due to timeout
- `analytics.<reporter-name>.(auction|amp|video|cookie_sync|event|setuid).err` - number of event requests that failed with errors
-- `analytics.<reporter-name>.(auction|amp|video|cookie_sync|event|setuid).badinput` - number of event requests, rejected with bad input cause
-
-## win notifications
-- `win_notifications` - total number of win notifications.
-- `win_requests` - total number of requests sent to user service for win notifications.
-- `win_request_preparation_failed` - number of request failed validation and were not sent.
-- `win_request_time` - latency between request to user service and response for win notifications.
-- `win_request_failed` - number of failed request sent to user service for win notifications.
-- `win_request_successful` - number of successful request sent to user service for win notifications.
-
-## user details
-- `user_details_requests` - total number of requests sent to user service to get user details.
-- `user_details_request_preparation_failed` - number of request failed validation and were not sent.
-- `user_details_request_time` - latency between request to user service and response to get user details.
-- `user_details_request_failed` - number of failed request sent to user service to get user details.
-- `user_details_request_successful` - number of successful request sent to user service to get user details.
-
-## Programmatic guaranteed metrics
-- `pg.planner_lineitems_received` - number of line items received from general planner.
-- `pg.planner_requests` - total number of requests sent to general planner.
-- `pg.planner_request_failed` - number of failed request sent to general planner.
-- `pg.planner_request_successful` - number of successful requests sent to general planner.
-- `pg.planner_request_time` - latency between request to general planner and its successful (200 OK) response.
-- `pg.delivery_requests` - total number of requests to delivery stats service.
-- `pg.delivery_request_failed` - number of failed requests to delivery stats service.
-- `pg.delivery_request_successful` - number of successful requests to delivery stats service.
-- `pg.delivery_request_time` - latency between request to delivery stats and its successful (200 OK) response.
+- `analytics.<reporter-name>.(auction|amp|video|cookie_sync|event|setuid).badinput` - number of event requests rejected due to bad input
+
+## Modules metrics
+- `modules.module.<module-code>.stage.<stage>.hook.<hook-impl-code>.call` - number of times the hook is called
+- `modules.module.<module-code>.stage.<stage>.hook.<hook-impl-code>.duration` - timer tracking the called hook execution time
+- `modules.module.<module-code>.stage.<stage>.hook.<hook-impl-code>.success.(noop|update|reject|no-invocation)` - number of times the hook is called successfully with the action applied
+- `modules.module.<module-code>.stage.<stage>.hook.<hook-impl-code>.(failure|timeout|execution-error)` - number of times the hook execution failed
+
+## Modules per-account metrics
+- `account.<account-id>.modules.module.<module-code>.call` - number of times the module is called
+- `account.<account-id>.modules.module.<module-code>.duration` - timer tracking the called module execution time
+- `account.<account-id>.modules.module.<module-code>.success.(noop|update|reject|no-invocation)` - number of times the module is called successfully with the action applied
+- `account.<account-id>.modules.module.<module-code>.failure` - number of times the module execution failed
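To make the dotted patterns above easier to read, here is a minimal, hand-written sketch of how a few of the account- and module-scoped metric names resolve once the placeholders are filled in. The account id `1001` and module code `ortb2-blocking` are illustrative values only, not defaults shipped with Prebid Server.

```java
public class MetricNameExamples {

    public static void main(String[] args) {
        // Illustrative placeholder values only; they are not defaults shipped with Prebid Server
        final String accountId = "1001";
        final String moduleCode = "ortb2-blocking";

        // Per-account request metric from the "detailed" verbosity section above
        System.out.println("account." + accountId + ".requests.type.openrtb2-web");

        // Per-account module metrics from the list above
        System.out.println("account." + accountId + ".modules.module." + moduleCode + ".call");
        System.out.println("account." + accountId + ".modules.module." + moduleCode + ".duration");
        System.out.println("account." + accountId + ".modules.module." + moduleCode + ".failure");
    }
}
```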
diff --git a/extra/bundle/pom.xml b/extra/bundle/pom.xml
index b48eaf7780d..5d44255ca2d 100644
--- a/extra/bundle/pom.xml
+++ b/extra/bundle/pom.xml
@@ -5,7 +5,7 @@
<groupId>org.prebid</groupId>
<artifactId>prebid-server-aggregator</artifactId>
- <version>2.13.0-SNAPSHOT</version>
+ <version>3.39.0-SNAPSHOT</version>
<relativePath>../../extra/pom.xml</relativePath>
@@ -14,15 +14,6 @@
prebid-server-bundle
Creates bundle (fat jar) with PBS-Core and other submodules listed as dependency
-
- UTF-8
- UTF-8
- 17
- ${java.version}
- ${java.version}
- 2.5.6
-
-
<groupId>org.prebid</groupId>
@@ -34,6 +25,11 @@
<artifactId>confiant-ad-quality</artifactId>
<version>${project.version}</version>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>fiftyone-devicedetection</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<groupId>org.prebid.server.hooks.modules</groupId>
<artifactId>ortb2-blocking</artifactId>
@@ -44,6 +40,41 @@
<artifactId>pb-richmedia-filter</artifactId>
<version>${project.version}</version>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>pb-response-correction</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>greenbids-real-time-data</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>pb-request-correction</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>optable-targeting</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>wurfl-devicedetection</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>live-intent-omni-channel-identity</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.prebid.server.hooks.modules</groupId>
+ <artifactId>pb-rule-engine</artifactId>
+ <version>${project.version}</version>
+ </dependency>
diff --git a/extra/modules/confiant-ad-quality/pom.xml b/extra/modules/confiant-ad-quality/pom.xml
index e04ca09ea57..1d86482129b 100644
--- a/extra/modules/confiant-ad-quality/pom.xml
+++ b/extra/modules/confiant-ad-quality/pom.xml
@@ -5,7 +5,7 @@
<groupId>org.prebid.server.hooks.modules</groupId>
<artifactId>all-modules</artifactId>
- <version>2.13.0-SNAPSHOT</version>
+ <version>3.39.0-SNAPSHOT</version>
<artifactId>confiant-ad-quality</artifactId>
@@ -17,7 +17,6 @@
<groupId>io.vertx</groupId>
<artifactId>vertx-redis-client</artifactId>
- <version>3.9.10</version>
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/config/ConfiantAdQualityModuleConfiguration.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/config/ConfiantAdQualityModuleConfiguration.java
index 7978153c34a..37ed7c9ec10 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/config/ConfiantAdQualityModuleConfiguration.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/config/ConfiantAdQualityModuleConfiguration.java
@@ -34,7 +34,8 @@ public class ConfiantAdQualityModuleConfiguration {
ConfiantAdQualityModule confiantAdQualityModule(
@Value("${hooks.modules.confiant-ad-quality.api-key}") String apiKey,
@Value("${hooks.modules.confiant-ad-quality.scan-state-check-interval}") int scanStateCheckInterval,
- @Value("${hooks.modules.confiant-ad-quality.bidders-to-exclude-from-scan}") List<String> biddersToExcludeFromScan,
+ @Value("${hooks.modules.confiant-ad-quality.bidders-to-exclude-from-scan}")
+ List<String> biddersToExcludeFromScan,
RedisConfig redisConfig,
RedisRetryConfig retryConfig,
Vertx vertx,
@@ -43,13 +44,24 @@ ConfiantAdQualityModule confiantAdQualityModule(
final RedisConnectionConfig writeNodeConfig = redisConfig.getWriteNode();
final RedisClient writeRedisNode = new RedisClient(
- vertx, writeNodeConfig.getHost(), writeNodeConfig.getPort(), writeNodeConfig.getPassword(), retryConfig, "write node");
+ vertx,
+ writeNodeConfig.getHost(),
+ writeNodeConfig.getPort(),
+ writeNodeConfig.getPassword(),
+ retryConfig,
+ "write node");
final RedisConnectionConfig readNodeConfig = redisConfig.getReadNode();
final RedisClient readRedisNode = new RedisClient(
- vertx, readNodeConfig.getHost(), readNodeConfig.getPort(), readNodeConfig.getPassword(), retryConfig, "read node");
+ vertx,
+ readNodeConfig.getHost(),
+ readNodeConfig.getPort(),
+ readNodeConfig.getPassword(),
+ retryConfig,
+ "read node");
final BidsScanner bidsScanner = new BidsScanner(writeRedisNode, readRedisNode, apiKey, objectMapper);
- final RedisScanStateChecker redisScanStateChecker = new RedisScanStateChecker(bidsScanner, scanStateCheckInterval, vertx);
+ final RedisScanStateChecker redisScanStateChecker = new RedisScanStateChecker(
+ bidsScanner, scanStateCheckInterval, vertx);
final Promise scannerPromise = Promise.promise();
scannerPromise.future().onComplete(r -> redisScanStateChecker.run());
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapper.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapper.java
index 57eac3d3620..47c73e0077c 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapper.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapper.java
@@ -3,10 +3,10 @@
import com.iab.openrtb.response.Bid;
import org.prebid.server.auction.model.BidderResponse;
import org.prebid.server.bidder.model.BidderBid;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.ActivityImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.AppliedToImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.ResultImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.TagsImpl;
+import org.prebid.server.hooks.execution.v1.analytics.ActivityImpl;
+import org.prebid.server.hooks.execution.v1.analytics.AppliedToImpl;
+import org.prebid.server.hooks.execution.v1.analytics.ResultImpl;
+import org.prebid.server.hooks.execution.v1.analytics.TagsImpl;
import org.prebid.server.hooks.v1.analytics.AppliedTo;
import org.prebid.server.hooks.v1.analytics.Result;
import org.prebid.server.hooks.v1.analytics.Tags;
@@ -24,6 +24,9 @@ public class AnalyticsMapper {
private static final String INSPECTED_HAS_ISSUE = "inspected-has-issue";
private static final String INSPECTED_NO_ISSUES = "inspected-no-issues";
+ private AnalyticsMapper() {
+ }
+
public static Tags toAnalyticsTags(List<BidderResponse> bidderResponsesWithIssues,
List<BidderResponse> bidderResponsesWithoutIssues,
List<BidderResponse> bidderResponsesNotScanned) {
@@ -31,7 +34,10 @@ public static Tags toAnalyticsTags(List bidderResponsesWithIssue
return TagsImpl.of(Collections.singletonList(ActivityImpl.of(
AD_QUALITY_SCAN,
SUCCESS_STATUS,
- toActivityResults(bidderResponsesWithIssues, bidderResponsesWithoutIssues, bidderResponsesNotScanned))));
+ toActivityResults(
+ bidderResponsesWithIssues,
+ bidderResponsesWithoutIssues,
+ bidderResponsesNotScanned))));
}
private static List<Result> toActivityResults(List<BidderResponse> bidderResponsesWithIssues,
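Since this change only swaps the module-local `*Impl` analytics classes for the shared `org.prebid.server.hooks.execution.v1.analytics` ones, the shape of the tags the mapper produces stays the same. As a rough sketch of that shape, assuming a hypothetical bidder `someBidder` and imp id `someImp` (the `ad-scan`, `success` and `inspected-has-issue` strings are the ones used by this module):

```java
import org.prebid.server.hooks.execution.v1.analytics.ActivityImpl;
import org.prebid.server.hooks.execution.v1.analytics.AppliedToImpl;
import org.prebid.server.hooks.execution.v1.analytics.ResultImpl;
import org.prebid.server.hooks.execution.v1.analytics.TagsImpl;
import org.prebid.server.hooks.v1.analytics.Tags;

import java.util.List;

public class AnalyticsTagsShapeSketch {

    public static Tags example() {
        // One "ad-scan" activity with a single result; bidder and imp id are placeholders
        return TagsImpl.of(List.of(ActivityImpl.of(
                "ad-scan",
                "success",
                List.of(ResultImpl.of(
                        "inspected-has-issue",
                        null,
                        AppliedToImpl.builder()
                                .bidders(List.of("someBidder"))
                                .impIds(List.of("someImp"))
                                .build())))));
    }
}
```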
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapper.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapper.java
index cf4f3557862..094b7e6b494 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapper.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapper.java
@@ -13,16 +13,19 @@
public class BidsMapper {
- public static RedisBidsData toRedisBidsFromBidResponses(
- BidRequest bidRequest,
- List<BidderResponse> bidderResponses) {
+ private BidsMapper() {
+ }
+
+ public static RedisBidsData toRedisBidsFromBidResponses(BidRequest bidRequest,
+ List<BidderResponse> bidderResponses) {
- final List<RedisBidResponseData> confiantBidResponses = bidderResponses
- .stream().map(bidResponse -> RedisBidResponseData
+ final List<RedisBidResponseData> confiantBidResponses = bidderResponses.stream()
+ .map(bidResponse -> RedisBidResponseData
.builder()
.dspId(bidResponse.getBidder())
.bidresponse(toBidResponseFromBidderResponse(bidRequest, bidResponse))
- .build()).toList();
+ .build())
+ .toList();
return RedisBidsData.builder()
.breq(bidRequest)
@@ -30,13 +33,12 @@ public static RedisBidsData toRedisBidsFromBidResponses(
.build();
}
- private static BidResponse toBidResponseFromBidderResponse(
- BidRequest bidRequest,
- BidderResponse bidderResponse) {
+ private static BidResponse toBidResponseFromBidderResponse(BidRequest bidRequest,
+ BidderResponse bidderResponse) {
return BidResponse.builder()
.id(bidRequest.getId())
- .cur(bidRequest.getCur().get(0))
+ .cur(bidRequest.getCur().getFirst())
.seatbid(Collections.singletonList(SeatBid.builder()
.bid(bidderResponse.getSeatBid().getBids().stream().map(BidderBid::getBid).toList())
.build()))
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanner.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanner.java
index d8b9657e22d..1b3afe3092d 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanner.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanner.java
@@ -59,13 +59,18 @@ public Future submitBids(RedisBidsData bids) {
final RedisAPI readRedisNodeAPI = this.readRedisNode.getRedisAPI();
final boolean shouldSubmit = !isScanDisabled
- && readRedisNodeAPI != null && bids.getBresps().size() > 0;
+ && readRedisNodeAPI != null && !bids.getBresps().isEmpty();
if (shouldSubmit) {
readRedisNodeAPI.get("function_submit_bids", submitHash -> {
final Object submitHashResult = submitHash.result();
if (submitHashResult != null) {
- final List<String> readArgs = List.of(submitHashResult.toString(), "0", toBidsAsJson(bids), apiKey, "true");
+ final List<String> readArgs = List.of(
+ submitHashResult.toString(),
+ "0",
+ toBidsAsJson(bids),
+ apiKey,
+ "true");
readRedisNodeAPI.evalsha(readArgs, response -> {
if (response.result() != null) {
@@ -120,7 +125,7 @@ public Future isScanDisabledFlag() {
if (redisAPI != null) {
redisAPI.get("scan-disabled", scanDisabledValue -> {
final Response scanDisabled = scanDisabledValue.result();
- isDisabled.complete(scanDisabled != null && scanDisabled.toString().equals("true"));
+ isDisabled.complete(scanDisabled != null && "true".equals(scanDisabled.toString()));
});
return isDisabled.future();
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisClient.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisClient.java
index f03d07ca33c..d1b424f314e 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisClient.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisClient.java
@@ -4,13 +4,13 @@
import io.vertx.core.Handler;
import io.vertx.core.Promise;
import io.vertx.core.Vertx;
-import io.vertx.core.logging.Logger;
-import io.vertx.core.logging.LoggerFactory;
import io.vertx.redis.client.Redis;
import io.vertx.redis.client.RedisAPI;
import io.vertx.redis.client.RedisConnection;
import io.vertx.redis.client.RedisOptions;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.RedisRetryConfig;
+import org.prebid.server.log.Logger;
+import org.prebid.server.log.LoggerFactory;
public class RedisClient {
@@ -45,7 +45,7 @@ public RedisClient(
public void start(Promise startFuture) {
createRedisClient(onCreate -> {
if (onCreate.succeeded()) {
- logger.info("Confiant Redis {0} connection is established", type);
+ logger.info("Confiant Redis {} connection is established", type);
startFuture.tryComplete();
}
}, false);
@@ -92,7 +92,7 @@ private void attemptReconnect(int retry, Handler> h
if (retry > (retryConfig.getShortIntervalAttempts() + retryConfig.getLongIntervalAttempts())) {
logger.info("Confiant Redis connection is not established");
} else {
- long backoff = retry < retryConfig.getShortIntervalAttempts()
+ final long backoff = retry < retryConfig.getShortIntervalAttempts()
? retryConfig.getShortInterval()
: retryConfig.getLongInterval();
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParser.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParser.java
index a516497146d..4dfca9b2449 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParser.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParser.java
@@ -2,10 +2,10 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
-import io.vertx.core.logging.Logger;
-import io.vertx.core.logging.LoggerFactory;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.BidScanResult;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.RedisError;
+import org.prebid.server.log.Logger;
+import org.prebid.server.log.LoggerFactory;
import java.util.Arrays;
import java.util.Collections;
@@ -36,7 +36,7 @@ public BidsScanResult parseBidsScanResult(String redisResponse) {
} catch (JsonProcessingException resultParse) {
String message;
try {
- RedisError errorResponse = objectMapper.readValue(redisResponse, RedisError.class);
+ final RedisError errorResponse = objectMapper.readValue(redisResponse, RedisError.class);
message = String.format("Redis error - %s: %s", errorResponse.getCode(), errorResponse.getMessage());
} catch (JsonProcessingException errorParse) {
message = String.format("Error during parse redis response: %s", redisResponse);
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/model/RedisRetryConfig.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/model/RedisRetryConfig.java
index 60034a8345d..3c7f83d7164 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/model/RedisRetryConfig.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/model/RedisRetryConfig.java
@@ -5,15 +5,26 @@
@Data
public class RedisRetryConfig {
- /** Maximum attempts with short interval value to try to reconnect to Confiant's Redis server in case any connection error happens */
+ /**
+ * Maximum number of short-interval attempts to reconnect to
+ * Confiant's Redis server in case any connection error happens
+ */
int shortIntervalAttempts;
- /** Short time interval in milliseconds after which another one attempt to connect to Redis will be executed */
+ /**
+ * Short time interval in milliseconds after which another attempt to connect to Redis will be made
+ */
int shortInterval;
- /** Maximum attempts with long interval value to try to reconnect to Confiant's Redis server in case any connection error happens. This attempts are used when short-attempts were not successful */
+ /**
+ * Maximum number of long-interval attempts to reconnect to
+ * Confiant's Redis server in case any connection error happens.
+ * These attempts are used when the short-interval attempts were not successful
+ */
int longIntervalAttempts;
- /** Long time interval in milliseconds after which another one attempt to connect to Redis will be executed */
+ /**
+ * Long time interval in milliseconds after which another attempt to connect to Redis will be made
+ */
int longInterval;
}
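The four settings documented above feed the reconnect loop shown earlier in `RedisClient.attemptReconnect`. A simplified sketch of how they translate into a backoff schedule follows; it is not the module's actual retry code, and the values in `main` are made up for illustration.

```java
public class RedisRetryBackoffSketch {

    // Mirrors the backoff selection in RedisClient.attemptReconnect: while the retry counter is
    // below shortIntervalAttempts the wait is shortInterval ms, afterwards it is longInterval ms;
    // retrying stops once shortIntervalAttempts + longIntervalAttempts is exceeded.
    static long backoffMillis(int retry, int shortIntervalAttempts, long shortInterval, long longInterval) {
        return retry < shortIntervalAttempts ? shortInterval : longInterval;
    }

    public static void main(String[] args) {
        // Hypothetical values: quick retries every 100 ms at first, then slower retries every 5000 ms
        for (int retry = 0; retry < 5; retry++) {
            System.out.println("retry " + retry + " -> wait " + backoffMillis(retry, 3, 100L, 5000L) + " ms");
        }
    }
}
```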
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHook.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHook.java
index 4cf66880bef..d9a2146852e 100644
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHook.java
+++ b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHook.java
@@ -4,6 +4,7 @@
import com.iab.openrtb.request.Device;
import com.iab.openrtb.request.User;
import io.vertx.core.Future;
+import org.apache.commons.collections4.ListUtils;
import org.prebid.server.activity.Activity;
import org.prebid.server.activity.ComponentType;
import org.prebid.server.activity.infrastructure.payload.ActivityInvocationPayload;
@@ -12,13 +13,13 @@
import org.prebid.server.auction.model.AuctionContext;
import org.prebid.server.auction.model.BidderResponse;
import org.prebid.server.auction.privacy.enforcement.mask.UserFpdActivityMask;
+import org.prebid.server.hooks.execution.v1.InvocationResultImpl;
import org.prebid.server.hooks.execution.v1.bidder.AllProcessedBidResponsesPayloadImpl;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.AnalyticsMapper;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.BidsMapper;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.BidsScanResult;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.BidsScanner;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.GroupByIssues;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.InvocationResultImpl;
import org.prebid.server.hooks.v1.InvocationAction;
import org.prebid.server.hooks.v1.InvocationResult;
import org.prebid.server.hooks.v1.InvocationStatus;
@@ -31,7 +32,6 @@
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
-import java.util.stream.Stream;
public class ConfiantAdQualityBidResponsesScanHook implements AllProcessedBidResponsesHook {
@@ -81,7 +81,7 @@ private BidRequest getBidRequest(AuctionInvocationContext auctionInvocationConte
final boolean disallowTransmitGeo = !auctionContext.getActivityInfrastructure()
.isAllowed(Activity.TRANSMIT_GEO, activityInvocationPayload);
- final User maskedUser = userFpdActivityMask.maskUser(bidRequest.getUser(), true, true, disallowTransmitGeo);
+ final User maskedUser = userFpdActivityMask.maskUser(bidRequest.getUser(), true, true);
final Device maskedDevice = userFpdActivityMask.maskDevice(bidRequest.getDevice(), true, disallowTransmitGeo);
return bidRequest.toBuilder()
@@ -117,7 +117,7 @@ private InvocationResult toInvocationResult(
.analyticsTags(AnalyticsMapper.toAnalyticsTags(
bidderResponsesWithIssues, bidderResponsesWithoutIssues, notScannedBidderResponses))
.payloadUpdate(payload -> AllProcessedBidResponsesPayloadImpl.of(
- Stream.concat(bidderResponsesWithoutIssues.stream(), notScannedBidderResponses.stream()).toList()));
+ ListUtils.union(bidderResponsesWithoutIssues, notScannedBidderResponses)));
return resultBuilder.build();
}
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/InvocationResultImpl.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/InvocationResultImpl.java
deleted file mode 100644
index 76fa5759644..00000000000
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/InvocationResultImpl.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package org.prebid.server.hooks.modules.com.confiant.adquality.v1.model;
-
-import lombok.Builder;
-import lombok.Value;
-import lombok.experimental.Accessors;
-import org.prebid.server.hooks.v1.InvocationAction;
-import org.prebid.server.hooks.v1.InvocationResult;
-import org.prebid.server.hooks.v1.InvocationStatus;
-import org.prebid.server.hooks.v1.PayloadUpdate;
-import org.prebid.server.hooks.v1.analytics.Tags;
-
-import java.util.List;
-
-@Accessors(fluent = true)
-@Builder
-@Value
-public class InvocationResultImpl implements InvocationResult {
-
- InvocationStatus status;
-
- String message;
-
- InvocationAction action;
-
- PayloadUpdate payloadUpdate;
-
- List errors;
-
- List warnings;
-
- List debugMessages;
-
- Object moduleContext;
-
- Tags analyticsTags;
-}
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/ActivityImpl.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/ActivityImpl.java
deleted file mode 100644
index 4453cb34e12..00000000000
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/ActivityImpl.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics;
-
-import lombok.Value;
-import lombok.experimental.Accessors;
-import org.prebid.server.hooks.v1.analytics.Activity;
-import org.prebid.server.hooks.v1.analytics.Result;
-
-import java.util.List;
-
-@Accessors(fluent = true)
-@Value(staticConstructor = "of")
-public class ActivityImpl implements Activity {
-
- String name;
-
- String status;
-
- List results;
-}
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/AppliedToImpl.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/AppliedToImpl.java
deleted file mode 100644
index 34beae0b73b..00000000000
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/AppliedToImpl.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics;
-
-import lombok.Builder;
-import lombok.Value;
-import lombok.experimental.Accessors;
-import org.prebid.server.hooks.v1.analytics.AppliedTo;
-
-import java.util.List;
-
-@Accessors(fluent = true)
-@Value
-@Builder
-public class AppliedToImpl implements AppliedTo {
-
- List impIds;
-
- List bidders;
-
- boolean request;
-
- boolean response;
-
- List bidIds;
-}
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/ResultImpl.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/ResultImpl.java
deleted file mode 100644
index 439552f562f..00000000000
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/ResultImpl.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics;
-
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import lombok.Value;
-import lombok.experimental.Accessors;
-import org.prebid.server.hooks.v1.analytics.AppliedTo;
-import org.prebid.server.hooks.v1.analytics.Result;
-
-@Accessors(fluent = true)
-@Value(staticConstructor = "of")
-public class ResultImpl implements Result {
-
- String status;
-
- ObjectNode values;
-
- AppliedTo appliedTo;
-}
diff --git a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/TagsImpl.java b/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/TagsImpl.java
deleted file mode 100644
index 1c01790d6b8..00000000000
--- a/extra/modules/confiant-ad-quality/src/main/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/model/analytics/TagsImpl.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics;
-
-import lombok.Value;
-import lombok.experimental.Accessors;
-import org.prebid.server.hooks.v1.analytics.Activity;
-import org.prebid.server.hooks.v1.analytics.Tags;
-
-import java.util.List;
-
-@Accessors(fluent = true)
-@Value(staticConstructor = "of")
-public class TagsImpl implements Tags {
-
- List activities;
-}
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapperTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapperTest.java
index 0a017e08df1..16caae6a684 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapperTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/AnalyticsMapperTest.java
@@ -1,17 +1,18 @@
package org.prebid.server.hooks.modules.com.confiant.adquality.core;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.prebid.server.auction.model.BidderResponse;
+import org.prebid.server.hooks.execution.v1.analytics.ActivityImpl;
+import org.prebid.server.hooks.execution.v1.analytics.AppliedToImpl;
+import org.prebid.server.hooks.execution.v1.analytics.ResultImpl;
import org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.ActivityImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.AppliedToImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.ResultImpl;
import org.prebid.server.hooks.v1.analytics.Tags;
import java.util.List;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.prebid.server.hooks.modules.com.confiant.adquality.core.AnalyticsMapper.toAnalyticsTags;
public class AnalyticsMapperTest {
@@ -29,7 +30,10 @@ public void shouldMapBidsScanResultToAnalyticsTags() {
AdQualityModuleTestUtils.getBidderResponse("bidder_d", "imp_d", "bid_id_d"));
// when
- final Tags tags = AnalyticsMapper.toAnalyticsTags(bidderResponsesWithIssues, bidderResponsesWithoutIssues, bidderResponsesNotScanned);
+ final Tags tags = toAnalyticsTags(
+ bidderResponsesWithIssues,
+ bidderResponsesWithoutIssues,
+ bidderResponsesNotScanned);
// then
assertThat(tags.activities()).isEqualTo(singletonList(ActivityImpl.of(
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapperTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapperTest.java
index e0e1405403f..3d168eaeedd 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapperTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsMapperTest.java
@@ -3,7 +3,7 @@
import com.iab.openrtb.request.BidRequest;
import com.iab.openrtb.request.Imp;
import com.iab.openrtb.response.SeatBid;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.prebid.server.auction.model.BidderResponse;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.RedisBidResponseData;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.RedisBidsData;
@@ -41,9 +41,10 @@ public void shouldMapBidResponsesToRedisBids() {
assertThat(redisBidResponseData1.getBidresponse().getId()).isEqualTo(bidRequest.getId());
assertThat(redisBidResponseData1.getBidresponse().getCur()).isEqualTo(bidRequest.getCur().get(0));
assertThat(redisBidResponseData1.getBidresponse().getSeatbid()).hasSize(1);
- SeatBid seatBid1 = redisBidResponseData1.getBidresponse().getSeatbid().get(0);
+ final SeatBid seatBid1 = redisBidResponseData1.getBidresponse().getSeatbid().get(0);
assertThat(seatBid1.getBid()).hasSize(1);
- assertThat(seatBid1.getBid().get(0).getId()).isEqualTo(bidderResponse1.getSeatBid().getBids().get(0).getBid().getId());
+ assertThat(seatBid1.getBid().getFirst().getId())
+ .isEqualTo(bidderResponse1.getSeatBid().getBids().getFirst().getBid().getId());
final RedisBidResponseData redisBidResponseData2 = result.getBresps().get(1);
assertThat(redisBidResponseData2.getDspId()).isEqualTo(bidderResponse2.getBidder());
@@ -53,6 +54,7 @@ public void shouldMapBidResponsesToRedisBids() {
final SeatBid seatBid2 = redisBidResponseData2.getBidresponse().getSeatbid().get(0);
assertThat(seatBid2.getBid()).hasSize(1);
- assertThat(seatBid2.getBid().get(0).getId()).isEqualTo(bidderResponse2.getSeatBid().getBids().get(0).getBid().getId());
+ assertThat(seatBid2.getBid().getFirst().getId())
+ .isEqualTo(bidderResponse2.getSeatBid().getBids().getFirst().getBid().getId());
}
}
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanResultTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanResultTest.java
index 7ebc109907e..fd6377f9a18 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanResultTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScanResultTest.java
@@ -1,14 +1,14 @@
package org.prebid.server.hooks.modules.com.confiant.adquality.core;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.prebid.server.auction.model.BidderResponse;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.GroupByIssues;
-import org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils.getBidderResponse;
public class BidsScanResultTest {
@@ -17,7 +17,16 @@ public class BidsScanResultTest {
@Test
public void shouldProperlyGetIssuesMessage() {
// given
- final String redisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\", \"issues\": [{ \"value\": \"ads.deceivenetworks.net\", \"spec_name\": \"malicious_domain\", \"first_adinstance\": \"e91e8da982bb8b7f80100426\"}]}]]]";
+ final String redisResponse = """
+ [[[{
+ "tag_key": "key_a",
+ "imp_id": "imp_a",
+ "issues": [{
+ "value": "ads.deceivenetworks.net",
+ "spec_name": "malicious_domain",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""";
final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(redisResponse);
// when
@@ -25,7 +34,11 @@ public void shouldProperlyGetIssuesMessage() {
// then
assertThat(issues.size()).isEqualTo(1);
- assertThat(issues.get(0)).isEqualTo("key_a: [Issue(specName=malicious_domain, value=ads.deceivenetworks.net, firstAdinstance=e91e8da982bb8b7f80100426)]");
+ assertThat(issues.getFirst()).isEqualTo("""
+ key_a: [\
+ Issue(specName=malicious_domain, \
+ value=ads.deceivenetworks.net, \
+ firstAdinstance=e91e8da982bb8b7f80100426)]""");
}
@Test
@@ -39,16 +52,31 @@ public void shouldProperlyGetDebugMessage() {
// then
assertThat(messages.size()).isEqualTo(1);
- assertThat(messages.get(0)).isEqualTo("Error during parse redis response: invalid redis response");
+ assertThat(messages.getFirst()).isEqualTo("Error during parse redis response: invalid redis response");
}
@Test
public void shouldProperlyGroupBiddersByIssues() {
// given
- final String redisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\", \"issues\": [{ \"value\": \"ads.deceivenetworks.net\", \"spec_name\": \"malicious_domain\", \"first_adinstance\": \"e91e8da982bb8b7f80100426\"}]}],[{\"tag_key\": \"key_b\", \"imp_id\": \"imp_b\"}]]]";
+ final String redisResponse = """
+ [[
+ [{
+ "tag_key": "key_a",
+ "imp_id": "imp_a",
+ "issues": [{
+ "value": "ads.deceivenetworks.net",
+ "spec_name": "malicious_domain",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }],
+ [{
+ "tag_key": "key_b",
+ "imp_id": "imp_b"
+ }]
+ ]]""";
final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(redisResponse);
- final BidderResponse br1 = AdQualityModuleTestUtils.getBidderResponse("critio1", "1", "11");
- final BidderResponse br2 = AdQualityModuleTestUtils.getBidderResponse("critio2", "2", "12");
+ final BidderResponse br1 = getBidderResponse("critio1", "1", "11");
+ final BidderResponse br2 = getBidderResponse("critio2", "2", "12");
// when
final GroupByIssues groupByIssues = bidsScanResult.toGroupByIssues(List.of(br1, br2));
@@ -63,10 +91,20 @@ public void shouldProperlyGroupBiddersByIssues() {
@Test
public void shouldProperlyGroupBiddersByIssuesWithoutIssues() {
// given
- final String redisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\"}],[{\"tag_key\": \"key_b\", \"imp_id\": \"imp_b\"}]]]";
+ final String redisResponse = """
+ [[
+ [{
+ "tag_key": "key_a",
+ "imp_id": "imp_a"
+ }],
+ [{
+ "tag_key": "key_b",
+ "imp_id": "imp_b"
+ }]
+ ]]""";
final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(redisResponse);
- final BidderResponse br1 = AdQualityModuleTestUtils.getBidderResponse("critio1", "1", "11");
- final BidderResponse br2 = AdQualityModuleTestUtils.getBidderResponse("critio2", "2", "12");
+ final BidderResponse br1 = getBidderResponse("critio1", "1", "11");
+ final BidderResponse br2 = getBidderResponse("critio2", "2", "12");
// when
final GroupByIssues groupByIssues = bidsScanResult.toGroupByIssues(List.of(br1, br2));
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScannerTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScannerTest.java
index bd436d81f61..eaedc93e170 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScannerTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/BidsScannerTest.java
@@ -9,17 +9,15 @@
import io.vertx.redis.client.RedisAPI;
import io.vertx.redis.client.Response;
import io.vertx.redis.client.ResponseType;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnit;
-import org.mockito.junit.MockitoRule;
+import org.mockito.junit.jupiter.MockitoExtension;
import org.prebid.server.auction.model.BidderResponse;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.GroupByIssues;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.RedisBidResponseData;
import org.prebid.server.hooks.modules.com.confiant.adquality.model.RedisBidsData;
-import org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils;
import java.util.List;
@@ -27,12 +25,11 @@
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.verify;
+import static org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils.getBidderResponse;
+@ExtendWith(MockitoExtension.class)
public class BidsScannerTest {
- @Rule
- public final MockitoRule mockitoRule = MockitoJUnit.rule();
-
@Mock
private RedisClient writeRedisNode;
@@ -44,7 +41,7 @@ public class BidsScannerTest {
private BidsScanner bidsScannerTest;
- @Before
+ @BeforeEach
public void setUp() {
bidsScannerTest = new BidsScanner(writeRedisNode, readRedisNode, "api-key", new ObjectMapper());
}
@@ -121,9 +118,10 @@ public void shouldReturnEmptyScanResultWhenApiIsNotInitialized() {
public void shouldReturnEmptyScanResultWhenThereIsNoBidderResponses() {
// given
doReturn(redisAPI).when(readRedisNode).getRedisAPI();
+ final RedisBidsData redisBidsData = RedisBidsData.builder().bresps(List.of()).build();
// when
- final Future scanResult = bidsScannerTest.submitBids(RedisBidsData.builder().bresps(List.of()).build());
+ final Future scanResult = bidsScannerTest.submitBids(redisBidsData);
final GroupByIssues groupByIssues = scanResult.result().toGroupByIssues(List.of());
// then
@@ -135,7 +133,16 @@ public void shouldReturnEmptyScanResultWhenThereIsNoBidderResponses() {
@Test()
public void shouldReturnEmptyScanResultWhenThereIsSomeBidderResponseAndScanIsDisabled() {
// given
- final String redisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\", \"issues\": [{ \"value\": \"ads.deceivenetworks.net\", \"spec_name\": \"malicious_domain\", \"first_adinstance\": \"e91e8da982bb8b7f80100426\"}]}]]]";
+ final String redisResponse = """
+ [[[{
+ "tag_key": "key_a",
+ "imp_id": "imp_a",
+ "issues": [{
+ "value": "ads.deceivenetworks.net",
+ "spec_name": "malicious_domain",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""";
final RedisAPI redisAPI = getRedisEmulationWithAnswer(redisResponse);
final RedisBidsData bidsData = RedisBidsData.builder()
.breq(BidRequest.builder().build())
@@ -148,7 +155,7 @@ public void shouldReturnEmptyScanResultWhenThereIsSomeBidderResponseAndScanIsDis
// when
final Future scanResult = bidsScannerTest.submitBids(bidsData);
final GroupByIssues groupByIssues = scanResult.result()
- .toGroupByIssues(List.of(AdQualityModuleTestUtils.getBidderResponse("bidder-a", "imp-a", "imp-id-a")));
+ .toGroupByIssues(List.of(getBidderResponse("bidder-a", "imp-a", "imp-id-a")));
// then
assertThat(scanResult.succeeded()).isTrue();
@@ -159,7 +166,22 @@ public void shouldReturnEmptyScanResultWhenThereIsSomeBidderResponseAndScanIsDis
@Test()
public void shouldReturnRedisScanResultFromReadNodeWhenThereAreSomeBidderResponsesAndScanIsEnabled() {
// given
- final String redisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\", \"issues\": [{ \"value\": \"ads.deceivenetworks.net\", \"spec_name\": \"malicious_domain\", \"first_adinstance\": \"e91e8da982bb8b7f80100426\"}]}],[{\"tag_key\": \"key_b\", \"imp_id\": \"imp_b\"}]]]";
+ final String redisResponse = """
+ [[
+ [{
+ "tag_key": "key_a",
+ "imp_id": "imp_a",
+ "issues": [{
+ "value": "ads.deceivenetworks.net",
+ "spec_name": "malicious_domain",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }],
+ [{
+ "tag_key": "key_b",
+ "imp_id": "imp_b"
+ }]
+ ]]""";
final RedisAPI redisAPI = getRedisEmulationWithAnswer(redisResponse);
final RedisBidsData bidsData = RedisBidsData.builder()
.breq(BidRequest.builder().build())
@@ -174,8 +196,8 @@ public void shouldReturnRedisScanResultFromReadNodeWhenThereAreSomeBidderRespons
final Future scanResult = bidsScannerTest.submitBids(bidsData);
final GroupByIssues groupByIssues = scanResult.result()
.toGroupByIssues(List.of(
- AdQualityModuleTestUtils.getBidderResponse("bidder-a", "imp-a", "imp-id-a"),
- AdQualityModuleTestUtils.getBidderResponse("bidder-b", "imp-b", "imp-id-b")));
+ getBidderResponse("bidder-a", "imp-a", "imp-id-a"),
+ getBidderResponse("bidder-b", "imp-b", "imp-id-b")));
// then
assertThat(scanResult.succeeded()).isTrue();
@@ -186,7 +208,12 @@ public void shouldReturnRedisScanResultFromReadNodeWhenThereAreSomeBidderRespons
@Test()
public void shouldReturnRedisScanResultFromWriteNodeWhenReadNodeHasMissingResults() {
// given
- final String readRedisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\", \"ro_skipped\": \"true\"}]]]";
+ final String readRedisResponse = """
+ [[[{
+ "tag_key": "key_a",
+ "imp_id": "imp_a",
+ "ro_skipped": "true"
+ }]]]""";
final RedisAPI readRedisAPI = getRedisEmulationWithAnswer(readRedisResponse);
final RedisBidsData bidsData = RedisBidsData.builder()
.breq(BidRequest.builder().build())
@@ -197,14 +224,23 @@ public void shouldReturnRedisScanResultFromWriteNodeWhenReadNodeHasMissingResult
bidsScannerTest.enableScan();
doReturn(readRedisAPI).when(readRedisNode).getRedisAPI();
- final String writeRedisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\", \"issues\": [{ \"value\": \"ads.deceivenetworks.net\", \"spec_name\": \"malicious_domain\", \"first_adinstance\": \"e91e8da982bb8b7f80100426\"}]}]]]";
+ final String writeRedisResponse = """
+ [[[{
+ "tag_key": "key_a",
+ "imp_id": "imp_a",
+ "issues": [{
+ "value": "ads.deceivenetworks.net",
+ "spec_name": "malicious_domain",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""";
final RedisAPI writeRedisAPI = getRedisEmulationWithAnswer(writeRedisResponse);
doReturn(writeRedisAPI).when(writeRedisNode).getRedisAPI();
// when
final Future scanResult = bidsScannerTest.submitBids(bidsData);
final GroupByIssues groupByIssues = scanResult.result()
- .toGroupByIssues(List.of(AdQualityModuleTestUtils.getBidderResponse("bidder-a", "imp-a", "imp-id-a")));
+ .toGroupByIssues(List.of(getBidderResponse("bidder-a", "imp-a", "imp-id-a")));
// then
assertThat(scanResult.succeeded()).isTrue();
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParserTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParserTest.java
index 3fdf2e62236..28d889c48b6 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParserTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisParserTest.java
@@ -1,7 +1,7 @@
package org.prebid.server.hooks.modules.com.confiant.adquality.core;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
@@ -12,7 +12,17 @@ public class RedisParserTest {
@Test
public void shouldParseBidsScanResult() {
// given
- final String redisResponse = "[[[{\"tag_key\": \"key_a\", \"imp_id\": \"imp_a\"}]],[[{\"tag_key\": \"key_b\", \"imp_id\": \"imp_b\"}]]]";
+ final String redisResponse = """
+ [
+ [[{
+ "tag_key": "key_a",
+ "imp_id": "imp_a"
+ }]],
+ [[{
+ "tag_key": "key_b",
+ "imp_id": "imp_b"
+ }]]
+ ]""";
// when
final BidsScanResult actualScanResults = redisParser.parseBidsScanResult(redisResponse);
@@ -28,58 +38,59 @@ public void shouldParseBidsScanResult() {
@Test
public void shouldParseFullBidsScanResult() {
// given
- final String redisResponse = "[[[{\n" +
- " \"tag_key\": \"tg\",\n" +
- " \"imp_id\": \"123\",\n" +
- " \"known_creative\": true,\n" +
- " \"ro_skipped\": false,\n" +
- " \"issues\": [{\n" +
- " \"value\": \"ads.deceivenetworks.net\",\n" +
- " \"spec_name\": \"malicious_domain\",\n" +
- " \"first_adinstance\": \"e91e8da982bb8b7f80100426\"\n" +
- " }],\n" +
- " \"attributes\": {\n" +
- " \"is_ssl\": true,\n" +
- " \"ssl_error\": false,\n" +
- " \"width\": 600,\n" +
- " \"height\": 300,\n" +
- " \"anim\": 5,\n" +
- " \"network_load_startup\": 1024,\n" +
- " \"network_load_polite\": 1024,\n" +
- " \"vast\": {\n" +
- " \"redirects\": 3\n" +
- " },\n" +
- " \"brands\": [\n" +
- " \"Pfizer\"\n" +
- " ],\n" +
- " \"categories\": [\n" +
- " {\n" +
- " \"code\": \"CAT-2\",\n" +
- " \"name\": \"Health and Medical Services\"\n" +
- " },\n" +
- " {\n" +
- " \"code\": \"CAT-75\",\n" +
- " \"name\": \"Pharmaceutical Drugs\"\n" +
- " }\n" +
- " ]\n" +
- " },\n" +
- " \"metrics\": {\n" +
- " \"submitted\": \"2017-05-10T13:29:28-04:00\",\n" +
- " \"fetched\":\"2017-05-10T13:29:29-04:00\",\n" +
- " \"scanned\":\"2017-07-22T11:49:40-04:00\",\n" +
- " \"synchronized\": {\n" +
- " \"first\":\"2017-05-10T13:29:55-04:00\",\n" +
- " \"last\":\"2017-07-24T00:52:04-04:00\"\n" +
- " }\n" +
- " },\n" +
- " \"adinstance\": \"qwerty\"\n" +
- "}]]]";
+ final String redisResponse = """
+ [[[{
+ "tag_key": "tg",
+ "imp_id": "123",
+ "known_creative": true,
+ "ro_skipped": false,
+ "issues": [{
+ "value": "ads.deceivenetworks.net",
+ "spec_name": "malicious_domain",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }],
+ "attributes": {
+ "is_ssl": true,
+ "ssl_error": false,
+ "width": 600,
+ "height": 300,
+ "anim": 5,
+ "network_load_startup": 1024,
+ "network_load_polite": 1024,
+ "vast": {
+ "redirects": 3
+ },
+ "brands": [
+ "Pfizer"
+ ],
+ "categories": [
+ {
+ "code": "CAT-2",
+ "name": "Health and Medical Services"
+ },
+ {
+ "code": "CAT-75",
+ "name": "Pharmaceutical Drugs"
+ }
+ ]
+ },
+ "metrics": {
+ "submitted": "2017-05-10T13:29:28-04:00",
+ "fetched":"2017-05-10T13:29:29-04:00",
+ "scanned":"2017-07-22T11:49:40-04:00",
+ "synchronized": {
+ "first":"2017-05-10T13:29:55-04:00",
+ "last":"2017-07-24T00:52:04-04:00"
+ }
+ },
+ "adinstance": "qwerty"
+ }]]]""";
// when
final BidsScanResult actualScanResults = redisParser.parseBidsScanResult(redisResponse);
// then
- assertThat(actualScanResults.getBidScanResults().get(0).getTagKey()).isEqualTo("tg");
+ assertThat(actualScanResults.getBidScanResults().getFirst().getTagKey()).isEqualTo("tg");
assertThat(actualScanResults.getBidScanResults().size()).isEqualTo(1);
assertThat(actualScanResults.getDebugMessages().size()).isEqualTo(0);
}
@@ -87,14 +98,15 @@ public void shouldParseFullBidsScanResult() {
@Test
public void shouldParseBidsScanResultWithError() {
// given
- final String redisResponse = "{\"code\": \"123\", \"message\": \"error message\", \"error\": true, \"dsp_id\": \"cri\"}";
+ final String redisResponse = """
+ {"code": "123", "message": "error message", "error": true, "dsp_id": "cri"}""";
// when
final BidsScanResult actualScanResults = redisParser.parseBidsScanResult(redisResponse);
// then
assertThat(actualScanResults.getBidScanResults().size()).isEqualTo(0);
- assertThat(actualScanResults.getDebugMessages().get(0)).isEqualTo("Redis error - 123: error message");
+ assertThat(actualScanResults.getDebugMessages().getFirst()).isEqualTo("Redis error - 123: error message");
}
@Test
@@ -107,6 +119,7 @@ public void shouldParseBidsScanResultWithInvalidResponse() {
// then
assertThat(actualScanResults.getBidScanResults().size()).isEqualTo(0);
- assertThat(actualScanResults.getDebugMessages().get(0)).isEqualTo("Error during parse redis response: invalid redis response");
+ assertThat(actualScanResults.getDebugMessages().getFirst())
+ .isEqualTo("Error during parse redis response: invalid redis response");
}
}
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisScanStateCheckerTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisScanStateCheckerTest.java
index a7f5b10ca08..b42d3e38fa3 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisScanStateCheckerTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/core/RedisScanStateCheckerTest.java
@@ -2,28 +2,25 @@
import io.vertx.core.Future;
import io.vertx.core.Vertx;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnit;
-import org.mockito.junit.MockitoRule;
+import org.mockito.junit.jupiter.MockitoExtension;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
+@ExtendWith(MockitoExtension.class)
public class RedisScanStateCheckerTest {
- @Rule
- public final MockitoRule mockitoRule = MockitoJUnit.rule();
-
@Mock
private BidsScanner bidsScanner;
private RedisScanStateChecker scanStateChecker;
- @Before
+ @BeforeEach
public void setUp() {
scanStateChecker = new RedisScanStateChecker(bidsScanner, 1000L, Vertx.vertx());
}
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/util/AdQualityModuleTestUtils.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/util/AdQualityModuleTestUtils.java
index 865214fd648..594bbb08445 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/util/AdQualityModuleTestUtils.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/util/AdQualityModuleTestUtils.java
@@ -12,18 +12,24 @@
public class AdQualityModuleTestUtils {
+ private AdQualityModuleTestUtils() {
+ }
+
public static BidderResponse getBidderResponse(String bidderName, String impId, String bidId) {
- return BidderResponse.of(bidderName, BidderSeatBid.builder()
- .bids(Collections.singletonList(BidderBid.builder()
- .type(BidType.banner)
- .bid(Bid.builder()
- .id(bidId)
- .price(BigDecimal.valueOf(11))
- .impid(impId)
- .adm("adm")
- .adomain(List.of("www.goog.com", "www.gumgum.com"))
- .build())
- .build()))
- .build(), 11);
+ return BidderResponse.of(
+ bidderName,
+ BidderSeatBid.builder()
+ .bids(Collections.singletonList(BidderBid.builder()
+ .type(BidType.banner)
+ .bid(Bid.builder()
+ .id(bidId)
+ .price(BigDecimal.valueOf(11))
+ .impid(impId)
+ .adm("adm")
+ .adomain(List.of("www.goog.com", "www.gumgum.com"))
+ .build())
+ .build()))
+ .build(),
+ 11);
}
}
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHookTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHookTest.java
index 8746a3e10b2..47ca8d9b86f 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHookTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityBidResponsesScanHookTest.java
@@ -6,26 +6,24 @@
import com.iab.openrtb.request.Geo;
import com.iab.openrtb.request.User;
import io.vertx.core.Future;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnit;
-import org.mockito.junit.MockitoRule;
+import org.mockito.junit.jupiter.MockitoExtension;
import org.prebid.server.activity.infrastructure.ActivityInfrastructure;
import org.prebid.server.auction.model.AuctionContext;
import org.prebid.server.auction.model.BidderResponse;
import org.prebid.server.auction.privacy.enforcement.mask.UserFpdActivityMask;
import org.prebid.server.bidder.model.BidderSeatBid;
+import org.prebid.server.hooks.execution.v1.analytics.ActivityImpl;
+import org.prebid.server.hooks.execution.v1.analytics.AppliedToImpl;
+import org.prebid.server.hooks.execution.v1.analytics.ResultImpl;
import org.prebid.server.hooks.execution.v1.bidder.AllProcessedBidResponsesPayloadImpl;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.BidsMapper;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.BidsScanResult;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.BidsScanner;
import org.prebid.server.hooks.modules.com.confiant.adquality.core.RedisParser;
-import org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.ActivityImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.AppliedToImpl;
-import org.prebid.server.hooks.modules.com.confiant.adquality.v1.model.analytics.ResultImpl;
import org.prebid.server.hooks.v1.InvocationAction;
import org.prebid.server.hooks.v1.InvocationResult;
import org.prebid.server.hooks.v1.InvocationStatus;
@@ -42,12 +40,11 @@
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.verify;
+import static org.prebid.server.hooks.modules.com.confiant.adquality.util.AdQualityModuleTestUtils.getBidderResponse;
+@ExtendWith(MockitoExtension.class)
public class ConfiantAdQualityBidResponsesScanHookTest {
- @Rule
- public final MockitoRule mockitoRule = MockitoJUnit.rule();
-
@Mock
private BidsScanner bidsScanner;
@@ -67,18 +64,14 @@ public class ConfiantAdQualityBidResponsesScanHookTest {
private final RedisParser redisParser = new RedisParser(new ObjectMapper());
- @Before
+ @BeforeEach
public void setUp() {
target = new ConfiantAdQualityBidResponsesScanHook(bidsScanner, List.of(), userFpdActivityMask);
}
@Test
public void codeShouldHaveValidConfigsWhenInitialized() {
- // given
-
- // when
-
- // then
+ // when and then
assertThat(target.code()).isEqualTo("confiant-ad-quality-bid-responses-scan-hook");
}
@@ -112,12 +105,19 @@ public void callShouldReturnResultWithNoActionWhenRedisHasNoAnswer() {
@Test
public void callShouldReturnResultWithUpdateActionWhenRedisHasFoundSomeIssues() {
// given
- final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(
- "[[[{\"tag_key\": \"tag\", \"issues\":[{\"spec_name\":\"malicious_domain\",\"value\":\"ads.deceivenetworks.net\",\"first_adinstance\":\"e91e8da982bb8b7f80100426\"}]}]]]");
+ final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult("""
+ [[[{
+ "tag_key": "tag",
+ "issues": [{
+ "spec_name": "malicious_domain",
+ "value": "ads.deceivenetworks.net",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""");
doReturn(Future.succeededFuture(bidsScanResult)).when(bidsScanner).submitBids(any());
doReturn(getAuctionContext()).when(auctionInvocationContext).auctionContext();
- doReturn(List.of(AdQualityModuleTestUtils.getBidderResponse("bidder_a", "imp_a", "bid_id_a")))
+ doReturn(List.of(getBidderResponse("bidder_a", "imp_a", "bid_id_a")))
.when(allProcessedBidResponsesPayload).bidResponses();
// when
@@ -132,7 +132,12 @@ public void callShouldReturnResultWithUpdateActionWhenRedisHasFoundSomeIssues()
assertThat(result).isNotNull();
assertThat(result.status()).isEqualTo(InvocationStatus.success);
assertThat(result.action()).isEqualTo(InvocationAction.update);
- assertThat(result.errors().get(0)).isEqualTo("tag: [Issue(specName=malicious_domain, value=ads.deceivenetworks.net, firstAdinstance=e91e8da982bb8b7f80100426)]");
+ assertThat(result.errors().getFirst())
+ .isEqualTo("""
+ tag: [\
+ Issue(specName=malicious_domain, \
+ value=ads.deceivenetworks.net, \
+ firstAdinstance=e91e8da982bb8b7f80100426)]""");
assertThat(result.debugMessages()).isNull();
assertThat(result.analyticsTags().activities()).isEqualTo(singletonList(ActivityImpl.of(
"ad-scan", "success", List.of(
@@ -147,8 +152,15 @@ public void callShouldReturnResultWithUpdateActionWhenRedisHasFoundSomeIssues()
@Test
public void callShouldSubmitBidsToScanWhenBidsCome() {
// given
- final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(
- "[[[{\"tag_key\": \"tag\", \"issues\":[{\"spec_name\":\"malicious_domain\",\"value\":\"ads.deceivenetworks.net\",\"first_adinstance\":\"e91e8da982bb8b7f80100426\"}]}]]]");
+ final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult("""
+ [[[{
+ "tag_key": "tag",
+ "issues": [{
+ "spec_name": "malicious_domain",
+ "value": "ads.deceivenetworks.net",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""");
doReturn(Future.succeededFuture(bidsScanResult)).when(bidsScanner).submitBids(any());
doReturn(getAuctionContext()).when(auctionInvocationContext).auctionContext();
@@ -161,16 +173,32 @@ public void callShouldSubmitBidsToScanWhenBidsCome() {
}
@Test
- public void callShouldSubmitToScanBidsWhichAreNotPartOfTheExcludeToScanListWhenHookIsConfiguredWithExcludeToScanList() {
+ public void callShouldSubmitBidsWhichAreNotPartOfTheExcludeToScanListWhenHookIsConfiguredWithExcludeToScanList() {
// given
final String secureBidderName = "securebidder";
final String notSecureBadBidderName = "notsecurebadbidder";
final String notSecureGoodBidderName = "notsecuregoodbidder";
- final BidderResponse secureBidderResponse = AdQualityModuleTestUtils.getBidderResponse(secureBidderName, "imp_a", "bid_id_a");
- final BidderResponse notSecureBadBidderResponse = AdQualityModuleTestUtils.getBidderResponse(notSecureBadBidderName, "imp_b", "bid_id_b");
- final BidderResponse notSecureGoodBidderResponse = AdQualityModuleTestUtils.getBidderResponse(notSecureGoodBidderName, "imp_c", "bid_id_c");
- final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(
- "[[[{\"tag_key\": \"tag\", \"issues\":[{\"spec_name\":\"malicious_domain\",\"value\":\"ads.deceivenetworks.net\",\"first_adinstance\":\"e91e8da982bb8b7f80100426\"}]}]],[[{\"tag_key\": \"key_b\", \"imp_id\": \"imp_b\", \"issues\": []}]]]]");
+ final BidderResponse secureBidderResponse = getBidderResponse(secureBidderName, "imp_a", "bid_id_a");
+ final BidderResponse notSecureBadBidderResponse =
+ getBidderResponse(notSecureBadBidderName, "imp_b", "bid_id_b");
+ final BidderResponse notSecureGoodBidderResponse =
+ getBidderResponse(notSecureGoodBidderName, "imp_c", "bid_id_c");
+ final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult("""
+ [
+ [[{
+ "tag_key": "tag",
+ "issues": [{
+ "spec_name": "malicious_domain",
+ "value": "ads.deceivenetworks.net",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]],
+ [[{
+ "tag_key": "key_b",
+ "imp_id": "imp_b",
+ "issues": []
+ }]]
+ ]""");
final AuctionContext auctionContext = AuctionContext.builder()
.activityInfrastructure(activityInfrastructure)
.bidRequest(BidRequest.builder().cur(List.of("USD")).build())
@@ -178,7 +206,8 @@ public void callShouldSubmitToScanBidsWhichAreNotPartOfTheExcludeToScanListWhenH
target = new ConfiantAdQualityBidResponsesScanHook(bidsScanner, List.of(secureBidderName), userFpdActivityMask);
- doReturn(List.of(secureBidderResponse, notSecureBadBidderResponse, notSecureGoodBidderResponse)).when(allProcessedBidResponsesPayload).bidResponses();
+ doReturn(List.of(secureBidderResponse, notSecureBadBidderResponse, notSecureGoodBidderResponse))
+ .when(allProcessedBidResponsesPayload).bidResponses();
doReturn(Future.succeededFuture(bidsScanResult)).when(bidsScanner).submitBids(any());
doReturn(auctionContext).when(auctionInvocationContext).auctionContext();
@@ -187,9 +216,9 @@ public void callShouldSubmitToScanBidsWhichAreNotPartOfTheExcludeToScanListWhenH
.call(allProcessedBidResponsesPayload, auctionInvocationContext);
// then
- verify(bidsScanner).submitBids(
- BidsMapper.toRedisBidsFromBidResponses(auctionContext.getBidRequest(), List.of(notSecureBadBidderResponse, notSecureGoodBidderResponse))
- );
+ verify(bidsScanner).submitBids(BidsMapper.toRedisBidsFromBidResponses(
+ auctionContext.getBidRequest(),
+ List.of(notSecureBadBidderResponse, notSecureGoodBidderResponse)));
final PayloadUpdate payloadUpdate = invocationResult.result().payloadUpdate();
final AllProcessedBidResponsesPayloadImpl initPayloadToUpdate = AllProcessedBidResponsesPayloadImpl.of(
@@ -224,11 +253,19 @@ public void callShouldSubmitToScanOnlyBidsWithDataWhenSomeBiddersRespondWithEmpt
final String secureBidderName = "securebidder";
final String notSecureBadBidderName = "notsecurebadbidder";
final String emptyBidderName = "emptybidder";
- final BidderResponse secureBidderResponse = AdQualityModuleTestUtils.getBidderResponse(secureBidderName, "imp_a", "bid_id_a");
- final BidderResponse notSecureBadBidderResponse = AdQualityModuleTestUtils.getBidderResponse(notSecureBadBidderName, "imp_b", "bid_id_b");
- final BidderResponse emptyBidderResponse = getEmptyBidderResponse(emptyBidderName);
- final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(
- "[[[{\"tag_key\": \"tag\", \"issues\":[{\"spec_name\":\"malicious_domain\",\"value\":\"ads.deceivenetworks.net\",\"first_adinstance\":\"e91e8da982bb8b7f80100426\"}]}]]]");
+ final BidderResponse secureBidderResponse = getBidderResponse(secureBidderName, "imp_a", "bid_id_a");
+ final BidderResponse notSecureBadBidderResponse =
+ getBidderResponse(notSecureBadBidderName, "imp_b", "bid_id_b");
+ final BidderResponse emptyBidderResponse = getEmptyBidderResponse();
+ final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult("""
+ [[[{
+ "tag_key": "tag",
+ "issues": [{
+ "spec_name": "malicious_domain",
+ "value": "ads.deceivenetworks.net",
+ "first_adinstance":"e91e8da982bb8b7f80100426"
+ }]
+ }]]]""");
final AuctionContext auctionContext = AuctionContext.builder()
.activityInfrastructure(activityInfrastructure)
.bidRequest(BidRequest.builder().cur(List.of("USD")).build())
@@ -236,7 +273,8 @@ public void callShouldSubmitToScanOnlyBidsWithDataWhenSomeBiddersRespondWithEmpt
target = new ConfiantAdQualityBidResponsesScanHook(bidsScanner, List.of(secureBidderName), userFpdActivityMask);
- doReturn(List.of(secureBidderResponse, notSecureBadBidderResponse, emptyBidderResponse)).when(allProcessedBidResponsesPayload).bidResponses();
+ doReturn(List.of(secureBidderResponse, notSecureBadBidderResponse, emptyBidderResponse))
+ .when(allProcessedBidResponsesPayload).bidResponses();
doReturn(Future.succeededFuture(bidsScanResult)).when(bidsScanner).submitBids(any());
doReturn(auctionContext).when(auctionInvocationContext).auctionContext();
@@ -245,9 +283,8 @@ public void callShouldSubmitToScanOnlyBidsWithDataWhenSomeBiddersRespondWithEmpt
.call(allProcessedBidResponsesPayload, auctionInvocationContext);
// then
- verify(bidsScanner).submitBids(
- BidsMapper.toRedisBidsFromBidResponses(auctionContext.getBidRequest(), List.of(notSecureBadBidderResponse))
- );
+ verify(bidsScanner).submitBids(BidsMapper.toRedisBidsFromBidResponses(
+ auctionContext.getBidRequest(), List.of(notSecureBadBidderResponse)));
final PayloadUpdate payloadUpdate = invocationResult.result().payloadUpdate();
final AllProcessedBidResponsesPayloadImpl initPayloadToUpdate = AllProcessedBidResponsesPayloadImpl.of(
@@ -267,18 +304,23 @@ public void callShouldSubmitToScanOnlyBidsWithDataWhenSomeBiddersRespondWithEmpt
.bidders(List.of(notSecureBadBidderName))
.impIds(List.of("imp_b"))
.bidIds(List.of("bid_id_b"))
- .build()))
- )));
+ .build())))));
}
@Test
public void callShouldSubmitBidsWithoutMaskedGeoInfoWhenTransmitGeoIsAllowed() {
// given
final Boolean transmitGeoIsAllowed = true;
- final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(
- "[[[{\"tag_key\": \"tag\", \"issues\":[{\"spec_name\":\"malicious_domain\",\"value\":\"ads.deceivenetworks.net\",\"first_adinstance\":\"e91e8da982bb8b7f80100426\"}]}]]]");
- final User user = userFpdActivityMask.maskUser(
- getUser(), true, true, !transmitGeoIsAllowed);
+ final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult("""
+ [[[{
+ "tag_key": "tag",
+ "issues": [{
+ "spec_name": "malicious_domain",
+ "value": "ads.deceivenetworks.net",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""");
+ final User user = userFpdActivityMask.maskUser(getUser(), true, true);
final Device device = userFpdActivityMask.maskDevice(
getDevice(), true, !transmitGeoIsAllowed);
@@ -304,10 +346,16 @@ public void callShouldSubmitBidsWithoutMaskedGeoInfoWhenTransmitGeoIsAllowed() {
public void callShouldSubmitBidsWithMaskedGeoInfoWhenTransmitGeoIsNotAllowed() {
// given
final Boolean transmitGeoIsAllowed = false;
- final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult(
- "[[[{\"tag_key\": \"tag\", \"issues\":[{\"spec_name\":\"malicious_domain\",\"value\":\"ads.deceivenetworks.net\",\"first_adinstance\":\"e91e8da982bb8b7f80100426\"}]}]]]");
- final User user = userFpdActivityMask.maskUser(
- getUser(), true, true, !transmitGeoIsAllowed);
+ final BidsScanResult bidsScanResult = redisParser.parseBidsScanResult("""
+ [[[{
+ "tag_key": "tag",
+ "issues": [{
+ "spec_name": "malicious_domain",
+ "value": "ads.deceivenetworks.net",
+ "first_adinstance": "e91e8da982bb8b7f80100426"
+ }]
+ }]]]""");
+ final User user = userFpdActivityMask.maskUser(getUser(), true, true);
final Device device = userFpdActivityMask.maskDevice(
getDevice(), true, !transmitGeoIsAllowed);
@@ -351,7 +399,7 @@ public void callShouldReturnResultWithDebugInfoWhenDebugIsEnabledAndRequestIsBro
assertThat(result.status()).isEqualTo(InvocationStatus.success);
assertThat(result.action()).isEqualTo(InvocationAction.no_action);
assertThat(result.errors()).isNull();
- assertThat(result.debugMessages().get(0)).isEqualTo("Error during parse redis response: [[[{\"t");
+ assertThat(result.debugMessages().getFirst()).isEqualTo("Error during parse redis response: [[[{\"t");
}
@Test
@@ -398,8 +446,8 @@ private static Device getDevice() {
return Device.builder().geo(Geo.builder().country("country-d").region("region-d").build()).build();
}
- private static BidderResponse getEmptyBidderResponse(String bidderName) {
- return BidderResponse.of(bidderName, BidderSeatBid.builder()
+ private static BidderResponse getEmptyBidderResponse() {
+ return BidderResponse.of("emptybidder", BidderSeatBid.builder()
.bids(Collections.emptyList())
.build(), 5);
}
diff --git a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityModuleTest.java b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityModuleTest.java
index 16fe689b6ff..41e63920319 100644
--- a/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityModuleTest.java
+++ b/extra/modules/confiant-ad-quality/src/test/java/org/prebid/server/hooks/modules/com/confiant/adquality/v1/ConfiantAdQualityModuleTest.java
@@ -1,6 +1,6 @@
package org.prebid.server.hooks.modules.com.confiant.adquality.v1;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
@@ -8,11 +8,7 @@ public class ConfiantAdQualityModuleTest {
@Test
public void shouldHaveValidInitialConfigs() {
- // given
-
- // when
-
- // then
+ // when and then
assertThat(ConfiantAdQualityModule.CODE).isEqualTo("confiant-ad-quality");
}
}
diff --git a/extra/modules/fiftyone-devicedetection/README.md b/extra/modules/fiftyone-devicedetection/README.md
new file mode 100644
index 00000000000..fbe254b28c1
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/README.md
@@ -0,0 +1,181 @@
+# Overview
+
+The 51Degrees module enriches an incoming OpenRTB request with [51Degrees Device Data](https://51degrees.com/documentation/_device_detection__overview.html).
+
+The module sets the following fields of the device object: `make`, `model`, `os`, `osv`, `h`, `w`, `ppi`, `pxratio` - interested bidder adapters may use these fields as needed. In addition, the module sets `device.ext.fiftyonedegrees_deviceId` to a permanent device ID that can be rapidly looked up in on-premise data exposing over 250 properties, including device age, chipset, codec support, price, operating system and app/browser versions, and embedded features.
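+
+For example, a bidder adapter could read this ID back from the enriched request roughly as follows. This is a minimal sketch, not part of the module's API; it only relies on the `ExtDevice.getProperty` accessor that the module itself uses when writing the value, and the class name is illustrative:
+
+```java
+import com.fasterxml.jackson.databind.JsonNode;
+import com.iab.openrtb.request.Device;
+
+import org.prebid.server.proto.openrtb.ext.request.ExtDevice;
+
+public final class FiftyOneDeviceIdReader {
+
+    private FiftyOneDeviceIdReader() {
+    }
+
+    /** Returns the 51Degrees device ID written by the module, or null if the device was not enriched. */
+    public static String deviceId(Device device) {
+        final ExtDevice ext = device != null ? device.getExt() : null;
+        final JsonNode id = ext != null ? ext.getProperty("fiftyonedegrees_deviceId") : null;
+        return id != null && id.isTextual() ? id.textValue() : null;
+    }
+}
+```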
+
+## Setup
+
+The 51Degrees module operates using a data file. You can get started with a free Lite data file that can be downloaded here: [https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash](https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash). The Lite file is capable of detecting limited device information, so if you need in-depth device data, please contact 51Degrees to obtain a license: [https://51degrees.com/contact-us](https://51degrees.com/contact-us?ContactReason=Free%20Trial).
+
+Put the data file in a file system location writable by the user running the Prebid Server module and specify the path to the file in the module configuration. The location needs to be writable if you would like to enable [automatic data file updates](https://51degrees.com/documentation/_features__automatic_datafile_updates.html).
+
+## Configuration
+
+To start using the module you have to enable it and add `fiftyone-devicedetection-entrypoint-hook` and `fiftyone-devicedetection-raw-auction-request-hook` to the hooks execution plan in your YAML configuration file:
+
+```yaml
+hooks:
+ fiftyone-devicedetection:
+ enabled: true
+ host-execution-plan: >
+ {
+ "endpoints": {
+ "/openrtb2/auction": {
+ "stages": {
+ "entrypoint": {
+ "groups": [
+ {
+ "timeout": 100,
+ "hook-sequence": [
+ {
+ "module-code": "fiftyone-devicedetection",
+ "hook-impl-code": "fiftyone-devicedetection-entrypoint-hook"
+ }
+ ]
+ }
+ ]
+ },
+ "raw-auction-request": {
+ "groups": [
+ {
+ "timeout": 100,
+ "hook-sequence": [
+ {
+ "module-code": "fiftyone-devicedetection",
+ "hook-impl-code": "fiftyone-devicedetection-raw-auction-request-hook"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+ }
+```
+
+Then configure the module itself using the options described below.
+
+## List of module configuration options
+
+- `account-filter`
+ - `allow-list` - _(list of strings)_ - A list of account IDs that are allowed to use this module. If empty, everyone is allowed. Full-string match is performed (whitespaces and capitalization matter). Defaults to empty.
+- `data-file`
+ - `path` - _(string, **REQUIRED**)_ - The full path to the device detection data file. Sample file can be downloaded from [[data repo on GitHub](https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash)].
+ - `make-temp-copy` - _(boolean)_ - If true, the engine will create a temporary copy of the data file rather than using the data file directly. Defaults to false.
+ - `update`
+ - `auto` - _(boolean)_ - Enable/Disable auto update. Defaults to enabled. If enabled, the auto update system will automatically download and apply new data files for device detection.
+ - `on-startup` - _(boolean)_ - Enable/Disable update on startup. Defaults to enabled. If enabled, the auto update system will be used to check for an update before the device detection engine is created. If an update is available, it will be downloaded and applied before the pipeline is built and returned for use so this may take some time.
+ - `url` - _(string)_ - Configure the engine to use the specified URL when looking for an updated data file. Default is the 51Degrees update URL.
+ - `license-key` - _(string)_ - Set the license key used when checking for new device detection data files. Defaults to null.
+ - `watch-file-system` - _(boolean)_ - The DataUpdateService has the ability to watch a file on disk and refresh the engine as soon as that file is updated. This setting enables/disables that feature. Defaults to true.
+ - `polling-interval` - _(int, seconds)_ - Set the time between checks for a new data file made by the DataUpdateService in seconds. Default = 30 minutes.
+- `performance`
+ - `profile` - _(string)_ - Set the performance profile for the device detection engine. Must be one of: LowMemory, MaxPerformance, HighPerformance, Balanced, BalancedTemp. Defaults to balanced.
+ - `concurrency` - _(int)_ - Set the expected number of concurrent operations using the engine. This sets the concurrency of the internal caches to avoid excessive locking. Default: 10.
+ - `difference` - _(int)_ - Set the maximum difference to allow when processing HTTP headers. The meaning of difference depends on the Device Detection API being used. The difference is the difference in hash value between the hash that was found, and the hash that is being searched for. By default this is 0. For more information see [51Degrees documentation](https://51degrees.com/documentation/_device_detection__hash.html).
+ - `allow-unmatched` - _(boolean)_ - If set to false, a non-matching User-Agent will result in properties without set values.
+ If set to true, a non-matching User-Agent will cause the 'default profiles' to be returned. This means that properties will always have values (i.e. no need to check .hasValue) but some may be inaccurate. By default, this is false.
+ - `drift` - _(int)_ - Set the maximum drift to allow when matching hashes. If the drift is exceeded, the result is considered invalid and values will not be returned. By default this is 0. For more information see [51Degrees documentation](https://51degrees.com/documentation/_device_detection__hash.html).
+
+```yaml
+hooks:
+ modules:
+ fiftyone-devicedetection:
+ account-filter:
+ allow-list: [] # list of strings, account ids for enabled publishers, or empty for all
+ data-file:
+ path: ~ # string, REQUIRED, download the sample from https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash or Enterprise from https://51degrees.com/pricing
+ make-temp-copy: ~ # boolean
+ update:
+ auto: ~ # boolean
+ on-startup: ~ # boolean
+ url: ~ # string
+ license-key: ~ # string
+ watch-file-system: ~ # boolean
+ polling-interval: ~ # int, seconds
+ performance:
+ profile: ~ # string, one of [LowMemory,MaxPerformance,HighPerformance,Balanced,BalancedTemp]
+ concurrency: ~ # int
+ difference: ~ # int
+ allow-unmatched: ~ # boolean
+ drift: ~ # int
+```
+
+Minimal sample (only required):
+
+```yaml
+ modules:
+ fiftyone-devicedetection:
+ data-file:
+ path: "51Degrees-LiteV4.1.hash" # string, REQUIRED, download the sample from https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash or Enterprise from https://51degrees.com/pricing
+```
+
+## Running the demo
+
+1. Build the server bundle JAR as described in [[Build Project](../../../docs/build.md#build-project)], e.g.
+
+```bash
+mvn clean package --file extra/pom.xml
+```
+
+2. Download `51Degrees-LiteV4.1.hash` from [[GitHub](https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash)] and put it in the project root directory.
+
+```bash
+curl -o 51Degrees-LiteV4.1.hash -L https://github.com/51Degrees/device-detection-data/raw/main/51Degrees-LiteV4.1.hash
+```
+
+3. Start server bundle JAR as described in [[Running project](../../../docs/run.md#running-project)], e.g.
+
+```bash
+java -jar target/prebid-server-bundle.jar --spring.config.additional-location=sample/prebid-config-with-51d-dd.yaml
+```
+
+4. Run sample request against the server as described in [[requests/README](../../../sample/requests/README.txt)], e.g.
+
+```bash
+curl http://localhost:8080/openrtb2/auction --data @extra/modules/fiftyone-devicedetection/sample-requests/data.json
+```
+
+5. See the `device` object being enriched:
+
+```diff
+ "device": {
+- "ua": "Mozilla/5.0 (Linux; Android 11; SM-G998W) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Mobile Safari/537.36"
++ "ua": "Mozilla/5.0 (Linux; Android 11; SM-G998W) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Mobile Safari/537.36",
++ "os": "Android",
++ "osv": "11.0",
++ "h": 3200,
++ "w": 1440,
++ "ext": {
++ "fiftyonedegrees_deviceId": "110698-102757-105219-0"
++ }
+ },
+```
+
+[[Enterprise](https://51degrees.com/pricing)] files can provide even more information:
+
+```diff
+ "device": {
+ "ua": "Mozilla/5.0 (Linux; Android 11; SM-G998W) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Mobile Safari/537.36",
++ "devicetype": 1,
++ "make": "Samsung",
++ "model": "SM-G998W",
+ "os": "Android",
+ "osv": "11.0",
+ "h": 3200,
+ "w": 1440,
++ "ppi": 516,
++ "pxratio": 3.44,
+ "ext": {
+- "fiftyonedegrees_deviceId": "110698-102757-105219-0"
++ "fiftyonedegrees_deviceId": "110698-102757-105219-18092"
+ }
+```
+
+## Maintainer contacts
+
+Any suggestions or questions can be directed to [support@51degrees.com](mailto:support@51degrees.com).
+
+Or just open a new [issue](https://github.com/prebid/prebid-server-java/issues/new) or [pull request](https://github.com/prebid/prebid-server-java/pulls) in this repository.
\ No newline at end of file
diff --git a/extra/modules/fiftyone-devicedetection/pom.xml b/extra/modules/fiftyone-devicedetection/pom.xml
new file mode 100644
index 00000000000..aafbfa859ac
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/pom.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.prebid.server.hooks.modules</groupId>
+        <artifactId>all-modules</artifactId>
+        <version>3.39.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>fiftyone-devicedetection</artifactId>
+
+    <name>fiftyone-devicedetection</name>
+    <description>51Degrees Device Detection module</description>
+
+    <properties>
+        <fiftyone-device-detection.version>4.4.226</fiftyone-device-detection.version>
+    </properties>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>com.51degrees</groupId>
+            <artifactId>device-detection.hash.engine.on-premise</artifactId>
+            <version>${fiftyone-device-detection.version}</version>
+        </dependency>
+
+
+        <dependency>
+            <groupId>com.51degrees</groupId>
+            <artifactId>device-detection</artifactId>
+            <version>${fiftyone-device-detection.version}</version>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/extra/modules/fiftyone-devicedetection/sample-requests/data.json b/extra/modules/fiftyone-devicedetection/sample-requests/data.json
new file mode 100644
index 00000000000..c87b9876553
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/sample-requests/data.json
@@ -0,0 +1,146 @@
+{
+ "imp":
+ [
+ {
+ "ext":
+ {
+ "data":
+ {
+ "adserver":
+ {
+ "name": "gam",
+ "adslot": "test"
+ },
+ "pbadslot": "test",
+ "gpid": "test"
+ },
+ "gpid": "test",
+ "prebid":
+ {
+ "bidder":
+ {
+ "appnexus":
+ {
+ "placement_id": 1,
+ "use_pmt_rule": false
+ }
+ },
+ "adunitcode": "25e8ad9f-13a4-4404-ba74-f9eebff0e86c",
+ "floors":
+ {
+ "floorMin": 0.01
+ }
+ }
+ },
+ "id": "2529eeea-813e-4da6-838f-f91c28d64867",
+ "banner":
+ {
+ "topframe": 1,
+ "format":
+ [
+ {
+ "w": 728,
+ "h": 90
+ }
+ ],
+ "pos": 1
+ },
+ "bidfloor": 0.01,
+ "bidfloorcur": "USD"
+ }
+ ],
+ "site":
+ {
+ "domain": "test.com",
+ "publisher":
+ {
+ "domain": "test.com",
+ "id": "1"
+ },
+ "page": "https://www.test.com/"
+ },
+ "device":
+ {
+ "ua": "Mozilla/5.0 (Linux; Android 11; SM-G998W) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Mobile Safari/537.36"
+ },
+ "id": "fc4670ce-4985-4316-a245-b43c885dc37a",
+ "test": 1,
+ "cur":
+ [
+ "USD"
+ ],
+ "source":
+ {
+ "ext":
+ {
+ "schain":
+ {
+ "ver": "1.0",
+ "complete": 1,
+ "nodes":
+ [
+ {
+ "asi": "example.com",
+ "sid": "1234",
+ "hp": 1
+ }
+ ]
+ }
+ }
+ },
+ "ext":
+ {
+ "prebid":
+ {
+ "cache":
+ {
+ "bids":
+ {
+ "returnCreative": true
+ },
+ "vastxml":
+ {
+ "returnCreative": true
+ }
+ },
+ "auctiontimestamp": 1698390609882,
+ "targeting":
+ {
+ "includewinners": true,
+ "includebidderkeys": false
+ },
+ "schains":
+ [
+ {
+ "bidders":
+ [
+ "appnexus"
+ ],
+ "schain":
+ {
+ "ver": "1.0",
+ "complete": 1,
+ "nodes":
+ [
+ {
+ "asi": "example.com",
+ "sid": "1234",
+ "hp": 1
+ }
+ ]
+ }
+ }
+ ],
+ "floors":
+ {
+ "enabled": false,
+ "floorMin": 0.01,
+ "floorMinCur": "USD"
+ },
+ "createtids": false
+ }
+ },
+ "user":
+ {},
+ "tmax": 1700
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/config/FiftyOneDeviceDetectionModuleConfiguration.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/config/FiftyOneDeviceDetectionModuleConfiguration.java
new file mode 100644
index 00000000000..ee93c1e3a76
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/config/FiftyOneDeviceDetectionModuleConfiguration.java
@@ -0,0 +1,50 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.config;
+
+import fiftyone.devicedetection.DeviceDetectionPipelineBuilder;
+import fiftyone.pipeline.core.flowelements.Pipeline;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config.ModuleConfig;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.FiftyOneDeviceDetectionModule;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core.DeviceEnricher;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core.PipelineBuilder;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.hooks.FiftyOneDeviceDetectionEntrypointHook;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.hooks.FiftyOneDeviceDetectionRawAuctionRequestHook;
+import org.prebid.server.hooks.v1.Hook;
+import org.prebid.server.hooks.v1.InvocationContext;
+import org.prebid.server.hooks.v1.Module;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Set;
+
+@Configuration
+@ConditionalOnProperty(prefix = "hooks." + FiftyOneDeviceDetectionModule.CODE, name = "enabled", havingValue = "true")
+public class FiftyOneDeviceDetectionModuleConfiguration {
+
+ @Bean
+ @ConfigurationProperties(prefix = "hooks.modules." + FiftyOneDeviceDetectionModule.CODE)
+ ModuleConfig moduleConfig() {
+ return new ModuleConfig();
+ }
+
+ @Bean
+ Pipeline pipeline(ModuleConfig moduleConfig) throws Exception {
+ return new PipelineBuilder(moduleConfig).build(new DeviceDetectionPipelineBuilder());
+ }
+
+ @Bean
+ DeviceEnricher deviceEnricher(Pipeline pipeline) {
+ return new DeviceEnricher(pipeline);
+ }
+
+ @Bean
+ Module fiftyOneDeviceDetectionModule(ModuleConfig moduleConfig, DeviceEnricher deviceEnricher) {
+ final Set<? extends Hook<?, ? extends InvocationContext>> hooks = Set.of(
+ new FiftyOneDeviceDetectionEntrypointHook(),
+ new FiftyOneDeviceDetectionRawAuctionRequestHook(moduleConfig.getAccountFilter(), deviceEnricher)
+ );
+
+ return new FiftyOneDeviceDetectionModule(hooks);
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/boundary/CollectedEvidence.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/boundary/CollectedEvidence.java
new file mode 100644
index 00000000000..ac427e9d9e7
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/boundary/CollectedEvidence.java
@@ -0,0 +1,13 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.boundary;
+
+import lombok.Builder;
+
+import java.util.Collection;
+import java.util.Map;
+
+@Builder(toBuilder = true)
+public record CollectedEvidence(
+ Collection<Map.Entry<String, String>> rawHeaders,
+ String deviceUA,
+ Map<String, String> secureHeaders) {
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/AccountFilter.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/AccountFilter.java
new file mode 100644
index 00000000000..c7cda11450a
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/AccountFilter.java
@@ -0,0 +1,11 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import lombok.Data;
+
+import java.util.List;
+
+@Data
+public final class AccountFilter {
+
+ List<String> allowList;
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFile.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFile.java
new file mode 100644
index 00000000000..46cf19adf56
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFile.java
@@ -0,0 +1,13 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import lombok.Data;
+
+@Data
+public final class DataFile {
+
+ String path;
+
+ Boolean makeTempCopy;
+
+ DataFileUpdate update;
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileUpdate.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileUpdate.java
new file mode 100644
index 00000000000..8c65b7d4508
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileUpdate.java
@@ -0,0 +1,19 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import lombok.Data;
+
+@Data
+public final class DataFileUpdate {
+
+ Boolean auto;
+
+ Boolean onStartup;
+
+ String url;
+
+ String licenseKey;
+
+ Boolean watchFileSystem;
+
+ Integer pollingInterval;
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/ModuleConfig.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/ModuleConfig.java
new file mode 100644
index 00000000000..80f95f353e5
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/ModuleConfig.java
@@ -0,0 +1,13 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import lombok.Data;
+
+@Data
+public final class ModuleConfig {
+
+ AccountFilter accountFilter;
+
+ DataFile dataFile;
+
+ PerformanceConfig performance;
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/PerformanceConfig.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/PerformanceConfig.java
new file mode 100644
index 00000000000..7a81b11be5a
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/PerformanceConfig.java
@@ -0,0 +1,17 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import lombok.Data;
+
+@Data
+public final class PerformanceConfig {
+
+ String profile;
+
+ Integer concurrency;
+
+ Integer difference;
+
+ Boolean allowUnmatched;
+
+ Integer drift;
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/FiftyOneDeviceDetectionModule.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/FiftyOneDeviceDetectionModule.java
new file mode 100644
index 00000000000..5bc2b8e82ab
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/FiftyOneDeviceDetectionModule.java
@@ -0,0 +1,23 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1;
+
+import org.prebid.server.hooks.v1.Hook;
+import org.prebid.server.hooks.v1.InvocationContext;
+import org.prebid.server.hooks.v1.Module;
+
+import java.util.Collection;
+
+public record FiftyOneDeviceDetectionModule(
+ Collection<? extends Hook<?, ? extends InvocationContext>> hooks
+) implements Module {
+ public static final String CODE = "fiftyone-devicedetection";
+
+ @Override
+ public String code() {
+ return CODE;
+ }
+
+ @Override
+ public Collection<? extends Hook<?, ? extends InvocationContext>> hooks() {
+ return hooks;
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/DeviceEnricher.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/DeviceEnricher.java
new file mode 100644
index 00000000000..72b1e04cee2
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/DeviceEnricher.java
@@ -0,0 +1,327 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.iab.openrtb.request.Device;
+import fiftyone.devicedetection.shared.DeviceData;
+import fiftyone.pipeline.core.data.FlowData;
+import fiftyone.pipeline.core.flowelements.Pipeline;
+import fiftyone.pipeline.engines.data.AspectPropertyValue;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections4.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.boundary.CollectedEvidence;
+import org.prebid.server.model.UpdateResult;
+import org.prebid.server.proto.openrtb.ext.request.ExtDevice;
+
+import jakarta.annotation.Nonnull;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Stream;
+
+public class DeviceEnricher {
+
+ private static final String EXT_DEVICE_ID_KEY = "fiftyonedegrees_deviceId";
+
+ private final Pipeline pipeline;
+
+ public DeviceEnricher(@Nonnull Pipeline pipeline) {
+ this.pipeline = Objects.requireNonNull(pipeline);
+ }
+
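+ // Enrichment is skipped when device.ext already carries a 51Degrees device ID, e.g. one set by an earlier pass.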
+ public static boolean shouldSkipEnriching(Device device) {
+ return StringUtils.isNotEmpty(getDeviceId(device));
+ }
+
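+ // Feeds the collected evidence through the 51Degrees pipeline and returns the patched device, or null when nothing was detected or changed.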
+ public EnrichmentResult populateDeviceInfo(Device device, CollectedEvidence collectedEvidence) throws Exception {
+ try (FlowData data = pipeline.createFlowData()) {
+ data.addEvidence(pickRelevantFrom(collectedEvidence));
+ data.process();
+ final DeviceData deviceData = data.get(DeviceData.class);
+ if (deviceData == null) {
+ return null;
+ }
+ final Device properDevice = Optional.ofNullable(device).orElseGet(() -> Device.builder().build());
+ return patchDevice(properDevice, deviceData);
+ }
+ }
+
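+ // device.ua and sua-derived secure headers are the preferred evidence; raw request headers are used only when neither is present.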
+ private Map<String, String> pickRelevantFrom(CollectedEvidence collectedEvidence) {
+ final Map<String, String> evidence = new HashMap<>();
+
+ final String ua = collectedEvidence.deviceUA();
+ if (StringUtils.isNotBlank(ua)) {
+ evidence.put("header.user-agent", ua);
+ }
+ final Map<String, String> secureHeaders = collectedEvidence.secureHeaders();
+ if (MapUtils.isNotEmpty(secureHeaders)) {
+ evidence.putAll(secureHeaders);
+ }
+ if (!evidence.isEmpty()) {
+ return evidence;
+ }
+
+ Stream.ofNullable(collectedEvidence.rawHeaders())
+ .flatMap(Collection::stream)
+ .forEach(rawHeader -> evidence.put("header." + rawHeader.getKey(), rawHeader.getValue()));
+
+ return evidence;
+ }
+
+ private EnrichmentResult patchDevice(Device device, DeviceData deviceData) {
+ final List<String> updatedFields = new ArrayList<>();
+ final Device.DeviceBuilder deviceBuilder = device.toBuilder();
+
+ final UpdateResult<Integer> resolvedDeviceType = resolveDeviceType(device, deviceData);
+ if (resolvedDeviceType.isUpdated()) {
+ deviceBuilder.devicetype(resolvedDeviceType.getValue());
+ updatedFields.add("devicetype");
+ }
+
+ final UpdateResult<String> resolvedMake = resolveMake(device, deviceData);
+ if (resolvedMake.isUpdated()) {
+ deviceBuilder.make(resolvedMake.getValue());
+ updatedFields.add("make");
+ }
+
+ final UpdateResult<String> resolvedModel = resolveModel(device, deviceData);
+ if (resolvedModel.isUpdated()) {
+ deviceBuilder.model(resolvedModel.getValue());
+ updatedFields.add("model");
+ }
+
+ final UpdateResult<String> resolvedOs = resolveOs(device, deviceData);
+ if (resolvedOs.isUpdated()) {
+ deviceBuilder.os(resolvedOs.getValue());
+ updatedFields.add("os");
+ }
+
+ final UpdateResult<String> resolvedOsv = resolveOsv(device, deviceData);
+ if (resolvedOsv.isUpdated()) {
+ deviceBuilder.osv(resolvedOsv.getValue());
+ updatedFields.add("osv");
+ }
+
+ final UpdateResult<Integer> resolvedH = resolveH(device, deviceData);
+ if (resolvedH.isUpdated()) {
+ deviceBuilder.h(resolvedH.getValue());
+ updatedFields.add("h");
+ }
+
+ final UpdateResult<Integer> resolvedW = resolveW(device, deviceData);
+ if (resolvedW.isUpdated()) {
+ deviceBuilder.w(resolvedW.getValue());
+ updatedFields.add("w");
+ }
+
+ final UpdateResult<Integer> resolvedPpi = resolvePpi(device, deviceData);
+ if (resolvedPpi.isUpdated()) {
+ deviceBuilder.ppi(resolvedPpi.getValue());
+ updatedFields.add("ppi");
+ }
+
+ final UpdateResult<BigDecimal> resolvedPixelRatio = resolvePixelRatio(device, deviceData);
+ if (resolvedPixelRatio.isUpdated()) {
+ deviceBuilder.pxratio(resolvedPixelRatio.getValue());
+ updatedFields.add("pxratio");
+ }
+
+ final UpdateResult<String> resolvedDeviceId = resolveDeviceId(device, deviceData);
+ if (resolvedDeviceId.isUpdated()) {
+ setDeviceId(deviceBuilder, device, resolvedDeviceId.getValue());
+ updatedFields.add("ext." + EXT_DEVICE_ID_KEY);
+ }
+
+ if (updatedFields.isEmpty()) {
+ return null;
+ }
+
+ return EnrichmentResult.builder()
+ .enrichedDevice(deviceBuilder.build())
+ .enrichedFields(updatedFields)
+ .build();
+ }
+
+ private UpdateResult<Integer> resolveDeviceType(Device device, DeviceData deviceData) {
+ final Integer currentDeviceType = device.getDevicetype();
+ if (isPositive(currentDeviceType)) {
+ return UpdateResult.unaltered(currentDeviceType);
+ }
+
+ final String rawDeviceType = getSafe(deviceData, DeviceData::getDeviceType);
+ if (rawDeviceType == null) {
+ return UpdateResult.unaltered(currentDeviceType);
+ }
+
+ final OrtbDeviceType properDeviceType = OrtbDeviceType.resolveFrom(rawDeviceType);
+ return properDeviceType != OrtbDeviceType.UNKNOWN
+ ? UpdateResult.updated(properDeviceType.ordinal())
+ : UpdateResult.unaltered(currentDeviceType);
+ }
+
+ private UpdateResult<String> resolveMake(Device device, DeviceData deviceData) {
+ final String currentMake = device.getMake();
+ if (StringUtils.isNotBlank(currentMake)) {
+ return UpdateResult.unaltered(currentMake);
+ }
+
+ final String make = getSafe(deviceData, DeviceData::getHardwareVendor);
+ return StringUtils.isNotBlank(make)
+ ? UpdateResult.updated(make)
+ : UpdateResult.unaltered(currentMake);
+ }
+
+ private UpdateResult<String> resolveModel(Device device, DeviceData deviceData) {
+ final String currentModel = device.getModel();
+ if (StringUtils.isNotBlank(currentModel)) {
+ return UpdateResult.unaltered(currentModel);
+ }
+
+ final String model = getSafe(deviceData, DeviceData::getHardwareModel);
+ if (StringUtils.isNotBlank(model)) {
+ return UpdateResult.updated(model);
+ }
+
+ final List<String> names = getSafe(deviceData, DeviceData::getHardwareName);
+ return CollectionUtils.isNotEmpty(names)
+ ? UpdateResult.updated(String.join(",", names))
+ : UpdateResult.unaltered(currentModel);
+ }
+
+ private UpdateResult<String> resolveOs(Device device, DeviceData deviceData) {
+ final String currentOs = device.getOs();
+ if (StringUtils.isNotBlank(currentOs)) {
+ return UpdateResult.unaltered(currentOs);
+ }
+
+ final String os = getSafe(deviceData, DeviceData::getPlatformName);
+ return StringUtils.isNotBlank(os)
+ ? UpdateResult.updated(os)
+ : UpdateResult.unaltered(currentOs);
+ }
+
+ private UpdateResult<String> resolveOsv(Device device, DeviceData deviceData) {
+ final String currentOsv = device.getOsv();
+ if (StringUtils.isNotBlank(currentOsv)) {
+ return UpdateResult.unaltered(currentOsv);
+ }
+
+ final String osv = getSafe(deviceData, DeviceData::getPlatformVersion);
+ return StringUtils.isNotBlank(osv)
+ ? UpdateResult.updated(osv)
+ : UpdateResult.unaltered(currentOsv);
+ }
+
+ private UpdateResult<Integer> resolveH(Device device, DeviceData deviceData) {
+ final Integer currentH = device.getH();
+ if (isPositive(currentH)) {
+ return UpdateResult.unaltered(currentH);
+ }
+
+ final Integer h = getSafe(deviceData, DeviceData::getScreenPixelsHeight);
+ return isPositive(h)
+ ? UpdateResult.updated(h)
+ : UpdateResult.unaltered(currentH);
+ }
+
+ private UpdateResult<Integer> resolveW(Device device, DeviceData deviceData) {
+ final Integer currentW = device.getW();
+ if (isPositive(currentW)) {
+ return UpdateResult.unaltered(currentW);
+ }
+
+ final Integer w = getSafe(deviceData, DeviceData::getScreenPixelsWidth);
+ return isPositive(w)
+ ? UpdateResult.updated(w)
+ : UpdateResult.unaltered(currentW);
+ }
+
+ private UpdateResult<Integer> resolvePpi(Device device, DeviceData deviceData) {
+ final Integer currentPpi = device.getPpi();
+ if (isPositive(currentPpi)) {
+ return UpdateResult.unaltered(currentPpi);
+ }
+
+ final Integer pixelsHeight = getSafe(deviceData, DeviceData::getScreenPixelsHeight);
+ if (pixelsHeight == null) {
+ return UpdateResult.unaltered(currentPpi);
+ }
+
+ final Double inchesHeight = getSafe(deviceData, DeviceData::getScreenInchesHeight);
+ return isPositive(inchesHeight)
+ ? UpdateResult.updated((int) Math.round(pixelsHeight / inchesHeight))
+ : UpdateResult.unaltered(currentPpi);
+ }
+
+ private UpdateResult<BigDecimal> resolvePixelRatio(Device device, DeviceData deviceData) {
+ final BigDecimal currentPixelRatio = device.getPxratio();
+ if (currentPixelRatio != null && currentPixelRatio.intValue() > 0) {
+ return UpdateResult.unaltered(currentPixelRatio);
+ }
+
+ final Double rawRatio = getSafe(deviceData, DeviceData::getPixelRatio);
+ return isPositive(rawRatio)
+ ? UpdateResult.updated(BigDecimal.valueOf(rawRatio))
+ : UpdateResult.unaltered(currentPixelRatio);
+ }
+
+ private UpdateResult<String> resolveDeviceId(Device device, DeviceData deviceData) {
+ final String currentDeviceId = getDeviceId(device);
+ if (StringUtils.isNotBlank(currentDeviceId)) {
+ return UpdateResult.unaltered(currentDeviceId);
+ }
+
+ final String deviceID = getSafe(deviceData, DeviceData::getDeviceId);
+ return StringUtils.isNotBlank(deviceID)
+ ? UpdateResult.updated(deviceID)
+ : UpdateResult.unaltered(currentDeviceId);
+ }
+
+ private static boolean isPositive(Integer value) {
+ return value != null && value > 0;
+ }
+
+ private static boolean isPositive(Double value) {
+ return value != null && value > 0;
+ }
+
+ private static String getDeviceId(Device device) {
+ final ExtDevice ext = device.getExt();
+ if (ext == null) {
+ return null;
+ }
+ final JsonNode savedValue = ext.getProperty(EXT_DEVICE_ID_KEY);
+ return savedValue != null && savedValue.isTextual() ? savedValue.textValue() : null;
+ }
+
+ private static void setDeviceId(Device.DeviceBuilder deviceBuilder, Device device, String deviceId) {
+ ExtDevice ext = null;
+ if (device != null) {
+ ext = device.getExt();
+ }
+ if (ext == null) {
+ ext = ExtDevice.empty();
+ }
+ ext.addProperty(EXT_DEVICE_ID_KEY, new TextNode(deviceId));
+ deviceBuilder.ext(ext);
+ }
+
+ private <T> T getSafe(DeviceData deviceData, Function<DeviceData, AspectPropertyValue<T>> propertyGetter) {
+ try {
+ final AspectPropertyValue<T> propertyValue = propertyGetter.apply(deviceData);
+ if (propertyValue != null && propertyValue.hasValue()) {
+ return propertyValue.getValue();
+ }
+ } catch (Exception e) {
+ // nop -- not interested in errors on getting missing values.
+ }
+ return null;
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/EnrichmentResult.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/EnrichmentResult.java
new file mode 100644
index 00000000000..5b0e048f5b9
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/EnrichmentResult.java
@@ -0,0 +1,12 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core;
+
+import com.iab.openrtb.request.Device;
+import lombok.Builder;
+
+import java.util.Collection;
+
+@Builder
+public record EnrichmentResult(
+ Device enrichedDevice,
+ Collection enrichedFields) {
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/OrtbDeviceType.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/OrtbDeviceType.java
new file mode 100644
index 00000000000..fc5a6c8d9ed
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/OrtbDeviceType.java
@@ -0,0 +1,40 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core;
+
+import java.util.Map;
+import java.util.Optional;
+
+// https://github.com/InteractiveAdvertisingBureau/AdCOM/blob/main/AdCOM%20v1.0%20FINAL.md#list--device-types-
+public enum OrtbDeviceType {
+
+ UNKNOWN,
+ MOBILE_TABLET,
+ PERSONAL_COMPUTER,
+ CONNECTED_TV,
+ PHONE,
+ TABLET,
+ CONNECTED_DEVICE,
+ SET_TOP_BOX,
+ OOH_DEVICE;
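+ // Declaration order mirrors the AdCOM device types list above, so ordinal() doubles as the OpenRTB devicetype code.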
+
+ private static final Map<String, OrtbDeviceType> DEVICE_FIELD_MAPPING = Map.ofEntries(
+ Map.entry("Phone", OrtbDeviceType.PHONE),
+ Map.entry("Console", OrtbDeviceType.SET_TOP_BOX),
+ Map.entry("Desktop", OrtbDeviceType.PERSONAL_COMPUTER),
+ Map.entry("EReader", OrtbDeviceType.PERSONAL_COMPUTER),
+ Map.entry("IoT", OrtbDeviceType.CONNECTED_DEVICE),
+ Map.entry("Kiosk", OrtbDeviceType.OOH_DEVICE),
+ Map.entry("MediaHub", OrtbDeviceType.SET_TOP_BOX),
+ Map.entry("Mobile", OrtbDeviceType.MOBILE_TABLET),
+ Map.entry("Router", OrtbDeviceType.CONNECTED_DEVICE),
+ Map.entry("SmallScreen", OrtbDeviceType.CONNECTED_DEVICE),
+ Map.entry("SmartPhone", OrtbDeviceType.PHONE),
+ Map.entry("SmartSpeaker", OrtbDeviceType.CONNECTED_DEVICE),
+ Map.entry("SmartWatch", OrtbDeviceType.CONNECTED_DEVICE),
+ Map.entry("Tablet", OrtbDeviceType.TABLET),
+ Map.entry("Tv", OrtbDeviceType.CONNECTED_TV),
+ Map.entry("Vehicle Display", OrtbDeviceType.PERSONAL_COMPUTER));
+
+ public static OrtbDeviceType resolveFrom(String deviceType) {
+ return Optional.ofNullable(DEVICE_FIELD_MAPPING.get(deviceType)).orElse(UNKNOWN);
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/PipelineBuilder.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/PipelineBuilder.java
new file mode 100644
index 00000000000..99bb8de408c
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/PipelineBuilder.java
@@ -0,0 +1,203 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core;
+
+import fiftyone.devicedetection.DeviceDetectionOnPremisePipelineBuilder;
+import fiftyone.devicedetection.DeviceDetectionPipelineBuilder;
+import fiftyone.pipeline.core.flowelements.Pipeline;
+import fiftyone.pipeline.engines.Constants;
+import fiftyone.pipeline.engines.services.DataUpdateServiceDefault;
+import org.apache.commons.lang3.BooleanUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config.DataFile;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config.DataFileUpdate;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config.ModuleConfig;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config.PerformanceConfig;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class PipelineBuilder {
+
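+ // Properties requested from the 51Degrees engine; DeviceEnricher consumes the device-related subset of these.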
+ private static final Collection<String> PROPERTIES_USED = List.of(
+ "devicetype",
+ "hardwarevendor",
+ "hardwaremodel",
+ "hardwarename",
+ "platformname",
+ "platformversion",
+ "screenpixelsheight",
+ "screenpixelswidth",
+ "screeninchesheight",
+ "pixelratio",
+
+ "BrowserName",
+ "BrowserVersion",
+ "IsCrawler",
+
+ "BrowserVendor",
+ "PlatformVendor",
+ "Javascript",
+ "GeoLocation",
+ "HardwareModelVariants");
+
+ private final ModuleConfig moduleConfig;
+
+ public PipelineBuilder(ModuleConfig moduleConfig) {
+ this.moduleConfig = moduleConfig;
+ }
+
+ public Pipeline build(DeviceDetectionPipelineBuilder premadeBuilder) throws Exception {
+ final DataFile dataFile = moduleConfig.getDataFile();
+
+ final Boolean shouldMakeDataCopy = dataFile.getMakeTempCopy();
+ final DeviceDetectionOnPremisePipelineBuilder builder = premadeBuilder.useOnPremise(
+ dataFile.getPath(),
+ BooleanUtils.isTrue(shouldMakeDataCopy));
+
+ applyUpdateOptions(builder, dataFile.getUpdate());
+ applyPerformanceOptions(builder, moduleConfig.getPerformance());
+ PROPERTIES_USED.forEach(builder::setProperty);
+ return builder.build();
+ }
+
+ private static void applyUpdateOptions(DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ if (updateConfig == null) {
+ return;
+ }
+ pipelineBuilder.setDataUpdateService(new DataUpdateServiceDefault());
+
+ resolveAutoUpdate(pipelineBuilder, updateConfig);
+ resolveUpdateOnStartup(pipelineBuilder, updateConfig);
+ resolveUpdateURL(pipelineBuilder, updateConfig);
+ resolveLicenseKey(pipelineBuilder, updateConfig);
+ resolveWatchFileSystem(pipelineBuilder, updateConfig);
+ resolveUpdatePollingInterval(pipelineBuilder, updateConfig);
+ }
+
+ private static void resolveAutoUpdate(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ final Boolean auto = updateConfig.getAuto();
+ if (auto != null) {
+ pipelineBuilder.setAutoUpdate(auto);
+ }
+ }
+
+ private static void resolveUpdateOnStartup(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ final Boolean onStartup = updateConfig.getOnStartup();
+ if (onStartup != null) {
+ pipelineBuilder.setDataUpdateOnStartup(onStartup);
+ }
+ }
+
+ private static void resolveUpdateURL(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ final String url = updateConfig.getUrl();
+ if (StringUtils.isNotEmpty(url)) {
+ pipelineBuilder.setDataUpdateUrl(url);
+ }
+ }
+
+ private static void resolveLicenseKey(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ final String licenseKey = updateConfig.getLicenseKey();
+ if (StringUtils.isNotEmpty(licenseKey)) {
+ pipelineBuilder.setDataUpdateLicenseKey(licenseKey);
+ }
+ }
+
+ private static void resolveWatchFileSystem(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ final Boolean watchFileSystem = updateConfig.getWatchFileSystem();
+ if (watchFileSystem != null) {
+ pipelineBuilder.setDataFileSystemWatcher(watchFileSystem);
+ }
+ }
+
+ private static void resolveUpdatePollingInterval(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ DataFileUpdate updateConfig) {
+ final Integer pollingInterval = updateConfig.getPollingInterval();
+ if (pollingInterval != null) {
+ pipelineBuilder.setUpdatePollingInterval(pollingInterval);
+ }
+ }
+
+ private static void applyPerformanceOptions(DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ PerformanceConfig performanceConfig) {
+ if (performanceConfig == null) {
+ return;
+ }
+ resolvePerformanceProfile(pipelineBuilder, performanceConfig);
+ resolveConcurrency(pipelineBuilder, performanceConfig);
+ resolveDifference(pipelineBuilder, performanceConfig);
+ resolveAllowUnmatched(pipelineBuilder, performanceConfig);
+ resolveDrift(pipelineBuilder, performanceConfig);
+ }
+
+ private static void resolvePerformanceProfile(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ PerformanceConfig performanceConfig) {
+ final String profile = performanceConfig.getProfile();
+ if (StringUtils.isEmpty(profile)) {
+ return;
+ }
+ for (Constants.PerformanceProfiles nextProfile : Constants.PerformanceProfiles.values()) {
+ if (StringUtils.equalsIgnoreCase(nextProfile.name(), profile)) {
+ pipelineBuilder.setPerformanceProfile(nextProfile);
+ return;
+ }
+ }
+ throw new IllegalArgumentException(
+ "Invalid value for performance profile ("
+ + profile
+ + ") -- should be one of: "
+ + Arrays.stream(Constants.PerformanceProfiles.values())
+ .map(Enum::name)
+ .collect(Collectors.joining(", "))
+ );
+ }
+
+ private static void resolveConcurrency(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ PerformanceConfig performanceConfig) {
+ final Integer concurrency = performanceConfig.getConcurrency();
+ if (concurrency != null) {
+ pipelineBuilder.setConcurrency(concurrency);
+ }
+ }
+
+ private static void resolveDifference(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ PerformanceConfig performanceConfig) {
+ final Integer difference = performanceConfig.getDifference();
+ if (difference != null) {
+ pipelineBuilder.setDifference(difference);
+ }
+ }
+
+ private static void resolveAllowUnmatched(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ PerformanceConfig performanceConfig) {
+ final Boolean allowUnmatched = performanceConfig.getAllowUnmatched();
+ if (allowUnmatched != null) {
+ pipelineBuilder.setAllowUnmatched(allowUnmatched);
+ }
+ }
+
+ private static void resolveDrift(
+ DeviceDetectionOnPremisePipelineBuilder pipelineBuilder,
+ PerformanceConfig performanceConfig) {
+ final Integer drift = performanceConfig.getDrift();
+ if (drift != null) {
+ pipelineBuilder.setDrift(drift);
+ }
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/SecureHeadersRetriever.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/SecureHeadersRetriever.java
new file mode 100644
index 00000000000..cc47233e68a
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/SecureHeadersRetriever.java
@@ -0,0 +1,101 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core;
+
+import com.iab.openrtb.request.BrandVersion;
+import com.iab.openrtb.request.UserAgent;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import jakarta.annotation.Nonnull;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class SecureHeadersRetriever {
+
+ private SecureHeadersRetriever() {
+ }
+
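+ // Converts the OpenRTB structured user agent (device.sua) into Sec-CH-UA-* evidence entries for the 51Degrees pipeline.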
+ public static Map<String, String> retrieveFrom(@Nonnull UserAgent userAgent) {
+ final Map<String, String> secureHeaders = new HashMap<>();
+
+ final List<BrandVersion> versions = userAgent.getBrowsers();
+ if (CollectionUtils.isNotEmpty(versions)) {
+ final String fullUA = brandListToString(versions);
+ secureHeaders.put("header.Sec-CH-UA", fullUA);
+ secureHeaders.put("header.Sec-CH-UA-Full-Version-List", fullUA);
+ }
+
+ final BrandVersion platform = userAgent.getPlatform();
+ if (platform != null) {
+ final String platformName = platform.getBrand();
+ if (StringUtils.isNotBlank(platformName)) {
+ secureHeaders.put("header.Sec-CH-UA-Platform", toHeaderSafe(platformName));
+ }
+
+ final List<String> platformVersions = platform.getVersion();
+ if (CollectionUtils.isNotEmpty(platformVersions)) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ stringBuilder.append('"');
+ appendVersionList(stringBuilder, platformVersions);
+ stringBuilder.append('"');
+ secureHeaders.put("header.Sec-CH-UA-Platform-Version", stringBuilder.toString());
+ }
+ }
+
+ final Integer isMobile = userAgent.getMobile();
+ if (isMobile != null) {
+ secureHeaders.put("header.Sec-CH-UA-Mobile", "?" + isMobile);
+ }
+
+ final String architecture = userAgent.getArchitecture();
+ if (StringUtils.isNotBlank(architecture)) {
+ secureHeaders.put("header.Sec-CH-UA-Arch", toHeaderSafe(architecture));
+ }
+
+ final String bitness = userAgent.getBitness();
+ if (StringUtils.isNotBlank(bitness)) {
+ secureHeaders.put("header.Sec-CH-UA-Bitness", toHeaderSafe(bitness));
+ }
+
+ final String model = userAgent.getModel();
+ if (StringUtils.isNotBlank(model)) {
+ secureHeaders.put("header.Sec-CH-UA-Model", toHeaderSafe(model));
+ }
+
+ return secureHeaders;
+ }
+
+ private static String toHeaderSafe(String rawValue) {
+ return '"' + rawValue.replace("\"", "\\\"") + '"';
+ }
+
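+    // Serializes the brand/version pairs into the Sec-CH-UA list syntax,
+    // e.g. "Chromium";v="124", "Google Chrome";v="124"; brands without a name are skipped.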
+    private static String brandListToString(List<BrandVersion> versions) {
+ final StringBuilder stringBuilder = new StringBuilder();
+ for (BrandVersion nextBrandVersion : versions) {
+ final String brandName = nextBrandVersion.getBrand();
+ if (brandName == null) {
+ continue;
+ }
+ if (!stringBuilder.isEmpty()) {
+ stringBuilder.append(", ");
+ }
+ stringBuilder.append(toHeaderSafe(brandName));
+ stringBuilder.append(";v=\"");
+ appendVersionList(stringBuilder, nextBrandVersion.getVersion());
+ stringBuilder.append('"');
+ }
+ return stringBuilder.toString();
+ }
+
+    private static void appendVersionList(StringBuilder stringBuilder, List<String> versions) {
+ if (CollectionUtils.isEmpty(versions)) {
+ return;
+ }
+
+ stringBuilder.append(versions.getFirst());
+ for (int i = 1; i < versions.size(); i++) {
+ stringBuilder.append('.');
+ stringBuilder.append(versions.get(i));
+ }
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/hooks/FiftyOneDeviceDetectionEntrypointHook.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/hooks/FiftyOneDeviceDetectionEntrypointHook.java
new file mode 100644
index 00000000000..44788286dd3
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/hooks/FiftyOneDeviceDetectionEntrypointHook.java
@@ -0,0 +1,43 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.hooks;
+
+import io.vertx.core.Future;
+import org.prebid.server.hooks.execution.v1.InvocationResultImpl;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.boundary.CollectedEvidence;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.model.ModuleContext;
+import org.prebid.server.hooks.v1.InvocationAction;
+import org.prebid.server.hooks.v1.InvocationContext;
+import org.prebid.server.hooks.v1.InvocationResult;
+import org.prebid.server.hooks.v1.InvocationStatus;
+import org.prebid.server.hooks.v1.entrypoint.EntrypointHook;
+import org.prebid.server.hooks.v1.entrypoint.EntrypointPayload;
+
+public class FiftyOneDeviceDetectionEntrypointHook implements EntrypointHook {
+
+ private static final String CODE = "fiftyone-devicedetection-entrypoint-hook";
+
+ @Override
+ public String code() {
+ return CODE;
+ }
+
+ @Override
+    public Future<InvocationResult<EntrypointPayload>> call(
+ EntrypointPayload payload,
+ InvocationContext invocationContext) {
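+        // Only collect evidence here: store the raw request headers in the module context
+        // and leave the payload untouched; enrichment happens in the raw auction request hook.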
+ return Future.succeededFuture(
+                InvocationResultImpl.<EntrypointPayload>builder()
+ .status(InvocationStatus.success)
+ .action(InvocationAction.no_action)
+ .moduleContext(
+ ModuleContext
+ .builder()
+ .collectedEvidence(
+ CollectedEvidence
+ .builder()
+ .rawHeaders(payload.headers().entries())
+ .build()
+ )
+ .build())
+ .build());
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/hooks/FiftyOneDeviceDetectionRawAuctionRequestHook.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/hooks/FiftyOneDeviceDetectionRawAuctionRequestHook.java
new file mode 100644
index 00000000000..a0d91e8bb0a
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/hooks/FiftyOneDeviceDetectionRawAuctionRequestHook.java
@@ -0,0 +1,154 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.hooks;
+
+import com.iab.openrtb.request.BidRequest;
+import com.iab.openrtb.request.Device;
+import com.iab.openrtb.request.UserAgent;
+import io.vertx.core.Future;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.prebid.server.auction.model.AuctionContext;
+import org.prebid.server.hooks.execution.v1.InvocationResultImpl;
+import org.prebid.server.hooks.execution.v1.auction.AuctionRequestPayloadImpl;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.boundary.CollectedEvidence;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config.AccountFilter;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core.DeviceEnricher;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core.EnrichmentResult;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core.SecureHeadersRetriever;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.model.ModuleContext;
+import org.prebid.server.hooks.v1.InvocationAction;
+import org.prebid.server.hooks.v1.InvocationResult;
+import org.prebid.server.hooks.v1.InvocationStatus;
+import org.prebid.server.hooks.v1.auction.AuctionInvocationContext;
+import org.prebid.server.hooks.v1.auction.AuctionRequestPayload;
+import org.prebid.server.hooks.v1.auction.RawAuctionRequestHook;
+import org.prebid.server.settings.model.Account;
+import org.prebid.server.util.ObjectUtil;
+
+import java.util.List;
+import java.util.Optional;
+
+public class FiftyOneDeviceDetectionRawAuctionRequestHook implements RawAuctionRequestHook {
+
+ private static final String CODE = "fiftyone-devicedetection-raw-auction-request-hook";
+
+ private final AccountFilter accountFilter;
+ private final DeviceEnricher deviceEnricher;
+
+ public FiftyOneDeviceDetectionRawAuctionRequestHook(AccountFilter accountFilter, DeviceEnricher deviceEnricher) {
+ this.accountFilter = accountFilter;
+ this.deviceEnricher = deviceEnricher;
+ }
+
+ @Override
+ public String code() {
+ return CODE;
+ }
+
+ @Override
+    public Future<InvocationResult<AuctionRequestPayload>> call(AuctionRequestPayload payload,
+ AuctionInvocationContext invocationContext) {
+ final ModuleContext oldModuleContext = (ModuleContext) invocationContext.moduleContext();
+
+ if (shouldSkipEnriching(payload, invocationContext)) {
+ return Future.succeededFuture(
+                    InvocationResultImpl.<AuctionRequestPayload>builder()
+ .status(InvocationStatus.success)
+ .action(InvocationAction.no_action)
+ .moduleContext(oldModuleContext)
+ .build());
+ }
+
+ final ModuleContext moduleContext = addEvidenceToContext(
+ oldModuleContext,
+ payload.bidRequest());
+
+ return Future.succeededFuture(
+                InvocationResultImpl.<AuctionRequestPayload>builder()
+ .status(InvocationStatus.success)
+ .action(InvocationAction.update)
+ .payloadUpdate(freshPayload -> updatePayload(freshPayload, moduleContext.collectedEvidence()))
+ .moduleContext(moduleContext)
+ .build()
+ );
+ }
+
+ private boolean shouldSkipEnriching(AuctionRequestPayload payload, AuctionInvocationContext invocationContext) {
+ if (!isAccountAllowed(invocationContext)) {
+ return true;
+ }
+ final Device device = ObjectUtil.getIfNotNull(payload.bidRequest(), BidRequest::getDevice);
+ return device != null && DeviceEnricher.shouldSkipEnriching(device);
+ }
+
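+    // An empty or missing allow list means enrichment is enabled for every account.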
+ private boolean isAccountAllowed(AuctionInvocationContext invocationContext) {
+        final List<String> allowList = ObjectUtil.getIfNotNull(accountFilter, AccountFilter::getAllowList);
+ if (CollectionUtils.isEmpty(allowList)) {
+ return true;
+ }
+ return Optional.ofNullable(invocationContext)
+ .map(AuctionInvocationContext::auctionContext)
+ .map(AuctionContext::getAccount)
+ .map(Account::getId)
+ .filter(StringUtils::isNotBlank)
+ .map(allowList::contains)
+ .orElse(false);
+ }
+
+ private ModuleContext addEvidenceToContext(ModuleContext moduleContext, BidRequest bidRequest) {
+ final CollectedEvidence.CollectedEvidenceBuilder evidenceBuilder = Optional.ofNullable(moduleContext)
+ .map(ModuleContext::collectedEvidence)
+ .map(CollectedEvidence::toBuilder)
+ .orElseGet(CollectedEvidence::builder);
+
+ collectEvidence(evidenceBuilder, bidRequest);
+
+ return Optional.ofNullable(moduleContext)
+ .map(ModuleContext::toBuilder)
+ .orElseGet(ModuleContext::builder)
+ .collectedEvidence(evidenceBuilder.build())
+ .build();
+ }
+
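+    // Gathers detection evidence from the bid request: the plain User-Agent string (device.ua)
+    // and the structured user agent (device.sua) converted to Sec-CH-UA header values.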
+ private void collectEvidence(CollectedEvidence.CollectedEvidenceBuilder evidenceBuilder, BidRequest bidRequest) {
+ final Device device = ObjectUtil.getIfNotNull(bidRequest, BidRequest::getDevice);
+ if (device == null) {
+ return;
+ }
+ final String ua = device.getUa();
+ if (ua != null) {
+ evidenceBuilder.deviceUA(ua);
+ }
+ final UserAgent sua = device.getSua();
+ if (sua != null) {
+ evidenceBuilder.secureHeaders(SecureHeadersRetriever.retrieveFrom(sua));
+ }
+ }
+
+ private AuctionRequestPayload updatePayload(AuctionRequestPayload existingPayload,
+ CollectedEvidence collectedEvidence) {
+ final BidRequest currentRequest = existingPayload.bidRequest();
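+        // Enrichment is best-effort: any failure leaves the original payload unchanged.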
+ try {
+ final BidRequest patchedRequest = enrichDevice(currentRequest, collectedEvidence);
+ return patchedRequest == null ? existingPayload : AuctionRequestPayloadImpl.of(patchedRequest);
+ } catch (Exception ignored) {
+ return existingPayload;
+ }
+ }
+
+ private BidRequest enrichDevice(BidRequest bidRequest, CollectedEvidence collectedEvidence) throws Exception {
+ if (bidRequest == null) {
+ return null;
+ }
+
+ final CollectedEvidence.CollectedEvidenceBuilder evidenceBuilder = collectedEvidence.toBuilder();
+ collectEvidence(evidenceBuilder, bidRequest);
+
+ final EnrichmentResult mergeResult = deviceEnricher.populateDeviceInfo(
+ bidRequest.getDevice(),
+ evidenceBuilder.build());
+ return Optional.ofNullable(mergeResult)
+ .map(EnrichmentResult::enrichedDevice)
+ .map(mergedDevice -> bidRequest.toBuilder().device(mergedDevice).build())
+ .orElse(null);
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/model/ModuleContext.java b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/model/ModuleContext.java
new file mode 100644
index 00000000000..2ec7af61bf5
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/model/ModuleContext.java
@@ -0,0 +1,8 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.model;
+
+import lombok.Builder;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.boundary.CollectedEvidence;
+
+@Builder(toBuilder = true)
+public record ModuleContext(CollectedEvidence collectedEvidence) {
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/main/resources/module-config/fiftyone-devicedetection.yaml b/extra/modules/fiftyone-devicedetection/src/main/resources/module-config/fiftyone-devicedetection.yaml
new file mode 100644
index 00000000000..c54ab0d86f8
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/main/resources/module-config/fiftyone-devicedetection.yaml
@@ -0,0 +1,21 @@
+hooks:
+ modules:
+ fiftyone-devicedetection:
+ account-filter:
+ allow-list: [] # list of strings
+ data-file:
+ path: ~ # string, REQUIRED, download the sample from https://github.com/51Degrees/device-detection-data/blob/main/51Degrees-LiteV4.1.hash or Enterprise from https://51degrees.com/pricing
+ make-temp-copy: ~ # boolean
+ update:
+ auto: ~ # boolean
+ on-startup: ~ # boolean
+ url: ~ # string
+ license-key: ~ # string
+ watch-file-system: ~ # boolean
+ polling-interval: ~ # int, seconds
+ performance:
+ profile: ~ # string, one of [LowMemory,MaxPerformance,HighPerformance,Balanced,BalancedTemp]
+ concurrency: ~ # int
+ difference: ~ # int
+ allow-unmatched: ~ # boolean
+ drift: ~ # int
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/AccountFilterTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/AccountFilterTest.java
new file mode 100644
index 00000000000..424d08db123
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/AccountFilterTest.java
@@ -0,0 +1,32 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class AccountFilterTest {
+
+    private static final List<String> TEST_ALLOW_LIST = List.of("sister", "cousin");
+
+ @Test
+ public void shouldReturnAllowList() {
+ // given
+ final AccountFilter accountFilter = new AccountFilter();
+ accountFilter.setAllowList(TEST_ALLOW_LIST);
+
+ // when and then
+ assertThat(accountFilter.getAllowList()).isEqualTo(TEST_ALLOW_LIST);
+ }
+
+ @Test
+ public void shouldHaveDescription() {
+ // given
+ final AccountFilter accountFilter = new AccountFilter();
+ accountFilter.setAllowList(TEST_ALLOW_LIST);
+
+ // when and then
+ assertThat(accountFilter.toString()).isNotBlank();
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileTest.java
new file mode 100644
index 00000000000..d56531ef68a
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileTest.java
@@ -0,0 +1,58 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import org.junit.jupiter.api.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class DataFileTest {
+
+ @Test
+ public void shouldReturnPath() {
+ // given
+ final String path = "/path/to/file.txt";
+
+ // when
+ final DataFile dataFile = new DataFile();
+ dataFile.setPath(path);
+
+ // then
+ assertThat(dataFile.getPath()).isEqualTo(path);
+ }
+
+ @Test
+ public void shouldReturnMakeTempCopy() {
+ // given
+ final boolean makeCopy = true;
+
+ // when
+ final DataFile dataFile = new DataFile();
+ dataFile.setMakeTempCopy(makeCopy);
+
+ // then
+ assertThat(dataFile.getMakeTempCopy()).isEqualTo(makeCopy);
+ }
+
+ @Test
+ public void shouldReturnUpdate() {
+ // given
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setUrl("www.void");
+
+ // when
+ final DataFile dataFile = new DataFile();
+ dataFile.setUpdate(dataFileUpdate);
+
+ // then
+ assertThat(dataFile.getUpdate()).isEqualTo(dataFileUpdate);
+ }
+
+ @Test
+ public void shouldHaveDescription() {
+ // given
+ final DataFile dataFile = new DataFile();
+ dataFile.setPath("/etc/null");
+
+ // when and then
+ assertThat(dataFile.toString()).isNotBlank();
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileUpdateTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileUpdateTest.java
new file mode 100644
index 00000000000..fa3790e3261
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/DataFileUpdateTest.java
@@ -0,0 +1,96 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import org.junit.jupiter.api.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class DataFileUpdateTest {
+
+ @Test
+ public void shouldReturnAuto() {
+ // given
+ final boolean value = true;
+
+ // when
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setAuto(value);
+
+ // then
+ assertThat(dataFileUpdate.getAuto()).isEqualTo(value);
+ }
+
+ @Test
+ public void shouldReturnOnStartup() {
+ // given
+ final boolean value = true;
+
+ // when
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setOnStartup(value);
+
+ // then
+ assertThat(dataFileUpdate.getOnStartup()).isEqualTo(value);
+ }
+
+ @Test
+ public void shouldReturnUrl() {
+ // given
+ final String value = "/path/to/file.txt";
+
+ // when
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setUrl(value);
+
+ // then
+ assertThat(dataFileUpdate.getUrl()).isEqualTo(value);
+ }
+
+ @Test
+ public void shouldReturnLicenseKey() {
+ // given
+ final String value = "/path/to/file.txt";
+
+ // when
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setLicenseKey(value);
+
+ // then
+ assertThat(dataFileUpdate.getLicenseKey()).isEqualTo(value);
+ }
+
+ @Test
+ public void shouldReturnWatchFileSystem() {
+ // given
+ final boolean value = true;
+
+ // when
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setWatchFileSystem(value);
+
+ // then
+ assertThat(dataFileUpdate.getWatchFileSystem()).isEqualTo(value);
+ }
+
+ @Test
+ public void shouldReturnPollingInterval() {
+ // given
+ final int value = 42;
+
+ // when
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setPollingInterval(value);
+
+ // then
+ assertThat(dataFileUpdate.getPollingInterval()).isEqualTo(value);
+ }
+
+ @Test
+ public void shouldHaveDescription() {
+ // given
+ final DataFileUpdate dataFileUpdate = new DataFileUpdate();
+ dataFileUpdate.setPollingInterval(29);
+
+ // when and then
+ assertThat(dataFileUpdate.toString()).isNotBlank();
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/ModuleConfigTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/ModuleConfigTest.java
new file mode 100644
index 00000000000..3157c9167d2
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/ModuleConfigTest.java
@@ -0,0 +1,66 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.Collections;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class ModuleConfigTest {
+
+ @Test
+ public void shouldReturnAccountFilter() {
+ // given
+ final AccountFilter accountFilter = new AccountFilter();
+ accountFilter.setAllowList(Collections.singletonList("raccoon"));
+
+ // when
+ final ModuleConfig moduleConfig = new ModuleConfig();
+ moduleConfig.setAccountFilter(accountFilter);
+
+ // then
+ assertThat(moduleConfig.getAccountFilter()).isEqualTo(accountFilter);
+ }
+
+ @Test
+ public void shouldReturnDataFile() {
+ // given
+ final DataFile dataFile = new DataFile();
+ dataFile.setPath("B:\\archive");
+
+ // when
+ final ModuleConfig moduleConfig = new ModuleConfig();
+ moduleConfig.setDataFile(dataFile);
+
+ // then
+ assertThat(moduleConfig.getDataFile()).isEqualTo(dataFile);
+ }
+
+ @Test
+ public void shouldReturnPerformanceConfig() {
+ // given
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setProfile("SilentHunter");
+
+ // when
+ final ModuleConfig moduleConfig = new ModuleConfig();
+ moduleConfig.setPerformance(performanceConfig);
+
+ // then
+ assertThat(moduleConfig.getPerformance()).isEqualTo(performanceConfig);
+ }
+
+ @Test
+ public void shouldHaveDescription() {
+ // given
+ final DataFile dataFile = new DataFile();
+ dataFile.setPath("Z:\\virtual-drive");
+
+ // when
+ final ModuleConfig moduleConfig = new ModuleConfig();
+ moduleConfig.setDataFile(dataFile);
+
+        // then
+ assertThat(moduleConfig.toString()).isNotBlank();
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/PerformanceConfigTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/PerformanceConfigTest.java
new file mode 100644
index 00000000000..829f5298fa1
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/model/config/PerformanceConfigTest.java
@@ -0,0 +1,83 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.model.config;
+
+import org.junit.jupiter.api.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class PerformanceConfigTest {
+
+ @Test
+ public void shouldReturnProfile() {
+ // given
+ final String profile = "TurtleSlow";
+
+ // when
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setProfile(profile);
+
+ // then
+ assertThat(performanceConfig.getProfile()).isEqualTo(profile);
+ }
+
+ @Test
+ public void shouldReturnConcurrency() {
+ // given
+ final int concurrency = 5438;
+
+ // when
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setConcurrency(concurrency);
+
+ // then
+ assertThat(performanceConfig.getConcurrency()).isEqualTo(concurrency);
+ }
+
+ @Test
+ public void shouldReturnDifference() {
+ // given
+ final int difference = 5438;
+
+ // when
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setDifference(difference);
+
+ // then
+ assertThat(performanceConfig.getDifference()).isEqualTo(difference);
+ }
+
+ @Test
+ public void shouldReturnAllowUnmatched() {
+ // given
+ final boolean allowUnmatched = true;
+
+ // when
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setAllowUnmatched(allowUnmatched);
+
+ // then
+ assertThat(performanceConfig.getAllowUnmatched()).isEqualTo(allowUnmatched);
+ }
+
+ @Test
+ public void shouldReturnDrift() {
+ // given
+ final int drift = 8624;
+
+ // when
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setDrift(drift);
+
+ // then
+ assertThat(performanceConfig.getDrift()).isEqualTo(drift);
+ }
+
+ @Test
+ public void shouldHaveDescription() {
+        // given
+ final PerformanceConfig performanceConfig = new PerformanceConfig();
+ performanceConfig.setProfile("LightningFast");
+
+ // when and then
+ assertThat(performanceConfig.toString()).isNotBlank();
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/FiftyOneDeviceDetectionModuleTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/FiftyOneDeviceDetectionModuleTest.java
new file mode 100644
index 00000000000..e3ca77b5e24
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/FiftyOneDeviceDetectionModuleTest.java
@@ -0,0 +1,33 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1;
+
+import org.junit.jupiter.api.Test;
+import org.prebid.server.hooks.v1.Hook;
+import org.prebid.server.hooks.v1.InvocationContext;
+import org.prebid.server.hooks.v1.Module;
+
+import java.util.Collection;
+import java.util.Collections;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class FiftyOneDeviceDetectionModuleTest {
+
+ @Test
+ public void shouldReturnNonBlankCode() {
+ // given
+ final Module module = new FiftyOneDeviceDetectionModule(null);
+
+ // when and then
+ assertThat(module.code()).isNotBlank();
+ }
+
+ @Test
+ public void shouldReturnSavedHooks() {
+ // given
+        final Collection<? extends Hook<?, ? extends InvocationContext>> hooks = Collections.emptyList();
+ final Module module = new FiftyOneDeviceDetectionModule(hooks);
+
+ // when and then
+ assertThat(module.hooks()).isEqualTo(hooks);
+ }
+}
diff --git a/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/DeviceEnricherTest.java b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/DeviceEnricherTest.java
new file mode 100644
index 00000000000..0aa2610f62c
--- /dev/null
+++ b/extra/modules/fiftyone-devicedetection/src/test/java/org/prebid/server/hooks/modules/fiftyone/devicedetection/v1/core/DeviceEnricherTest.java
@@ -0,0 +1,643 @@
+package org.prebid.server.hooks.modules.fiftyone.devicedetection.v1.core;
+
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.iab.openrtb.request.Device;
+import fiftyone.devicedetection.shared.DeviceData;
+import fiftyone.pipeline.core.data.FlowData;
+import fiftyone.pipeline.core.flowelements.Pipeline;
+import fiftyone.pipeline.engines.data.AspectPropertyValue;
+import fiftyone.pipeline.engines.exceptions.NoValueException;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.prebid.server.hooks.modules.fiftyone.devicedetection.model.boundary.CollectedEvidence;
+import org.prebid.server.proto.openrtb.ext.request.ExtDevice;
+
+import java.math.BigDecimal;
+import java.util.AbstractMap;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.Mock.Strictness.LENIENT;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@ExtendWith(MockitoExtension.class)
+public class DeviceEnricherTest {
+
+ @Mock(strictness = LENIENT)
+ private Pipeline pipeline;
+
+ @Mock(strictness = LENIENT)
+ private FlowData flowData;
+
+ @Mock(strictness = LENIENT)
+ private DeviceData deviceData;
+
+ private DeviceEnricher target;
+
+ @BeforeEach
+ public void setUp() {
+ when(pipeline.createFlowData()).thenReturn(flowData);
+ when(flowData.get(DeviceData.class)).thenReturn(deviceData);
+ target = new DeviceEnricher(pipeline);
+ }
+
+ @Test
+ public void shouldSkipEnrichingShouldReturnFalseWhenExtIsNull() {
+ // given
+ final Device device = Device.builder().build();
+
+ // when and then
+ assertThat(DeviceEnricher.shouldSkipEnriching(device)).isFalse();
+ }
+
+ @Test
+ public void shouldSkipEnrichingShouldReturnFalseWhenExtIsEmpty() {
+ // given
+ final ExtDevice ext = ExtDevice.empty();
+ final Device device = Device.builder().ext(ext).build();
+
+ // when and then
+ assertThat(DeviceEnricher.shouldSkipEnriching(device)).isFalse();
+ }
+
+ @Test
+ public void shouldSkipEnrichingShouldReturnTrueWhenExtContainsProfileID() {
+ // given
+ final ExtDevice ext = ExtDevice.empty();
+ ext.addProperty("fiftyonedegrees_deviceId", new TextNode("0-0-0-0"));
+ final Device device = Device.builder().ext(ext).build();
+
+ // when and then
+ assertThat(DeviceEnricher.shouldSkipEnriching(device)).isTrue();
+ }
+
+ @Test
+ public void populateDeviceInfoShouldReportErrorWhenPipelineThrowsException() {
+ // given
+ final Exception e = new RuntimeException();
+ when(pipeline.createFlowData()).thenThrow(e);
+
+ // when and then
+ assertThatThrownBy(() -> target.populateDeviceInfo(null, null)).isEqualTo(e);
+ }
+
+ @Test
+ public void populateDeviceInfoShouldReportErrorWhenProcessThrowsException() {
+ // given
+ final Exception e = new RuntimeException();
+ doThrow(e).when(flowData).process();
+ final CollectedEvidence collectedEvidence = CollectedEvidence.builder().build();
+
+ // when and then
+ assertThatThrownBy(() -> target.populateDeviceInfo(null, collectedEvidence)).isEqualTo(e);
+ }
+
+ @Test
+ public void populateDeviceInfoShouldReturnNullWhenDeviceDataIsNull() throws Exception {
+ // given
+ when(flowData.get(DeviceData.class)).thenReturn(null);
+ final CollectedEvidence collectedEvidence = CollectedEvidence.builder().build();
+
+ // when
+ final EnrichmentResult result = target.populateDeviceInfo(
+ null,
+ collectedEvidence);
+
+ // then
+ assertThat(result).isNull();
+ verify(flowData, times(1)).get(DeviceData.class);
+ }
+
+ @Test
+ public void populateDeviceInfoShouldPassToFlowDataHeadersMadeFromSuaWhenPresent() throws Exception {
+ // given
+        final Map<String, String> secureHeaders = Collections.singletonMap("ua", "fake-ua");
+ final CollectedEvidence collectedEvidence = CollectedEvidence.builder()
+ .secureHeaders(secureHeaders)
+ .rawHeaders(Collections.singletonMap("ua", "zumba").entrySet())
+ .build();
+
+ // when
+ target.populateDeviceInfo(null, collectedEvidence);
+
+ // then
+ final ArgumentCaptor