diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d4c8bde..d0e43cb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -15,30 +15,250 @@ jobs:
java-version: [11, 17, 21]
steps:
- - name: Checkout code
- uses: actions/checkout@v4
-
- - name: Set up JDK ${{ matrix.java-version }}
- uses: actions/setup-java@v4
- with:
- java-version: ${{ matrix.java-version }}
- distribution: 'temurin'
-
- - name: Cache Gradle dependencies
- uses: actions/cache@v4
- with:
- path: |
- ~/.gradle/caches
- ~/.gradle/wrapper
- key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
- restore-keys: |
- ${{ runner.os }}-gradle-
-
- - name: Make gradlew executable
- run: chmod +x ./gradlew
-
- - name: Run tests
- run: ./gradlew test
-
- - name: Run build
- run: ./gradlew build
\ No newline at end of file
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Set up JDK ${{ matrix.java-version }}
+ uses: actions/setup-java@v4
+ with:
+ java-version: ${{ matrix.java-version }}
+ distribution: 'temurin'
+
+ - name: Cache Gradle dependencies
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.gradle/caches
+ ~/.gradle/wrapper
+ key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+ restore-keys: |
+ ${{ runner.os }}-gradle-
+
+ - name: Make gradlew executable
+ run: chmod +x ./gradlew
+
+ - name: Run tests
+ run: ./gradlew test
+
+ - name: Run build
+ run: ./gradlew build
+
+ benchmark:
+ runs-on: ubuntu-latest
+ needs: test
+ # Add explicit permissions for commenting on PRs
+ permissions:
+ contents: read
+ pull-requests: write
+ issues: write
+ # Only run benchmarks on main branch pushes and PRs to main to avoid excessive CI time
+ if: github.ref == 'refs/heads/main' || github.base_ref == 'main'
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Set up JDK 11
+ uses: actions/setup-java@v4
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+
+ - name: Cache Gradle dependencies
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.gradle/caches
+ ~/.gradle/wrapper
+ key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+ restore-keys: |
+ ${{ runner.os }}-gradle-
+
+ - name: Make gradlew executable
+ run: chmod +x ./gradlew
+
+ - name: Create benchmark results directory
+ run: mkdir -p benchmark-results
+
+ - name: Run serialization benchmarks
+ run: |
+ ./gradlew jmhRunSerializationBenchmarks
+ continue-on-error: true
+
+ - name: Run deserialization benchmarks
+ run: |
+ ./gradlew jmhRunDeserializationBenchmarks
+ continue-on-error: true
+
+ - name: Run field access benchmarks
+ run: |
+ ./gradlew jmhRunFieldAccessBenchmarks
+ continue-on-error: true
+
+ - name: Run size comparison benchmarks
+ run: |
+ ./gradlew jmhRunSizeComparisonBenchmarks
+ continue-on-error: true
+
+ - name: Upload benchmark results
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: benchmark-results-${{ github.sha }}
+ path: benchmark-results/
+ retention-days: 30
+
+ - name: Comment benchmark results on PR
+ if: github.event_name == 'pull_request'
+ uses: actions/github-script@v7
+ continue-on-error: true
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ script: |
+ try {
+ const fs = require('fs');
+ const path = require('path');
+
+ // Find the latest benchmark results file
+ const resultsDir = 'benchmark-results';
+ let latestFile = null;
+ let latestTime = 0;
+
+ if (fs.existsSync(resultsDir)) {
+ const files = fs.readdirSync(resultsDir);
+ for (const file of files) {
+ if (file.endsWith('.json')) {
+ const filePath = path.join(resultsDir, file);
+ const stats = fs.statSync(filePath);
+ if (stats.mtime.getTime() > latestTime) {
+ latestTime = stats.mtime.getTime();
+ latestFile = filePath;
+ }
+ }
+ }
+ }
+
+ if (latestFile) {
+              console.log(`📊 Found benchmark results: ${latestFile}`);
+ const results = JSON.parse(fs.readFileSync(latestFile, 'utf8'));
+
+ // Group results by benchmark type
+              const serialization = results.filter(r => r.benchmark.includes('.serialize'));
+ const deserialization = results.filter(r => r.benchmark.includes('deserialize'));
+ const fieldAccess = results.filter(r => r.benchmark.includes('singleFieldAccess'));
+ const sizes = results.filter(r => r.benchmark.includes('measure'));
+
+ // Format results into a table
+ const formatResults = (benchmarks, title) => {
+ if (benchmarks.length === 0) return '';
+
+ let table = `\n### ${title}\n\n| Library | Score (ns/op) | Error | Unit |\n|---------|---------------|-------|------|\n`;
+
+ benchmarks
+ .sort((a, b) => a.primaryMetric.score - b.primaryMetric.score)
+ .forEach(benchmark => {
+ const name = benchmark.benchmark.split('.').pop().replace(/serialize|deserialize|singleFieldAccess|measure/, '').replace(/Imprint|JacksonJson|Kryo|MessagePack|Avro|Protobuf|FlatBuffers/, (match) => match);
+ const score = benchmark.primaryMetric.score.toFixed(2);
+ const error = benchmark.primaryMetric.scoreError.toFixed(2);
+ const unit = benchmark.primaryMetric.scoreUnit;
+ table += `| ${name} | ${score} | ±${error} | ${unit} |\n`;
+ });
+
+ return table;
+ };
+
+              const comment = `## 📊 Benchmark Results
+
+ Benchmark comparison between Imprint and other serialization libraries:
+ ${formatResults(serialization, 'Serialization Performance')}
+ ${formatResults(deserialization, 'Deserialization Performance')}
+ ${formatResults(fieldAccess, 'Single Field Access Performance')}
+ ${formatResults(sizes, 'Serialized Size Comparison')}
+
+
+ View detailed results
+
+ Results generated from commit: \`${context.sha.substring(0, 7)}\`
+
+ Lower scores are better for performance benchmarks.
+
+ `;
+
+ await github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: comment
+ });
+
+              console.log('✅ Successfully posted benchmark results to PR');
+ } else {
+              console.log('⚠️ No benchmark results found');
+ await github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+                body: '## 📊 Benchmark Results\n\nBenchmark execution completed but no results file was found. Check the [workflow logs](' +
+ `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}` + ') for details.'
+ });
+ }
+ } catch (error) {
+            console.log('❌ Failed to post benchmark comment:', error.message);
+            console.log('📊 Benchmark results are still available in workflow artifacts');
+
+ // Try to post a simple error message
+ try {
+ await github.rest.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+                body: `## 📊 Benchmark Results\n\n⚠️ Failed to process benchmark results automatically.\n\nResults are available in the [workflow artifacts](https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}).`
+ });
+ } catch (commentError) {
+              console.log('❌ Also failed to post error comment:', commentError.message);
+ }
+ }
+
+ # Optional: Run full benchmark suite on releases
+ benchmark-full:
+ runs-on: ubuntu-latest
+ if: startsWith(github.ref, 'refs/tags/')
+ permissions:
+ contents: read
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Set up JDK 11
+ uses: actions/setup-java@v4
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+
+ - name: Cache Gradle dependencies
+ uses: actions/cache@v4
+ with:
+ path: |
+ ~/.gradle/caches
+ ~/.gradle/wrapper
+ key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+ restore-keys: |
+ ${{ runner.os }}-gradle-
+
+ - name: Make gradlew executable
+ run: chmod +x ./gradlew
+
+ - name: Create benchmark results directory
+ run: mkdir -p benchmark-results
+
+ - name: Run full benchmark suite
+ run: |
+ ./gradlew jmhRunAllBenchmarks
+
+ - name: Upload full benchmark results
+ uses: actions/upload-artifact@v4
+ with:
+ name: full-benchmark-results-${{ github.ref_name }}
+ path: benchmark-results/
+ retention-days: 90
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index 2606710..d9093f9 100644
--- a/build.gradle
+++ b/build.gradle
@@ -2,6 +2,8 @@ plugins {
id 'java-library'
id 'maven-publish'
id 'me.champeau.jmh' version '0.7.2'
+ id 'com.google.protobuf' version '0.9.4'
+ id 'io.netifi.flatbuffers' version '1.0.7'
}
group = 'com.imprint'
@@ -23,45 +25,290 @@ dependencies {
// Lombok for reducing boilerplate
compileOnly 'org.projectlombok:lombok:1.18.30'
annotationProcessor 'org.projectlombok:lombok:1.18.30'
-
+
// Test dependencies
testImplementation 'org.junit.jupiter:junit-jupiter:5.10.0'
testImplementation 'org.assertj:assertj-core:3.24.2'
testImplementation 'org.mockito:mockito-core:5.5.0'
-
+
// Lombok for tests
testCompileOnly 'org.projectlombok:lombok:1.18.30'
testAnnotationProcessor 'org.projectlombok:lombok:1.18.30'
-
+
// Performance testing with JMH
jmhImplementation 'org.openjdk.jmh:jmh-core:1.37'
jmhAnnotationProcessor 'org.openjdk.jmh:jmh-generator-annprocess:1.37'
-
- // Competitor libraries for benchmarking
+
+ // Suppress SLF4J warnings
+ jmhImplementation 'org.slf4j:slf4j-nop:1.7.36'
+
+ // Competitor libraries for benchmarking (JMH only)
jmhImplementation 'com.google.protobuf:protobuf-java:3.25.1'
jmhImplementation 'org.apache.avro:avro:1.11.3'
jmhImplementation 'com.fasterxml.jackson.core:jackson-databind:2.16.0'
jmhImplementation 'com.google.flatbuffers:flatbuffers-java:23.5.26'
jmhImplementation 'com.esotericsoftware:kryo:5.4.0'
+ jmhImplementation 'org.msgpack:msgpack-core:0.9.8'
+ jmhImplementation 'org.msgpack:jackson-dataformat-msgpack:0.9.8'
+}
+
+protobuf {
+ protoc {
+ artifact = "com.google.protobuf:protoc:3.25.1"
+ }
+ generateProtoTasks {
+ // Only generate for JMH, not main
+ all().each { task ->
+ task.enabled = false
+ }
+ ofSourceSet('jmh').each { task ->
+ task.enabled = true
+ task.builtins {
+ java {
+ outputSubDir = 'java'
+ }
+ }
+ }
+ }
+}
+
+// Download and setup FlatBuffers compiler for Linux (CI environment)
+tasks.register('downloadFlatc') {
+ description = 'Download FlatBuffers compiler'
+ group = 'build setup'
+
+ def flatcDir = file("${buildDir}/flatc")
+ def flatcExe = file("${flatcDir}/flatc")
+ def isWindows = System.getProperty('os.name').toLowerCase().contains('windows')
+ def flatcUrl = isWindows ?
+ "https://github.com/google/flatbuffers/releases/download/v23.5.26/Windows.flatc.binary.zip" :
+ "https://github.com/google/flatbuffers/releases/download/v23.5.26/Linux.flatc.binary.clang++-12.zip"
+ def flatcZip = file("${buildDir}/flatc.zip")
+
+ outputs.file(flatcExe)
+
+ doLast {
+ if (!flatcExe.exists()) {
+ println "Downloading FlatBuffers compiler for ${isWindows ? 'Windows' : 'Linux'}..."
+ flatcDir.mkdirs()
+
+ // Download
+ new URL(flatcUrl).withInputStream { i ->
+ flatcZip.withOutputStream { it << i }
+ }
+
+ // Extract
+ copy {
+ from zipTree(flatcZip)
+ into flatcDir
+ }
+
+ // Make executable on Unix systems
+ if (!isWindows) {
+ exec {
+ commandLine 'chmod', '+x', flatcExe.absolutePath
+ }
+ }
+
+ flatcZip.delete()
+ println "FlatBuffers compiler downloaded to: ${flatcExe}"
+ }
+ }
+}
+
+// Generate FlatBuffers sources
+tasks.register('generateFlatBuffers', Exec) {
+ dependsOn downloadFlatc
+ description = 'Generate Java classes from FlatBuffers schema'
+ group = 'build'
+
+ def isWindows = System.getProperty('os.name').toLowerCase().contains('windows')
+ def flatcExe = file("${buildDir}/flatc/${isWindows ? 'flatc.exe' : 'flatc'}")
+ def schemaFile = file('src/jmh/flatbuffers/test_record.fbs')
+ def outputDir = file('build/generated/source/flatbuffers/jmh/java')
+
+ commandLine flatcExe.absolutePath, '--java', '-o', outputDir.absolutePath, schemaFile.absolutePath
+
+ inputs.file(schemaFile)
+ outputs.dir(outputDir)
+
+ doFirst {
+ outputDir.mkdirs()
+ }
+}
+
+// Add generated FlatBuffers sources to JMH source set
+sourceSets {
+ jmh {
+ java {
+ srcDir 'build/generated/source/flatbuffers/jmh/java'
+ }
+ proto {
+ srcDir 'src/jmh/proto'
+ }
+ }
+}
+
+// Make JMH compilation depend on FlatBuffers generation
+compileJmhJava.dependsOn generateFlatBuffers
+
+// Handle duplicate proto files
+tasks.named('processJmhResources') {
+ duplicatesStrategy = DuplicatesStrategy.EXCLUDE
}
test {
useJUnitPlatform()
-
+
// Enable detailed test output
testLogging {
events "passed", "skipped", "failed"
}
}
-// JMH configuration
+// JMH configuration - optimized for Java 11
jmh {
fork = 1
- warmupIterations = 3
- iterations = 3
+ warmupIterations = 2 // Reduced for faster CI
+ iterations = 3 // Reduced for faster CI
resultFormat = 'JSON'
includeTests = false
resultsFile = file("${projectDir}/benchmark-results/jmh-results-${new Date().format('yyyy-MM-dd-HHmmss')}.json")
+
+ // Java 11 specific JVM args
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions',
+ '-XX:+UseJVMCICompiler'
+ ]
+}
+
+// Create individual benchmark tasks for CI pipeline
+tasks.register('jmhRunSerializationBenchmarks', JavaExec) {
+ dependsOn compileJmhJava
+ description = 'Run serialization benchmarks'
+ group = 'benchmarking'
+
+ classpath = sourceSets.jmh.runtimeClasspath
+ mainClass = 'com.imprint.benchmark.ComparisonBenchmark'
+ args = ['runSerializationBenchmarks']
+
+ // Java 11 optimized JVM settings
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions'
+ ]
+
+ doFirst {
+ file("${projectDir}/benchmark-results").mkdirs()
+ }
+}
+
+tasks.register('jmhRunDeserializationBenchmarks', JavaExec) {
+ dependsOn compileJmhJava
+ description = 'Run deserialization benchmarks'
+ group = 'benchmarking'
+
+ classpath = sourceSets.jmh.runtimeClasspath
+ mainClass = 'com.imprint.benchmark.ComparisonBenchmark'
+ args = ['runDeserializationBenchmarks']
+
+ // Java 11 optimized JVM settings
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions'
+ ]
+
+ doFirst {
+ file("${projectDir}/benchmark-results").mkdirs()
+ }
+}
+
+tasks.register('jmhRunFieldAccessBenchmarks', JavaExec) {
+ dependsOn compileJmhJava
+ description = 'Run field access benchmarks'
+ group = 'benchmarking'
+
+ classpath = sourceSets.jmh.runtimeClasspath
+ mainClass = 'com.imprint.benchmark.ComparisonBenchmark'
+ args = ['runFieldAccessBenchmarks']
+
+ // Java 11 optimized JVM settings
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions'
+ ]
+
+ doFirst {
+ file("${projectDir}/benchmark-results").mkdirs()
+ }
+}
+
+tasks.register('jmhRunSizeComparisonBenchmarks', JavaExec) {
+ dependsOn compileJmhJava
+ description = 'Run size comparison benchmarks'
+ group = 'benchmarking'
+
+ classpath = sourceSets.jmh.runtimeClasspath
+ mainClass = 'com.imprint.benchmark.ComparisonBenchmark'
+ args = ['runSizeComparisonBenchmarks']
+
+ // Java 11 optimized JVM settings
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions'
+ ]
+
+ doFirst {
+ file("${projectDir}/benchmark-results").mkdirs()
+ }
+}
+
+tasks.register('jmhRunMergeBenchmarks', JavaExec) {
+ dependsOn compileJmhJava
+ description = 'Run merge operation benchmarks'
+ group = 'benchmarking'
+
+ classpath = sourceSets.jmh.runtimeClasspath
+ mainClass = 'com.imprint.benchmark.ComparisonBenchmark'
+ args = ['runMergeBenchmarks']
+
+ // Java 11 optimized JVM settings
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions'
+ ]
+
+ doFirst {
+ file("${projectDir}/benchmark-results").mkdirs()
+ }
+}
+
+tasks.register('jmhRunAllBenchmarks', JavaExec) {
+ dependsOn compileJmhJava
+ description = 'Run all comparison benchmarks'
+ group = 'benchmarking'
+
+ classpath = sourceSets.jmh.runtimeClasspath
+ mainClass = 'com.imprint.benchmark.ComparisonBenchmark'
+ args = ['runAll']
+
+ // Java 11 optimized JVM settings
+ jvmArgs = [
+ '-XX:+UseG1GC',
+ '-Xmx2g',
+ '-XX:+UnlockExperimentalVMOptions'
+ ]
+
+ doFirst {
+ file("${projectDir}/benchmark-results").mkdirs()
+ }
}
compileJava {
@@ -75,4 +322,4 @@ javadoc {
}
// Don't fail build on missing javadoc
options.addStringOption('Xdoclint:none', '-quiet')
-}
+}
\ No newline at end of file
diff --git a/src/jmh/flatbuffers/test_record.fbs b/src/jmh/flatbuffers/test_record.fbs
new file mode 100644
index 0000000..ccc31d0
--- /dev/null
+++ b/src/jmh/flatbuffers/test_record.fbs
@@ -0,0 +1,15 @@
+namespace com.imprint.benchmark;
+
+table TestRecordFB {
+ id: int;
+ name: string;
+ price: double;
+ active: bool;
+ category: string;
+ tags: [string];
+ metadata_keys: [string];
+ metadata_values: [string];
+ extra_data: [string];
+}
+
+root_type TestRecordFB;
\ No newline at end of file
diff --git a/src/jmh/java/com/imprint/benchmark/ComparisonBenchmark.java b/src/jmh/java/com/imprint/benchmark/ComparisonBenchmark.java
index 8163522..ee32ff0 100644
--- a/src/jmh/java/com/imprint/benchmark/ComparisonBenchmark.java
+++ b/src/jmh/java/com/imprint/benchmark/ComparisonBenchmark.java
@@ -4,13 +4,26 @@
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.flatbuffers.FlatBufferBuilder;
import com.imprint.core.ImprintRecord;
import com.imprint.core.ImprintWriter;
import com.imprint.core.SchemaId;
import com.imprint.types.MapKey;
import com.imprint.types.Value;
+import lombok.NoArgsConstructor;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.io.*;
+import org.msgpack.jackson.dataformat.MessagePackFactory;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.infra.Blackhole;
+import org.openjdk.jmh.runner.Runner;
+import org.openjdk.jmh.runner.RunnerException;
+import org.openjdk.jmh.runner.options.Options;
+import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -25,42 +38,59 @@
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
-@Warmup(iterations = 3, time = 1, timeUnit = TimeUnit.SECONDS)
+@Warmup(iterations = 2, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 3, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(1)
+@SuppressWarnings("unused")
public class ComparisonBenchmark {
// Test data
private TestRecord testData;
-
+
// Serialized formats
- private ByteBuffer imprintBytes;
- private byte[] jacksonBytes;
+ private ByteBuffer imprintBytesBuffer;
+ private byte[] jacksonJsonBytes;
private byte[] kryoBytes;
-
+ private byte[] messagePackBytes;
+ private byte[] avroBytes;
+ private byte[] protobufBytes;
+ private ByteBuffer flatbuffersBytes;
+
// Library instances
- private ObjectMapper jackson;
+ private Schema avroSchema;
+ private DatumWriter avroWriter;
+ private DatumReader avroReader;
+ private ObjectMapper jacksonJsonMapper;
private Kryo kryo;
+ private ObjectMapper messagePackMapper;
@Setup
public void setup() throws Exception {
testData = createTestRecord();
-
+
// Initialize libraries
- jackson = new ObjectMapper();
+ jacksonJsonMapper = new ObjectMapper();
kryo = new Kryo();
kryo.register(TestRecord.class);
kryo.register(ArrayList.class);
kryo.register(HashMap.class);
-
+ kryo.register(Arrays.asList().getClass());
+
+ // Initialize MessagePack ObjectMapper
+ messagePackMapper = new ObjectMapper(new MessagePackFactory());
+ setupAvro();
+
// Pre-serialize for deserialization benchmarks
- imprintBytes = serializeWithImprint(testData);
- jacksonBytes = serializeWithJackson(testData);
+ imprintBytesBuffer = serializeWithImprint(testData);
+ jacksonJsonBytes = serializeWithJacksonJson(testData);
kryoBytes = serializeWithKryo(testData);
+ messagePackBytes = serializeWithMessagePack(testData);
+ avroBytes = serializeWithAvro(testData);
+ protobufBytes = serializeWithProtobuf(testData);
+ flatbuffersBytes = serializeWithFlatBuffers(testData);
}
// ===== SERIALIZATION BENCHMARKS =====
-
@Benchmark
public void serializeImprint(Blackhole bh) throws Exception {
ByteBuffer result = serializeWithImprint(testData);
@@ -68,8 +98,8 @@ public void serializeImprint(Blackhole bh) throws Exception {
}
@Benchmark
- public void serializeJackson(Blackhole bh) throws Exception {
- byte[] result = serializeWithJackson(testData);
+ public void serializeJacksonJson(Blackhole bh) throws Exception {
+ byte[] result = serializeWithJacksonJson(testData);
bh.consume(result);
}
@@ -79,17 +109,40 @@ public void serializeKryo(Blackhole bh) {
bh.consume(result);
}
- // ===== DESERIALIZATION BENCHMARKS =====
+ @Benchmark
+ public void serializeMessagePack(Blackhole bh) throws Exception {
+ byte[] result = serializeWithMessagePack(testData);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void serializeAvro(Blackhole bh) throws Exception {
+ byte[] result = serializeWithAvro(testData);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void serializeProtobuf(Blackhole bh) {
+ byte[] result = serializeWithProtobuf(testData);
+ bh.consume(result);
+ }
+ @Benchmark
+ public void serializeFlatBuffers(Blackhole bh) {
+ ByteBuffer result = serializeWithFlatBuffers(testData);
+ bh.consume(result);
+ }
+
+ // ===== DESERIALIZATION BENCHMARKS =====
@Benchmark
public void deserializeImprint(Blackhole bh) throws Exception {
- ImprintRecord result = ImprintRecord.deserialize(imprintBytes.duplicate());
+ ImprintRecord result = ImprintRecord.deserialize(imprintBytesBuffer.duplicate());
bh.consume(result);
}
@Benchmark
- public void deserializeJackson(Blackhole bh) throws Exception {
- TestRecord result = jackson.readValue(jacksonBytes, TestRecord.class);
+ public void deserializeJacksonJson(Blackhole bh) throws Exception {
+ TestRecord result = jacksonJsonMapper.readValue(jacksonJsonBytes, TestRecord.class);
bh.consume(result);
}
@@ -101,135 +154,359 @@ public void deserializeKryo(Blackhole bh) {
bh.consume(result);
}
+ @Benchmark
+ public void deserializeMessagePack(Blackhole bh) throws Exception {
+ TestRecord result = messagePackMapper.readValue(messagePackBytes, TestRecord.class);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void deserializeAvro(Blackhole bh) throws Exception {
+ GenericRecord result = deserializeWithAvro(avroBytes);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void deserializeProtobuf(Blackhole bh) throws Exception {
+ TestRecordProto.TestRecord result = TestRecordProto.TestRecord.parseFrom(protobufBytes);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void deserializeFlatBuffers(Blackhole bh) {
+ TestRecordFB result = TestRecordFB.getRootAsTestRecordFB(flatbuffersBytes.duplicate());
+ bh.consume(result);
+ }
+
// ===== FIELD ACCESS BENCHMARKS =====
// Tests accessing a single field near the end of a large record
@Benchmark
public void singleFieldAccessImprint(Blackhole bh) throws Exception {
- ImprintRecord record = ImprintRecord.deserialize(imprintBytes.duplicate());
-
- // Access field 15 directly via directory lookup - O(1)
- var field15 = record.getValue(15);
+ ImprintRecord record = ImprintRecord.deserialize(imprintBytesBuffer.duplicate());
+ var field15 = record.getString(15);
bh.consume(field15);
}
@Benchmark
- public void singleFieldAccessJackson(Blackhole bh) throws Exception {
- // Jackson must deserialize entire object to access any field
- TestRecord record = jackson.readValue(jacksonBytes, TestRecord.class);
-
- // Access field15 equivalent (extraData[4]) after full deserialization
+ public void singleFieldAccessJacksonJson(Blackhole bh) throws Exception {
+ TestRecord record = jacksonJsonMapper.readValue(jacksonJsonBytes, TestRecord.class);
bh.consume(record.extraData.get(4));
}
@Benchmark
public void singleFieldAccessKryo(Blackhole bh) {
- // Kryo must deserialize entire object to access any field
Input input = new Input(new ByteArrayInputStream(kryoBytes));
TestRecord record = kryo.readObject(input, TestRecord.class);
input.close();
-
- // Access field15 equivalent (extraData[4]) after full deserialization
bh.consume(record.extraData.get(4));
}
+ @Benchmark
+ public void singleFieldAccessMessagePack(Blackhole bh) throws Exception {
+ TestRecord record = messagePackMapper.readValue(messagePackBytes, TestRecord.class);
+ bh.consume(record.extraData.get(4));
+ }
+
+ @Benchmark
+ public void singleFieldAccessAvro(Blackhole bh) throws Exception {
+ GenericRecord record = deserializeWithAvro(avroBytes);
+ bh.consume(record.get("extraData4"));
+ }
+
+ @Benchmark
+ public void singleFieldAccessProtobuf(Blackhole bh) throws Exception {
+ TestRecordProto.TestRecord record = TestRecordProto.TestRecord.parseFrom(protobufBytes);
+ bh.consume(record.getExtraData(4));
+ }
+
+ @Benchmark
+ public void singleFieldAccessFlatBuffers(Blackhole bh) {
+ TestRecordFB record = TestRecordFB.getRootAsTestRecordFB(flatbuffersBytes.duplicate());
+ bh.consume(record.extraData(4));
+ }
+
// ===== SIZE COMPARISON =====
@Benchmark
- public void measureImprintSize(Blackhole bh) throws Exception {
- ByteBuffer serialized = serializeWithImprint(testData);
- bh.consume(serialized.remaining());
+ public void measureImprintSize(Blackhole bh) {
+ bh.consume(imprintBytesBuffer.remaining());
}
@Benchmark
- public void measureJacksonSize(Blackhole bh) throws Exception {
- byte[] serialized = serializeWithJackson(testData);
- bh.consume(serialized.length);
+ public void measureJacksonJsonSize(Blackhole bh) {
+ bh.consume(jacksonJsonBytes.length);
}
@Benchmark
public void measureKryoSize(Blackhole bh) {
- byte[] serialized = serializeWithKryo(testData);
- bh.consume(serialized.length);
+ bh.consume(kryoBytes.length);
+ }
+
+ @Benchmark
+ public void measureMessagePackSize(Blackhole bh) {
+ bh.consume(messagePackBytes.length);
+ }
+
+ @Benchmark
+ public void measureAvroSize(Blackhole bh) {
+ bh.consume(avroBytes.length);
+ }
+
+ @Benchmark
+ public void measureProtobufSize(Blackhole bh) {
+ bh.consume(protobufBytes.length);
+ }
+
+ @Benchmark
+ public void measureFlatBuffersSize(Blackhole bh) {
+ bh.consume(flatbuffersBytes.remaining());
}
// ===== MERGE SIMULATION BENCHMARKS =====
@Benchmark
public void mergeImprint(Blackhole bh) throws Exception {
- var record1 = serializeWithImprint(testData);
- var record2 = serializeWithImprint(createTestRecord2());
+ var record1Buffer = imprintBytesBuffer.duplicate();
+ var record2Data = createTestRecord2();
+ var record2Buffer = serializeWithImprint(record2Data);
- var deserialized1 = ImprintRecord.deserialize(record1);
- var deserialized2 = ImprintRecord.deserialize(record2);
+ var deserialized1 = ImprintRecord.deserialize(record1Buffer);
+ var deserialized2 = ImprintRecord.deserialize(record2Buffer);
var merged = simulateMerge(deserialized1, deserialized2);
-
+
bh.consume(merged);
}
@Benchmark
- public void mergeJackson(Blackhole bh) throws Exception {
- // Jackson merge requires full deserialization + merge + serialization
- var record1 = jackson.readValue(jacksonBytes, TestRecord.class);
- var record2 = jackson.readValue(serializeWithJackson(createTestRecord2()), TestRecord.class);
-
- var merged = mergeTestRecords(record1, record2);
- byte[] result = jackson.writeValueAsBytes(merged);
-
+ public void mergeJacksonJson(Blackhole bh) throws Exception {
+ var record1 = jacksonJsonMapper.readValue(jacksonJsonBytes, TestRecord.class);
+ var record2Data = createTestRecord2();
+ var record2Bytes = serializeWithJacksonJson(record2Data);
+ var record2 = jacksonJsonMapper.readValue(record2Bytes, TestRecord.class);
+
+ var mergedPojo = mergeTestRecords(record1, record2);
+ byte[] result = jacksonJsonMapper.writeValueAsBytes(mergedPojo);
bh.consume(result);
}
@Benchmark
public void mergeKryo(Blackhole bh) {
- // Kryo merge requires full deserialization + merge + serialization
Input input1 = new Input(new ByteArrayInputStream(kryoBytes));
var record1 = kryo.readObject(input1, TestRecord.class);
input1.close();
-
- Input input2 = new Input(new ByteArrayInputStream(serializeWithKryo(createTestRecord2())));
+
+ var record2Data = createTestRecord2();
+ var record2Bytes = serializeWithKryo(record2Data);
+ Input input2 = new Input(new ByteArrayInputStream(record2Bytes));
var record2 = kryo.readObject(input2, TestRecord.class);
input2.close();
-
- var merged = mergeTestRecords(record1, record2);
- byte[] result = serializeWithKryo(merged);
-
+
+ var mergedPojo = mergeTestRecords(record1, record2);
+ byte[] result = serializeWithKryo(mergedPojo);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void mergeMessagePack(Blackhole bh) throws Exception {
+ var record1 = messagePackMapper.readValue(messagePackBytes, TestRecord.class);
+ var record2Data = createTestRecord2();
+ var record2Bytes = serializeWithMessagePack(record2Data);
+ var record2 = messagePackMapper.readValue(record2Bytes, TestRecord.class);
+
+ var mergedPojo = mergeTestRecords(record1, record2);
+ byte[] result = messagePackMapper.writeValueAsBytes(mergedPojo);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void mergeAvro(Blackhole bh) throws Exception {
+ var record1 = deserializeWithAvro(avroBytes);
+ var record2Data = createTestRecord2();
+ var record2Bytes = serializeWithAvro(record2Data);
+ var record2 = deserializeWithAvro(record2Bytes);
+
+ var merged = mergeAvroRecords(record1, record2);
+ byte[] result = serializeAvroRecord(merged);
+ bh.consume(result);
+ }
+
+ @Benchmark
+ public void mergeProtobuf(Blackhole bh) throws Exception {
+ var record1 = TestRecordProto.TestRecord.parseFrom(protobufBytes);
+ var record2Data = createTestRecord2();
+ var record2Bytes = serializeWithProtobuf(record2Data);
+ var record2 = TestRecordProto.TestRecord.parseFrom(record2Bytes);
+
+ var merged = mergeProtobufRecords(record1, record2);
+ byte[] result = merged.toByteArray();
bh.consume(result);
}
+ @Benchmark
+ public void mergeFlatBuffers(Blackhole bh) {
+ var record1 = TestRecordFB.getRootAsTestRecordFB(flatbuffersBytes.duplicate());
+ var record2Data = createTestRecord2();
+ var record2Buffer = serializeWithFlatBuffers(record2Data);
+ var record2 = TestRecordFB.getRootAsTestRecordFB(record2Buffer);
+
+ var merged = mergeFlatBuffersRecords(record1, record2);
+ bh.consume(merged);
+ }
+
+ // ===== MAIN METHOD TO RUN BENCHMARKS =====
+
+    public static void main(String[] args) throws RunnerException {
+        switch (args.length > 0 ? args[0] : "runAll") {
+            case "runSerializationBenchmarks": runSerializationBenchmarks(); break;
+            case "runDeserializationBenchmarks": runDeserializationBenchmarks(); break;
+            case "runFieldAccessBenchmarks": runFieldAccessBenchmarks(); break;
+            case "runSizeComparisonBenchmarks": runSizeComparisonBenchmarks(); break;
+            case "runMergeBenchmarks": runMergeBenchmarks(); break;
+            default: runAll();
+        }
+    }
+
+ public static void runAll() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName())
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runSerializationBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".serialize.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runDeserializationBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".deserialize.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runFieldAccessBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".singleFieldAccess.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runSizeComparisonBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".measure.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runMergeBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".merge.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runMessagePackBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".*MessagePack.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runAvroBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".*Avro.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runProtobufBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".*Protobuf.*")
+ .build();
+ new Runner(opt).run();
+ }
+
+ public static void runFlatBuffersBenchmarks() throws RunnerException {
+ Options opt = new OptionsBuilder()
+ .include(ComparisonBenchmark.class.getSimpleName() + ".*FlatBuffers.*")
+ .build();
+ new Runner(opt).run();
+ }
+
// ===== HELPER METHODS =====
+ private void setupAvro() {
+ String schemaJson = "{\n" +
+ " \"type\": \"record\",\n" +
+ " \"name\": \"TestRecord\",\n" +
+ " \"fields\": [\n" +
+ " {\"name\": \"id\", \"type\": \"int\"},\n" +
+ " {\"name\": \"name\", \"type\": \"string\"},\n" +
+ " {\"name\": \"price\", \"type\": \"double\"},\n" +
+ " {\"name\": \"active\", \"type\": \"boolean\"},\n" +
+ " {\"name\": \"category\", \"type\": \"string\"},\n" +
+ " {\"name\": \"tags\", \"type\": {\"type\": \"array\", \"items\": \"string\"}},\n" +
+ " {\"name\": \"metadata\", \"type\": {\"type\": \"map\", \"values\": \"string\"}},\n" +
+ " {\"name\": \"extraData0\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData1\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData2\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData3\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData4\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData5\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData6\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData7\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData8\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData9\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData10\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData11\", \"type\": \"string\"},\n" +
+ " {\"name\": \"extraData12\", \"type\": \"string\"}\n" +
+ " ]\n" +
+ "}";
+
+ avroSchema = new Schema.Parser().parse(schemaJson);
+ avroWriter = new GenericDatumWriter<>(avroSchema);
+ avroReader = new GenericDatumReader<>(avroSchema);
+ }
+
private ByteBuffer serializeWithImprint(TestRecord data) throws Exception {
var writer = new ImprintWriter(new SchemaId(1, 0x12345678));
-
+
writer.addField(1, Value.fromInt32(data.id));
writer.addField(2, Value.fromString(data.name));
writer.addField(3, Value.fromFloat64(data.price));
writer.addField(4, Value.fromBoolean(data.active));
writer.addField(5, Value.fromString(data.category));
-
- // Convert tags list
+
var tagValues = new ArrayList();
- for (String tag : data.tags) {
- tagValues.add(Value.fromString(tag));
+ if (data.tags != null) {
+ for (String tag : data.tags) {
+ tagValues.add(Value.fromString(tag));
+ }
}
writer.addField(6, Value.fromArray(tagValues));
-
- // Convert metadata map
+
var metadataMap = new HashMap();
- for (var entry : data.metadata.entrySet()) {
- metadataMap.put(MapKey.fromString(entry.getKey()), Value.fromString(entry.getValue()));
+ if (data.metadata != null) {
+ for (var entry : data.metadata.entrySet()) {
+ metadataMap.put(MapKey.fromString(entry.getKey()), Value.fromString(entry.getValue()));
+ }
}
writer.addField(7, Value.fromMap(metadataMap));
-
- // Add extra fields (8-20) to create a larger record
- for (int i = 0; i < data.extraData.size(); i++) {
- writer.addField(8 + i, Value.fromString(data.extraData.get(i)));
+
+ if (data.extraData != null) {
+ for (int i = 0; i < data.extraData.size(); i++) {
+ writer.addField(8 + i, Value.fromString(data.extraData.get(i)));
+ }
}
-
+
return writer.build().serializeToBuffer();
}
- private byte[] serializeWithJackson(TestRecord data) throws Exception {
- return jackson.writeValueAsBytes(data);
+ private byte[] serializeWithJacksonJson(TestRecord data) throws Exception {
+ return jacksonJsonMapper.writeValueAsBytes(data);
}
private byte[] serializeWithKryo(TestRecord data) {
@@ -240,14 +517,117 @@ private byte[] serializeWithKryo(TestRecord data) {
return baos.toByteArray();
}
+ private byte[] serializeWithMessagePack(TestRecord data) throws Exception {
+ return messagePackMapper.writeValueAsBytes(data);
+ }
+
+ private byte[] serializeWithAvro(TestRecord data) throws Exception {
+ GenericRecord record = new GenericData.Record(avroSchema);
+ record.put("id", data.id);
+ record.put("name", data.name);
+ record.put("price", data.price);
+ record.put("active", data.active);
+ record.put("category", data.category);
+ record.put("tags", data.tags);
+ record.put("metadata", data.metadata);
+
+ for (int i = 0; i < data.extraData.size(); i++) {
+ record.put("extraData" + i, data.extraData.get(i));
+ }
+
+ return serializeAvroRecord(record);
+ }
+
+ private byte[] serializeAvroRecord(GenericRecord record) throws Exception {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
+ avroWriter.write(record, encoder);
+ encoder.flush();
+ return baos.toByteArray();
+ }
+
+ private GenericRecord deserializeWithAvro(byte[] data) throws Exception {
+ Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
+ return avroReader.read(null, decoder);
+ }
+
+ private byte[] serializeWithProtobuf(TestRecord data) {
+ var builder = TestRecordProto.TestRecord.newBuilder()
+ .setId(data.id)
+ .setName(data.name)
+ .setPrice(data.price)
+ .setActive(data.active)
+ .setCategory(data.category)
+ .addAllTags(data.tags)
+ .putAllMetadata(data.metadata);
+
+ for (String extraData : data.extraData) {
+ builder.addExtraData(extraData);
+ }
+
+ return builder.build().toByteArray();
+ }
+
+ private ByteBuffer serializeWithFlatBuffers(TestRecord data) {
+ FlatBufferBuilder builder = new FlatBufferBuilder(1024);
+
+ // Create strings (must be created before the object that uses them)
+ int nameOffset = builder.createString(data.name);
+ int categoryOffset = builder.createString(data.category);
+
+ // Create tags array
+ int[] tagOffsets = new int[data.tags.size()];
+ for (int i = 0; i < data.tags.size(); i++) {
+ tagOffsets[i] = builder.createString(data.tags.get(i));
+ }
+ int tagsOffset = TestRecordFB.createTagsVector(builder, tagOffsets);
+
+ // Create metadata (as parallel arrays for keys and values)
+ String[] metadataKeys = data.metadata.keySet().toArray(new String[0]);
+ String[] metadataValues = new String[metadataKeys.length];
+ int[] keyOffsets = new int[metadataKeys.length];
+ int[] valueOffsets = new int[metadataKeys.length];
+
+ for (int i = 0; i < metadataKeys.length; i++) {
+ metadataValues[i] = data.metadata.get(metadataKeys[i]);
+ keyOffsets[i] = builder.createString(metadataKeys[i]);
+ valueOffsets[i] = builder.createString(metadataValues[i]);
+ }
+ int metadataKeysOffset = TestRecordFB.createMetadataKeysVector(builder, keyOffsets);
+ int metadataValuesOffset = TestRecordFB.createMetadataValuesVector(builder, valueOffsets);
+
+ // Create extra data array
+ int[] extraDataOffsets = new int[data.extraData.size()];
+ for (int i = 0; i < data.extraData.size(); i++) {
+ extraDataOffsets[i] = builder.createString(data.extraData.get(i));
+ }
+ int extraDataOffset = TestRecordFB.createExtraDataVector(builder, extraDataOffsets);
+
+ // Create the main object
+ TestRecordFB.startTestRecordFB(builder);
+ TestRecordFB.addId(builder, data.id);
+ TestRecordFB.addName(builder, nameOffset);
+ TestRecordFB.addPrice(builder, data.price);
+ TestRecordFB.addActive(builder, data.active);
+ TestRecordFB.addCategory(builder, categoryOffset);
+ TestRecordFB.addTags(builder, tagsOffset);
+ TestRecordFB.addMetadataKeys(builder, metadataKeysOffset);
+ TestRecordFB.addMetadataValues(builder, metadataValuesOffset);
+ TestRecordFB.addExtraData(builder, extraDataOffset);
+ int recordOffset = TestRecordFB.endTestRecordFB(builder);
+
+ // Finish and return
+ builder.finish(recordOffset);
+ return builder.dataBuffer().slice();
+ }
+
private ImprintRecord simulateMerge(ImprintRecord first, ImprintRecord second) throws Exception {
var writer = new ImprintWriter(first.getHeader().getSchemaId());
var usedFieldIds = new HashSet();
-
- // Copy fields from first record (takes precedence)
+
copyFieldsToWriter(first, writer, usedFieldIds);
copyFieldsToWriter(second, writer, usedFieldIds);
-
+
return writer.build();
}
@@ -265,23 +645,121 @@ private void copyFieldsToWriter(ImprintRecord record, ImprintWriter writer, Set<
}
private TestRecord mergeTestRecords(TestRecord first, TestRecord second) {
- // Simple merge logic - first record takes precedence
var merged = new TestRecord();
merged.id = first.id;
merged.name = first.name != null ? first.name : second.name;
merged.price = first.price != 0.0 ? first.price : second.price;
merged.active = first.active;
merged.category = first.category != null ? first.category : second.category;
-
+
merged.tags = new ArrayList<>(first.tags);
merged.tags.addAll(second.tags);
-
+
merged.metadata = new HashMap<>(first.metadata);
merged.metadata.putAll(second.metadata);
-
+
return merged;
}
+ private GenericRecord mergeAvroRecords(GenericRecord first, GenericRecord second) {
+ GenericRecord merged = new GenericData.Record(avroSchema);
+
+ // Copy all fields from first record
+ for (Schema.Field field : avroSchema.getFields()) {
+ merged.put(field.name(), first.get(field.name()));
+ }
+
+ // Override with non-null values from second record
+ for (Schema.Field field : avroSchema.getFields()) {
+ Object secondValue = second.get(field.name());
+ if (secondValue != null && !secondValue.toString().isEmpty()) {
+ merged.put(field.name(), secondValue);
+ }
+ }
+
+ return merged;
+ }
+
+ private TestRecordProto.TestRecord mergeProtobufRecords(TestRecordProto.TestRecord first, TestRecordProto.TestRecord second) {
+ return TestRecordProto.TestRecord.newBuilder()
+ .mergeFrom(first)
+ .mergeFrom(second)
+ .build();
+ }
+
+ private ByteBuffer mergeFlatBuffersRecords(TestRecordFB first, TestRecordFB second) {
+ FlatBufferBuilder builder = new FlatBufferBuilder(1024);
+
+ // Use second record's values if they exist, otherwise first record's values
+ String name = second.name() != null && !Objects.requireNonNull(second.name()).isEmpty() ? second.name() : first.name();
+ String category = second.category() != null && !Objects.requireNonNull(second.category()).isEmpty() ? second.category() : first.category();
+ double price = second.price() != 0.0 ? second.price() : first.price();
+ boolean active = second.active(); // Use second's boolean value
+ int id = first.id(); // Keep first record's ID
+
+ // Create merged strings
+ int nameOffset = builder.createString(name);
+ int categoryOffset = builder.createString(category);
+
+ // Merge tags (combine both arrays)
+ List<String> mergedTags = new ArrayList<>();
+ for (int i = 0; i < first.tagsLength(); i++) {
+ mergedTags.add(first.tags(i));
+ }
+ for (int i = 0; i < second.tagsLength(); i++) {
+ mergedTags.add(second.tags(i));
+ }
+
+ int[] tagOffsets = new int[mergedTags.size()];
+ for (int i = 0; i < mergedTags.size(); i++) {
+ tagOffsets[i] = builder.createString(mergedTags.get(i));
+ }
+ int tagsOffset = TestRecordFB.createTagsVector(builder, tagOffsets);
+
+ // Merge metadata (second overwrites first)
+ Map<String, String> mergedMetadata = new HashMap<>();
+ for (int i = 0; i < first.metadataKeysLength(); i++) {
+ mergedMetadata.put(first.metadataKeys(i), first.metadataValues(i));
+ }
+ for (int i = 0; i < second.metadataKeysLength(); i++) {
+ mergedMetadata.put(second.metadataKeys(i), second.metadataValues(i));
+ }
+
+ String[] metadataKeys = mergedMetadata.keySet().toArray(new String[0]);
+ int[] keyOffsets = new int[metadataKeys.length];
+ int[] valueOffsets = new int[metadataKeys.length];
+
+ for (int i = 0; i < metadataKeys.length; i++) {
+ keyOffsets[i] = builder.createString(metadataKeys[i]);
+ valueOffsets[i] = builder.createString(mergedMetadata.get(metadataKeys[i]));
+ }
+ int metadataKeysOffset = TestRecordFB.createMetadataKeysVector(builder, keyOffsets);
+ int metadataValuesOffset = TestRecordFB.createMetadataValuesVector(builder, valueOffsets);
+
+ // Use first record's extra data (or could merge both)
+ int[] extraDataOffsets = new int[first.extraDataLength()];
+ for (int i = 0; i < first.extraDataLength(); i++) {
+ extraDataOffsets[i] = builder.createString(first.extraData(i));
+ }
+ int extraDataOffset = TestRecordFB.createExtraDataVector(builder, extraDataOffsets);
+
+ // Create the merged object
+ TestRecordFB.startTestRecordFB(builder);
+ TestRecordFB.addId(builder, id);
+ TestRecordFB.addName(builder, nameOffset);
+ TestRecordFB.addPrice(builder, price);
+ TestRecordFB.addActive(builder, active);
+ TestRecordFB.addCategory(builder, categoryOffset);
+ TestRecordFB.addTags(builder, tagsOffset);
+ TestRecordFB.addMetadataKeys(builder, metadataKeysOffset);
+ TestRecordFB.addMetadataValues(builder, metadataValuesOffset);
+ TestRecordFB.addExtraData(builder, extraDataOffset);
+ int recordOffset = TestRecordFB.endTestRecordFB(builder);
+
+ builder.finish(recordOffset);
+ return builder.dataBuffer().slice();
+ }
+
private TestRecord createTestRecord() {
var record = new TestRecord();
record.id = 12345;
@@ -289,20 +767,19 @@ var record = new TestRecord();
record.price = 99.99;
record.active = true;
record.category = "Electronics";
-
+
record.tags = Arrays.asList("popular", "trending", "bestseller");
-
+
record.metadata = new HashMap<>();
record.metadata.put("manufacturer", "TechCorp");
record.metadata.put("model", "TC-2024");
record.metadata.put("warranty", "2 years");
-
- // Add extra data to create a larger record (fields 8-20)
+
record.extraData = new ArrayList<>();
for (int i = 0; i < 13; i++) {
record.extraData.add("extraField" + i + "_value_" + (1000 + i));
}
-
+
return record;
}
@@ -313,23 +790,23 @@ var record = new TestRecord();
record.price = 149.99;
record.active = false;
record.category = "Software";
-
+
record.tags = Arrays.asList("new", "premium");
-
+
record.metadata = new HashMap<>();
record.metadata.put("vendor", "SoftCorp");
record.metadata.put("version", "2.1");
-
- // Add extra data to match the structure
+
record.extraData = new ArrayList<>();
for (int i = 0; i < 13; i++) {
record.extraData.add("extraField" + i + "_value2_" + (2000 + i));
}
-
+
return record;
}
// Test data class for other serialization libraries
+ @NoArgsConstructor
public static class TestRecord {
public int id;
public String name;
@@ -338,8 +815,7 @@ public static class TestRecord {
public String category;
public List<String> tags = new ArrayList<>();
public Map<String, String> metadata = new HashMap<>();
- public List extraData = new ArrayList<>(); // Fields 8-20 for large record test
-
- public TestRecord() {} // Required for deserialization
+ // Fields 8-20 for large record test
+ public List<String> extraData = new ArrayList<>();
}
}
\ No newline at end of file
diff --git a/src/jmh/proto/test_record.proto b/src/jmh/proto/test_record.proto
new file mode 100644
index 0000000..7a76f71
--- /dev/null
+++ b/src/jmh/proto/test_record.proto
@@ -0,0 +1,15 @@
+syntax = "proto3";
+
+option java_package = "com.imprint.benchmark";
+option java_outer_classname = "TestRecordProto";
+
+message TestRecord {
+ int32 id = 1;
+ string name = 2;
+ double price = 3;
+ bool active = 4;
+ string category = 5;
+ repeated string tags = 6;
+ map<string, string> metadata = 7;
+ repeated string extra_data = 8;
+}
\ No newline at end of file
diff --git a/src/main/java/com/imprint/core/ImprintRecord.java b/src/main/java/com/imprint/core/ImprintRecord.java
index 5d4719f..e7dab70 100644
--- a/src/main/java/com/imprint/core/ImprintRecord.java
+++ b/src/main/java/com/imprint/core/ImprintRecord.java
@@ -34,7 +34,7 @@ public final class ImprintRecord {
*/
public ImprintRecord(Header header, List directory, ByteBuffer payload) {
this.header = Objects.requireNonNull(header, "Header cannot be null");
- this.directory = List.copyOf(Objects.requireNonNull(directory, "Directory cannot be null"));
+ this.directory = Collections.unmodifiableList(Objects.requireNonNull(directory, "Directory cannot be null"));
this.payload = payload.asReadOnlyBuffer(); // Zero-copy read-only view
}
@@ -79,12 +79,14 @@ private ByteBuffer getFieldBuffer(int fieldId) {
int endOffset = (index + 1 < directory.size()) ?
directory.get(index + 1).getOffset() : payload.limit();
- var fieldBuffer = payload.duplicate();
if (startOffset > payload.limit() || endOffset > payload.limit() || startOffset > endOffset) {
return null;
}
+
+ // One buffer allocation (duplicate only) instead of duplicate + slice
+ var fieldBuffer = payload.duplicate();
fieldBuffer.position(startOffset).limit(endOffset);
- return fieldBuffer.slice();
+ return fieldBuffer;
}
/**
@@ -261,7 +263,7 @@ private static DirectoryEntry deserializeDirectoryEntry(ByteBuffer buffer) throw
}
private Value deserializeValue(TypeCode typeCode, ByteBuffer buffer) throws ImprintException {
- ByteBuffer valueSpecificBuffer = buffer.duplicate();
+ var valueSpecificBuffer = buffer.duplicate();
valueSpecificBuffer.order(ByteOrder.LITTLE_ENDIAN);
switch (typeCode) {
@@ -351,7 +353,7 @@ public double getFloat64(int fieldId) throws ImprintException {
* @throws ImprintException if the field is not found, is null, or is not of type STRING.
*/
public String getString(int fieldId) throws ImprintException {
- Value value = getValue(fieldId);
+ var value = getValue(fieldId);
if (value == null) {
throw new ImprintException(ErrorType.FIELD_NOT_FOUND,
diff --git a/src/main/java/com/imprint/types/TypeHandler.java b/src/main/java/com/imprint/types/TypeHandler.java
index be4fc7b..634867b 100644
--- a/src/main/java/com/imprint/types/TypeHandler.java
+++ b/src/main/java/com/imprint/types/TypeHandler.java
@@ -9,7 +9,7 @@
/**
* Interface for handling type-specific serialization, deserialization, and size estimation.
- * Note that primitives are potentially auto/un-boxed here which could impact performance slightly
+ * Note that primitives are boxed here, which could impact performance slightly,
* but having all the types in their own implementation helps keep things organized for now, especially
* for dealing with and testing more complex types in the future.
*/
@@ -17,52 +17,7 @@ public interface TypeHandler {
Value deserialize(ByteBuffer buffer) throws ImprintException;
void serialize(Value value, ByteBuffer buffer) throws ImprintException;
int estimateSize(Value value) throws ImprintException;
- ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException;
-
-
-
- @FunctionalInterface
- interface BufferViewer {
- int measureDataLength(ByteBuffer tempBuffer, int numElements) throws ImprintException;
- }
-
- // Helper method to eliminate duplication in ARRAY/MAP readValueBytes
- static ByteBuffer readComplexValueBytes(ByteBuffer buffer, String typeName, BufferViewer measurer) throws ImprintException {
- int initialPosition = buffer.position();
- ByteBuffer tempBuffer = buffer.duplicate();
- tempBuffer.order(buffer.order());
-
- VarInt.DecodeResult lengthResult = VarInt.decode(tempBuffer);
- int numElements = lengthResult.getValue();
- int varIntLength = tempBuffer.position() - initialPosition;
-
- if (numElements == 0) {
- if (buffer.remaining() < varIntLength) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW,
- "Not enough bytes for empty " + typeName + " VarInt. Needed: " +
- varIntLength + ", available: " + buffer.remaining());
- }
- ByteBuffer valueSlice = buffer.slice();
- valueSlice.limit(varIntLength);
- buffer.position(initialPosition + varIntLength);
- return valueSlice.asReadOnlyBuffer();
- }
-
- int dataLength = measurer.measureDataLength(tempBuffer, numElements);
- int totalLength = varIntLength + dataLength;
-
- if (buffer.remaining() < totalLength) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW,
- "Not enough bytes for " + typeName + " value. Needed: " + totalLength +
- ", available: " + buffer.remaining() + " at position " + initialPosition);
- }
- ByteBuffer valueSlice = buffer.slice();
- valueSlice.limit(totalLength);
- buffer.position(initialPosition + totalLength);
- return valueSlice.asReadOnlyBuffer();
- }
-
// Static implementations for each type
TypeHandler NULL = new TypeHandler() {
@Override
@@ -79,11 +34,6 @@ public void serialize(Value value, ByteBuffer buffer) {
public int estimateSize(Value value) {
return 0;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) {
- return ByteBuffer.allocate(0).asReadOnlyBuffer();
- }
};
TypeHandler BOOL = new TypeHandler() {
@@ -108,14 +58,6 @@ public void serialize(Value value, ByteBuffer buffer) {
public int estimateSize(Value value) {
return 1;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) {
- var boolBuffer = buffer.slice();
- boolBuffer.limit(1);
- buffer.position(buffer.position() + 1);
- return boolBuffer.asReadOnlyBuffer();
- }
};
TypeHandler INT32 = new TypeHandler() {
@@ -137,14 +79,6 @@ public void serialize(Value value, ByteBuffer buffer) {
public int estimateSize(Value value) {
return 4;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) {
- var int32Buffer = buffer.slice();
- int32Buffer.limit(4);
- buffer.position(buffer.position() + 4);
- return int32Buffer.asReadOnlyBuffer();
- }
};
TypeHandler INT64 = new TypeHandler() {
@@ -166,14 +100,6 @@ public void serialize(Value value, ByteBuffer buffer) {
public int estimateSize(Value value) {
return 8;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) {
- var int64Buffer = buffer.slice();
- int64Buffer.limit(8);
- buffer.position(buffer.position() + 8);
- return int64Buffer.asReadOnlyBuffer();
- }
};
TypeHandler FLOAT32 = new TypeHandler() {
@@ -195,14 +121,6 @@ public void serialize(Value value, ByteBuffer buffer) {
public int estimateSize(Value value) {
return 4;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) {
- var float32Buffer = buffer.slice();
- float32Buffer.limit(4);
- buffer.position(buffer.position() + 4);
- return float32Buffer.asReadOnlyBuffer();
- }
};
TypeHandler FLOAT64 = new TypeHandler() {
@@ -224,14 +142,6 @@ public void serialize(Value value, ByteBuffer buffer) {
public int estimateSize(Value value) {
return 8;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) {
- var float64Buffer = buffer.slice();
- float64Buffer.limit(8);
- buffer.position(buffer.position() + 8);
- return float64Buffer.asReadOnlyBuffer();
- }
};
TypeHandler BYTES = new TypeHandler() {
@@ -274,29 +184,6 @@ public int estimateSize(Value value) {
return VarInt.encodedLength(bytes.length) + bytes.length;
}
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException {
- int initialPos = buffer.position();
- ByteBuffer tempMeasureBuffer = buffer.duplicate();
- VarInt.DecodeResult dr = VarInt.decode(tempMeasureBuffer);
-
- int varIntByteLength = tempMeasureBuffer.position() - initialPos;
- int payloadByteLength = dr.getValue();
- int totalValueLength = varIntByteLength + payloadByteLength;
-
- if (buffer.remaining() < totalValueLength) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW,
- "Not enough bytes for VarInt-prefixed data. Needed: " + totalValueLength +
- ", available: " + buffer.remaining() + " at position " + initialPos);
- }
-
- ByteBuffer resultSlice = buffer.slice();
- resultSlice.limit(totalValueLength);
-
- buffer.position(initialPos + totalValueLength);
- return resultSlice.asReadOnlyBuffer();
- }
};
TypeHandler STRING = new TypeHandler() {
@@ -344,29 +231,6 @@ public int estimateSize(Value value) {
return VarInt.encodedLength(utf8Bytes.length) + utf8Bytes.length;
}
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException {
- int initialPos = buffer.position();
- ByteBuffer tempMeasureBuffer = buffer.duplicate();
- VarInt.DecodeResult dr = VarInt.decode(tempMeasureBuffer);
-
- int varIntByteLength = tempMeasureBuffer.position() - initialPos;
- int payloadByteLength = dr.getValue();
- int totalValueLength = varIntByteLength + payloadByteLength;
-
- if (buffer.remaining() < totalValueLength) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW,
- "Not enough bytes for VarInt-prefixed string. Needed: " + totalValueLength +
- ", available: " + buffer.remaining() + " at position " + initialPos);
- }
-
- ByteBuffer resultSlice = buffer.slice();
- resultSlice.limit(totalValueLength);
-
- buffer.position(initialPos + totalValueLength);
- return resultSlice.asReadOnlyBuffer();
- }
};
TypeHandler ARRAY = new TypeHandler() {
@@ -374,25 +238,24 @@ public ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException {
public Value deserialize(ByteBuffer buffer) throws ImprintException {
VarInt.DecodeResult lengthResult = VarInt.decode(buffer);
int length = lengthResult.getValue();
-
+
if (length == 0) {
return Value.fromArray(Collections.emptyList());
}
-
+
if (buffer.remaining() < 1) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW, "Not enough bytes for ARRAY element type code.");
+ throw new ImprintException(ErrorType.BUFFER_UNDERFLOW, "Not enough bytes for ARRAY element type code.");
}
var elementType = TypeCode.fromByte(buffer.get());
var elements = new ArrayList(length);
var elementHandler = elementType.getHandler();
-
+
+ //Let each element handler consume what it needs from the buffer
for (int i = 0; i < length; i++) {
- var elementValueBytes = elementHandler.readValueBytes(buffer);
- elementValueBytes.order(buffer.order());
- var element = elementHandler.deserialize(elementValueBytes);
+ var element = elementHandler.deserialize(buffer); //Handler advances buffer position
elements.add(element);
}
-
+
return Value.fromArray(elements);
}
@@ -433,28 +296,6 @@ public int estimateSize(Value value) throws ImprintException {
}
return arraySize;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException {
- return readComplexValueBytes(buffer, "ARRAY", (tempBuffer, numElements) -> {
- if (tempBuffer.remaining() < 1) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW,
- "Not enough bytes for ARRAY element type code in temp buffer during measurement.");
- }
- byte elementTypeCodeByte = tempBuffer.get();
- int typeCodeLength = 1;
-
- TypeHandler elementHandler = TypeCode.fromByte(elementTypeCodeByte).getHandler();
- int elementsDataLength = 0;
- for (int i = 0; i < numElements; i++) {
- int elementStartPos = tempBuffer.position();
- elementHandler.readValueBytes(tempBuffer);
- elementsDataLength += (tempBuffer.position() - elementStartPos);
- }
-
- return typeCodeLength + elementsDataLength;
- });
- }
};
TypeHandler MAP = new TypeHandler() {
@@ -462,13 +303,13 @@ public ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException {
public Value deserialize(ByteBuffer buffer) throws ImprintException {
VarInt.DecodeResult lengthResult = VarInt.decode(buffer);
int length = lengthResult.getValue();
-
+
if (length == 0) {
return Value.fromMap(Collections.emptyMap());
}
-
+
if (buffer.remaining() < 2) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW, "Not enough bytes for MAP key/value type codes.");
+ throw new ImprintException(ErrorType.BUFFER_UNDERFLOW, "Not enough bytes for MAP key/value type codes.");
}
var keyType = TypeCode.fromByte(buffer.get());
var valueType = TypeCode.fromByte(buffer.get());
@@ -476,20 +317,17 @@ public Value deserialize(ByteBuffer buffer) throws ImprintException {
var keyHandler = keyType.getHandler();
var valueHandler = valueType.getHandler();
-
+
+ //Let handlers consume directly from buffer
for (int i = 0; i < length; i++) {
- var keyBytes = keyHandler.readValueBytes(buffer);
- keyBytes.order(buffer.order());
- var keyValue = keyHandler.deserialize(keyBytes);
+ var keyValue = keyHandler.deserialize(buffer);// Advances buffer
var key = MapKey.fromValue(keyValue);
-
- var valueBytes = valueHandler.readValueBytes(buffer);
- valueBytes.order(buffer.order());
- var mapInternalValue = valueHandler.deserialize(valueBytes);
-
+
+ var mapInternalValue = valueHandler.deserialize(buffer);//Advances buffer
+
map.put(key, mapInternalValue);
}
-
+
return Value.fromMap(map);
}
@@ -549,29 +387,7 @@ public int estimateSize(Value value) throws ImprintException {
}
return mapSize;
}
-
- @Override
- public ByteBuffer readValueBytes(ByteBuffer buffer) throws ImprintException {
- return readComplexValueBytes(buffer, "MAP", (tempBuffer, numEntries) -> {
- if (tempBuffer.remaining() < 2) {
- throw new ImprintException(ErrorType.BUFFER_UNDERFLOW,
- "Not enough bytes for MAP key/value type codes in temp buffer during measurement.");
- }
- byte keyTypeCodeByte = tempBuffer.get();
- byte valueTypeCodeByte = tempBuffer.get();
- int typeCodesLength = 2;
- int entriesDataLength = 0;
- for (int i = 0; i < numEntries; i++) {
- int entryStartPos = tempBuffer.position();
- TypeCode.fromByte(keyTypeCodeByte).getHandler().readValueBytes(tempBuffer);
- TypeCode.fromByte(valueTypeCodeByte).getHandler().readValueBytes(tempBuffer);
- entriesDataLength += (tempBuffer.position() - entryStartPos);
- }
- return typeCodesLength + entriesDataLength;
- });
- }
-
private void serializeMapKey(MapKey key, ByteBuffer buffer) throws ImprintException {
switch (key.getTypeCode()) {
case INT32:
diff --git a/src/main/java/com/imprint/types/Value.java b/src/main/java/com/imprint/types/Value.java
index 7f3bbb9..fbb988c 100644
--- a/src/main/java/com/imprint/types/Value.java
+++ b/src/main/java/com/imprint/types/Value.java
@@ -192,11 +192,11 @@ public static class BytesValue extends Value {
private final byte[] value;
public BytesValue(byte[] value) {
- this.value = value.clone(); // defensive copy
+ this.value = value.clone();
}
public byte[] getValue() {
- return value.clone(); // defensive copy
+ return value.clone();
}
@Override
@@ -233,7 +233,7 @@ public static class BytesBufferValue extends Value {
private final ByteBuffer value;
public BytesBufferValue(ByteBuffer value) {
- this.value = value.asReadOnlyBuffer(); // zero-copy read-only view
+ this.value = value.asReadOnlyBuffer();
}
public byte[] getValue() {
@@ -244,7 +244,7 @@ public byte[] getValue() {
}
public ByteBuffer getBuffer() {
- return value.duplicate(); // zero-copy view
+ return value.duplicate();
}
@Override
@@ -289,11 +289,11 @@ public StringValue(String value) {
public byte[] getUtf8Bytes() {
var cached = cachedUtf8Bytes;
if (cached == null) {
- // Multiple threads may compute this - that's OK since it's idempotent
+ // UTF-8 encoding is deterministic, so concurrent recomputation yields the same bytes — no need to synchronize
cached = value.getBytes(StandardCharsets.UTF_8);
cachedUtf8Bytes = cached;
}
- return cached; // Return our computed value, not re-read from volatile field
+ return cached; // Return computed value
}
@Override
@@ -328,16 +328,19 @@ public String toString() {
// String Value (ByteBuffer-based)
public static class StringBufferValue extends Value {
private final ByteBuffer value;
- private volatile String cachedString; // lazy decode
+ private volatile String cachedString;
+
+ private static final int THREAD_LOCAL_BUFFER_SIZE = 1024;
+ private static final ThreadLocal<byte[]> DECODE_BUFFER_CACHE =
+ ThreadLocal.withInitial(() -> new byte[THREAD_LOCAL_BUFFER_SIZE]);
public StringBufferValue(ByteBuffer value) {
- this.value = value.asReadOnlyBuffer(); // zero-copy read-only view
+ this.value = value.asReadOnlyBuffer();
}
public String getValue() {
String result = cachedString;
if (result == null) {
- // Simple, fast decoding - no thread-local overhead
result = decodeUtf8();
cachedString = result;
}
@@ -345,22 +348,29 @@ public String getValue() {
}
private String decodeUtf8() {
- // Fast path: zero-copy for array-backed ByteBuffers
+ final byte[] array;
+ final int offset;
+ final int length = value.remaining();
+
if (value.hasArray()) {
- return new String(value.array(), value.arrayOffset() + value.position(),
- value.remaining(), StandardCharsets.UTF_8);
+ array = value.array();
+ offset = value.arrayOffset() + value.position();
+ } else {
+ byte[] threadLocalBuffer = DECODE_BUFFER_CACHE.get();
+ if (length <= threadLocalBuffer.length) {
+ array = threadLocalBuffer;
+ } else {
+ // Fallback: copy bytes from the ByteBuffer to a new heap array (if too large for cache)
+ array = new byte[length];
+ }
+ value.duplicate().get(array, 0, length);
+ offset = 0;
}
-
- // Fallback path for non-array-backed ByteBuffers (e.g., direct buffers).
- // Allocation is required here as Java's String(byte[],...) constructor needs a heap array.
- // Data is copied from the ByteBuffer to a new byte array.
- var array = new byte[value.remaining()];
- value.duplicate().get(array);
- return new String(array, StandardCharsets.UTF_8);
+ return new String(array, offset, length, StandardCharsets.UTF_8);
}
public ByteBuffer getBuffer() {
- return value.duplicate(); // zero-copy view
+ return value.duplicate();
}
@Override
diff --git a/src/test/java/com/imprint/benchmark/ProfilerTest.java b/src/test/java/com/imprint/profile/ProfilerTest.java
similarity index 97%
rename from src/test/java/com/imprint/benchmark/ProfilerTest.java
rename to src/test/java/com/imprint/profile/ProfilerTest.java
index 5b531a9..3b9f371 100644
--- a/src/test/java/com/imprint/benchmark/ProfilerTest.java
+++ b/src/test/java/com/imprint/profile/ProfilerTest.java
@@ -1,9 +1,11 @@
-package com.imprint.benchmark;
+package com.imprint.profile;
-import com.imprint.core.*;
+import com.imprint.core.ImprintRecord;
+import com.imprint.core.ImprintWriter;
+import com.imprint.core.SchemaId;
import com.imprint.types.Value;
-import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import java.util.Random;
@@ -29,7 +31,7 @@
public class ProfilerTest {
private static final int ITERATIONS = 1_000_000;
- private static final int RECORD_SIZE = 20;
+ private static final int RECORD_SIZE = 50;
@Test
void profileFieldAccess() throws Exception {
diff --git a/src/test/java/com/imprint/types/TypeHandlerTest.java b/src/test/java/com/imprint/types/TypeHandlerTest.java
index f131a0f..75d118f 100644
--- a/src/test/java/com/imprint/types/TypeHandlerTest.java
+++ b/src/test/java/com/imprint/types/TypeHandlerTest.java
@@ -33,11 +33,6 @@ void testNullHandler() throws ImprintException {
buffer.flip();
var deserialized = handler.deserialize(buffer);
assertThat(deserialized).isEqualTo(value);
-
- // readValueBytes
- buffer.clear();
- var valueBytes = handler.readValueBytes(buffer);
- assertThat(valueBytes.remaining()).isEqualTo(0);
}
@ParameterizedTest