This repository was archived by the owner on Jan 26, 2026. It is now read-only.
Merged
71 commits
5cb33c9
Update .gitignore
expanded-for-real Jun 1, 2025
63313e4
initial commit for imprint-java
Jun 1, 2025
dd4fdbc
initial commit for imprint-java
expanded-for-real Jun 1, 2025
bce1d13
Add GitHub Actions CI workflow for automated testing
Jun 2, 2025
f5d90b5
Merge remote-tracking branch 'origin/dev' into dev
Jun 2, 2025
72c468f
Update GitHub Actions workflow to use upload-artifact@v4
Jun 2, 2025
468d682
Add Gradle wrapper validation to CI workflow
Jun 2, 2025
cf05b13
Fix gitignore to include gradle-wrapper.jar for CI
Jun 2, 2025
d0d7983
Force add gradle-wrapper.jar to repository
Jun 2, 2025
f2cdd1b
Update wrapper validation action to v3
Jun 2, 2025
57c8249
Fix Javadoc syntax errors and disable strict Javadoc checking
Jun 2, 2025
edb3057
Add JMH benchmark .bat and .sh for full suite benchmarking and perfor…
Jun 2, 2025
2853e3f
fix map serialization error in benchmark test and streamline ci file …
Jun 2, 2025
3a5a113
Add execute permissions back for gradlew in CI
Jun 2, 2025
50a288b
Add some more string based performance benchmarks and try to make str…
Jun 2, 2025
ea1c4c4
Merge pull request #2 from imprint-serde/faster-strings
expanded-for-real Jun 2, 2025
43cab28
second main commit to address initial commits
expanded-for-real Jun 3, 2025
fdb8a56
additional cleanup to address concerns in https://github.com/imprint-…
Jun 3, 2025
2e56688
minor style fixes
Jun 3, 2025
9353388
minor style fixes again
Jun 3, 2025
09d0377
minor style fixes on benchmark tests and suppress unused
Jun 3, 2025
6209bb1
minor reordering
Jun 4, 2025
ace7c67
Merge branch 'main' into dev
Jun 4, 2025
4632e01
Full comprehensive comparison tests with a lot of other libraries + s…
Jun 5, 2025
3738861
replace deprecated gradle methods with latest
Jun 5, 2025
12d2823
Merge Comparisons into dev branch (#8)
expanded-for-real Jun 5, 2025
f7a6e8e
Lazy load of directory and header data
Jun 5, 2025
2834dbb
Merge remote-tracking branch 'origin/main' into dev
Jun 5, 2025
83ed961
minor cleanup
Jun 5, 2025
a605b65
minor cleanup
Jun 5, 2025
aacddeb
minor cleanup
Jun 5, 2025
3bf81ad
Actually fixes offsets and read Byte Values for Maps and Arrays even …
Jun 5, 2025
7eaa6e9
change CI file to use JMH plugin to respect iteration and warmup valu…
Jun 5, 2025
32640cd
ok plugin didn't work apparently so reverting that and just reducing …
Jun 5, 2025
2d882c2
Merge branch 'dev' into lazy-directory
Jun 5, 2025
880aeb0
trying to update github ci to make jmh actually work correctly
Jun 5, 2025
8831922
lazy directory deserialization
Jun 6, 2025
e361cf0
Merge branch 'main' into dev
Jun 6, 2025
73eade6
remove extra comments
Jun 6, 2025
02866d5
remove extra comments
Jun 6, 2025
6278665
Merge branch 'refs/heads/main' into dev
Jun 7, 2025
09443eb
Add merge and project APIs; optimize/simplify ImprintBuffers with Tre…
Jun 7, 2025
0c7b237
Optimize serialization path and remove ImprintWriter code in favor of…
Jun 7, 2025
574323e
Allow for record creation path from builder to bypass extra TreeMapping
Jun 7, 2025
b2bebee
Calculate estimated size as fields are added instead of deferring it
Jun 7, 2025
f1df8d7
Use idiomatic Directory interface and optimize builder
Jun 8, 2025
7420b7f
add large object profiling and refactor tests
Jun 8, 2025
4d86447
add Thrift competitor and fix framework issues
Jun 9, 2025
a722e45
Add single-field access test
Jun 9, 2025
9d0f2c8
correct benchmark methodology for fairness
Jun 9, 2025
4b2664c
micro-optimize and attempt to make ComparisonBenchmark tests a littl…
Jun 9, 2025
b4cf85d
final optimization and reorganization into better project structure
Jun 11, 2025
cce8994
final optimization and reorganization into better project structure
Jun 12, 2025
f06ad98
Merge branch 'main' into zero-copy
Jun 12, 2025
50c8a4b
track custom map
Jun 12, 2025
eb40310
delete extra operations file because I moved it
Jun 12, 2025
b8449c8
adding comments and TODOs
Jun 12, 2025
96fbc20
various micro-optimizations
Jun 13, 2025
686a855
make serializers static/final; begin to refactor to avoid virtual dis…
Jun 13, 2025
ffc7918
add new Imprint specific benchmark
Jun 13, 2025
e562ac3
Remove Value and TypeHandler to significantly reduce dynamic dispatch
Jun 14, 2025
999f48c
add static serializers
Jun 14, 2025
3b0a0d1
Add Unsafe direct buffer wrapper class
Jun 14, 2025
8c54405
Try to get merge to use the new direct/unsafe/growable imprint buffer
Jun 17, 2025
1148079
update comments
Jun 17, 2025
541eddd
convert operations to use expandable buffer
Jun 25, 2025
0f7b52b
finalize using unsafe buffer everywhere
Jun 27, 2025
60aff65
fix formatting
Jun 27, 2025
8a8fbc5
fix comparison tests
Jun 27, 2025
69f66d9
Merge branch 'main' into custom-buffers
Jun 27, 2025
d3369cf
remove extra comments and debug outputs
Jun 27, 2025
9 changes: 8 additions & 1 deletion build.gradle
@@ -52,6 +52,7 @@ dependencies {
jmhImplementation 'org.msgpack:jackson-dataformat-msgpack:0.9.8'
jmhImplementation 'org.apache.thrift:libthrift:0.19.0'
jmhImplementation 'javax.annotation:javax.annotation-api:1.3.2'
+ jmhImplementation 'net.openhft:chronicle-wire:2.25ea5'

Collaborator (author) comment: Got frustrated halfway through the effort and decided to add ChronicleWire as a comparison.

}

protobuf {
@@ -236,7 +237,13 @@ jmh {
'-XX:+UseG1GC',
'-Xmx2g',
'-XX:+UnlockExperimentalVMOptions',
- '-XX:+UseJVMCICompiler'
+ '-XX:+UseJVMCICompiler',
+ '--illegal-access=permit',
+ '--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED',
+ '--add-opens=java.base/java.lang=ALL-UNNAMED',
+ '--add-opens=java.base/java.lang.reflect=ALL-UNNAMED',
+ '--add-opens=java.base/java.util=ALL-UNNAMED',
+ '--add-opens=java.base/sun.nio.ch=ALL-UNNAMED'
]
}

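If the benchmarks are launched through JMH's programmatic runner rather than the Gradle jmh task, the same module flags have to reach the forked JVMs as well. A minimal sketch, assuming a hypothetical BenchmarkLauncher class; the --add-opens/--add-exports values are copied from this diff, and jvmArgsAppend is standard JMH API, not something added by this PR:

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class BenchmarkLauncher {
    public static void main(String[] args) throws RunnerException {
        Options opt = new OptionsBuilder()
                .include("ComparisonBenchmark")
                // Same module flags as the jmh { } block above; Chronicle Wire
                // needs them to reach internal JDK APIs on modern JVMs.
                .jvmArgsAppend(
                        "--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED",
                        "--add-opens=java.base/java.lang=ALL-UNNAMED",
                        "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
                        "--add-opens=java.base/java.util=ALL-UNNAMED",
                        "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED")
                .build();
        new Runner(opt).run();
    }
}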
34 changes: 21 additions & 13 deletions src/jmh/java/com/imprint/benchmark/ComparisonBenchmark.java
@@ -16,8 +16,15 @@
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
@Warmup(iterations = 3, time = 1)
- @Measurement(iterations = 25, time = 1)
- @Fork(value = 1, jvmArgs = {"-Xms4g", "-Xmx4g"})
+ @Measurement(iterations = 10, time = 1)
+ @Fork(value = 1, jvmArgs = {"-Xms4g", "-Xmx4g",
+ "--illegal-access=permit",
+ "--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED",
+ "--add-opens=java.base/java.lang=ALL-UNNAMED",
+ "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
+ "--add-opens=java.base/java.util=ALL-UNNAMED",
+ "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
+ "-Dimprint.buffer.bounds.check=false"})

Collaborator (author) comment on the added JVM args: ChronicleWire apparently needs all this.
public class ComparisonBenchmark {

private static final List<SerializingBenchmark> FRAMEWORKS = List.of(
@@ -28,9 +35,10 @@ public class ComparisonBenchmark {
new AvroSerializingBenchmark(),
new ThriftSerializingBenchmark(),
new KryoSerializingBenchmark(),
- new MessagePackSerializingBenchmark());
+ new MessagePackSerializingBenchmark(),
+ new ChronicleWireSerializingBenchmark());

@Param({"Imprint"})
@Param({"Imprint", "Jackson-JSON", "Protobuf", "FlatBuffers", "Avro-Generic", "Thrift", "Kryo", "MessagePack", "Chronicle-Wire"})
public String framework;

private SerializingBenchmark serializingBenchmark;
@@ -51,27 +59,27 @@ public void setup() {
}

@Benchmark
- public void serialize(Blackhole bh) {
+ public void serializeRecord(Blackhole bh) {
serializingBenchmark.serialize(bh);
}

- //@Benchmark
- public void deserialize(Blackhole bh) {
+ @Benchmark
+ public void deserializeRecord(Blackhole bh) {
serializingBenchmark.deserialize(bh);
}

- //@Benchmark
- public void projectAndSerialize(Blackhole bh) {
+ @Benchmark
+ public void projectThenSerialize(Blackhole bh) {
serializingBenchmark.projectAndSerialize(bh);
}

- //@Benchmark
- public void mergeAndSerialize(Blackhole bh) {
+ @Benchmark
+ public void mergeThenSerialize(Blackhole bh) {
serializingBenchmark.mergeAndSerialize(bh);
}

- //@Benchmark
- public void accessField(Blackhole bh) {
+ @Benchmark
+ public void accessSingleField(Blackhole bh) {
serializingBenchmark.accessField(bh);
}

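With the framework @Param expanded to all nine implementations, every enabled benchmark method now runs once per framework. A subset can still be selected at run time; a minimal sketch using JMH's programmatic runner (the class and parameter names come from this diff, while the .param(...) override is standard JMH rather than part of this PR):

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class RunSingleFramework {
    public static void main(String[] args) throws RunnerException {
        Options opt = new OptionsBuilder()
                .include("ComparisonBenchmark")
                // Restrict the @Param("framework") axis to two of the nine values
                // declared in the benchmark; omit .param(...) to run all of them.
                .param("framework", "Imprint", "Chronicle-Wire")
                .build();
        new Runner(opt).run();
    }
}

The same selection works from the JMH command line with -p framework=Imprint.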
15 changes: 1 addition & 14 deletions src/jmh/java/com/imprint/benchmark/ImprintDetailedBenchmark.java
@@ -26,18 +26,12 @@ public class ImprintDetailedBenchmark {

private DataGenerator.TestRecord testData;
private ImprintRecordBuilder preBuiltBuilder;
- private ImprintRecord preBuiltRecord;
private static final SchemaId SCHEMA_ID = new SchemaId(1, 1);

@Setup(Level.Trial)
public void setup() {
testData = DataGenerator.createTestRecord();
- try {
- preBuiltBuilder = buildRecord(testData);
- preBuiltRecord = preBuiltBuilder.build();
- } catch (ImprintException e) {
- throw new RuntimeException(e);
- }
+ preBuiltBuilder = buildRecord(testData);
}

private ImprintRecordBuilder buildRecord(DataGenerator.TestRecord pojo) {
@@ -74,12 +68,6 @@ public void buildToBuffer(Blackhole bh) {
}
}

- @Benchmark
- public void serializeToBuffer(Blackhole bh) {
- // Benchmark: Record → Bytes (just buffer copy)
- bh.consume(preBuiltRecord.serializeToBuffer());
- }
-
@Benchmark
public void fullPipeline(Blackhole bh) {
// Benchmark: POJO → Builder → Bytes (complete pipeline)
@@ -97,7 +85,6 @@ public static void main(String[] args) throws RunnerException {
.mode(Mode.AverageTime)
.timeUnit(TimeUnit.NANOSECONDS)
.build();

new Runner(opt).run();
}
}
AvroSerializingBenchmark.java
@@ -11,6 +11,7 @@

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
+ import java.util.List;

public class AvroSerializingBenchmark extends AbstractSerializingBenchmark {

@@ -101,7 +102,7 @@ public void projectAndSerialize(Blackhole bh) {
GenericRecord projected = new GenericData.Record(projectedSchema);
projected.put("id", original.get("id"));
projected.put("timestamp", original.get("timestamp"));
projected.put("tags", ((java.util.List)original.get("tags")).subList(0, 5));
projected.put("tags", ((List)original.get("tags")).subList(0, 5));

BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
projectedWriter.write(projected, encoder);
@@ -132,19 +133,6 @@ public void mergeAndSerialize(Blackhole bh) {
bh.consume(buildBytes(merged));
}

- private GenericRecord buildAvroRecord(DataGenerator.TestRecord pojo) {
- GenericRecord record = new GenericData.Record(schema);
- record.put("id", pojo.id);
- record.put("timestamp", pojo.timestamp);
- record.put("flags", pojo.flags);
- record.put("active", pojo.active);
- record.put("value", pojo.value);
- record.put("data", ByteBuffer.wrap(pojo.data));
- record.put("tags", pojo.tags);
- record.put("metadata", pojo.metadata);
- return record;
- }

private GenericRecord buildAvroRecordFromBytes(byte[] bytes) {
try {
BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
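The buildAvroRecord helper removed above assembled a GenericRecord field by field, and the projection path above still does the same assemble-and-encode by hand. A minimal, self-contained sketch of that pattern, assuming a simplified two-field schema (the benchmark's real schema has more fields):

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class AvroProjectionSketch {
    public static void main(String[] args) throws IOException {
        // Illustrative two-field schema, not the benchmark's real one.
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"Projected\",\"fields\":["
                        + "{\"name\":\"id\",\"type\":\"string\"},"
                        + "{\"name\":\"timestamp\",\"type\":\"long\"}]}");

        // Assemble the record field by field, as in projectAndSerialize above.
        GenericRecord projected = new GenericData.Record(schema);
        projected.put("id", "record-1");
        projected.put("timestamp", 123456789L);

        // Binary-encode it with the same writer/encoder pattern.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(projected, encoder);
        encoder.flush();
        System.out.println(out.toByteArray().length + " bytes");
    }
}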
New file: src/jmh/java/com/imprint/benchmark/serializers/ChronicleWireSerializingBenchmark.java
@@ -0,0 +1,163 @@
package com.imprint.benchmark.serializers;

import com.imprint.benchmark.DataGenerator;
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.wire.BinaryWire;
import net.openhft.chronicle.wire.Wire;
import net.openhft.chronicle.wire.WireType;
import org.openjdk.jmh.infra.Blackhole;

import java.util.List;
import java.util.Map;

public class ChronicleWireSerializingBenchmark extends AbstractSerializingBenchmark {

private byte[] serializedRecord1;

public ChronicleWireSerializingBenchmark() {
super("Chronicle-Wire");
}

@Override
public void setup(DataGenerator.TestRecord record1, DataGenerator.TestRecord record2) {
super.setup(record1, record2);

// Pre-serialize for deserialize benchmarks
this.serializedRecord1 = serializeRecord(record1);
byte[] serializedRecord2 = serializeRecord(record2);
}

@Override
public void serialize(Blackhole bh) {
byte[] serialized = serializeRecord(testData);
bh.consume(serialized);
}

@Override
public void deserialize(Blackhole bh) {
DataGenerator.TestRecord deserialized = deserializeRecord(serializedRecord1);
bh.consume(deserialized);
}

@Override
public void projectAndSerialize(Blackhole bh) {
// Full round trip: deserialize, project to a new object, re-serialize
DataGenerator.TestRecord original = deserializeRecord(serializedRecord1);

// Simulate projection by creating projected object
DataGenerator.ProjectedRecord projected = new DataGenerator.ProjectedRecord();
projected.id = original.id;
projected.timestamp = original.timestamp;
projected.tags = original.tags.subList(0, Math.min(5, original.tags.size()));

byte[] serialized = serializeProjectedRecord(projected);
bh.consume(serialized);
}

@Override
public void mergeAndSerialize(Blackhole bh) {
// Deserialize both records, merge them, and serialize the result
DataGenerator.TestRecord r1 = deserializeRecord(serializedRecord1);
DataGenerator.TestRecord r2 = testData2; // Use second record directly

// Create merged record following the pattern from other implementations
DataGenerator.TestRecord merged = new DataGenerator.TestRecord();
merged.id = r1.id;
merged.timestamp = System.currentTimeMillis(); // new value
merged.flags = r1.flags;
merged.active = false; // new value
merged.value = r1.value;
merged.data = r1.data;
merged.tags = r2.tags;
merged.metadata = r2.metadata;

byte[] serialized = serializeRecord(merged);
bh.consume(serialized);
}

@Override
public void accessField(Blackhole bh) {
DataGenerator.TestRecord deserialized = deserializeRecord(serializedRecord1);
long timestamp = deserialized.timestamp;
bh.consume(timestamp);
}

private byte[] serializeRecord(DataGenerator.TestRecord record) {
Bytes<?> bytes = Bytes.elasticByteBuffer();
try {
Wire wire = WireType.BINARY.apply(bytes);

wire.writeDocument(false, w -> {
if (record.id != null) w.write("id").text(record.id);
w.write("timestamp").int64(record.timestamp)
.write("flags").int32(record.flags)
.write("active").bool(record.active)
.write("value").float64(record.value);

if (record.data != null) {
w.write("data").bytes(record.data);
}
if (record.tags != null) {
w.write("tags").object(record.tags);
}
if (record.metadata != null) {
w.write("metadata").marshallable(m -> {
for (Map.Entry<String, String> entry : record.metadata.entrySet()) {
m.write(entry.getKey()).text(entry.getValue());
}
});
}
});

byte[] result = new byte[(int) bytes.readRemaining()];
bytes.read(result);
return result;
} finally {
bytes.releaseLast();
}
}

private byte[] serializeProjectedRecord(DataGenerator.ProjectedRecord record) {
Bytes<?> bytes = Bytes.elasticByteBuffer();
try {
Wire wire = WireType.BINARY.apply(bytes);

wire.writeDocument(false, w -> {
if (record.id != null) w.write("id").text(record.id);
w.write("timestamp").int64(record.timestamp);
if (record.tags != null) {
w.write("tags").object(record.tags);
}
});

byte[] result = new byte[(int) bytes.readRemaining()];
bytes.read(result);
return result;
} finally {
bytes.releaseLast();
}
}

private DataGenerator.TestRecord deserializeRecord(byte[] data) {
Bytes<?> bytes = Bytes.wrapForRead(data);
try {
Wire wire = new BinaryWire(bytes);
DataGenerator.TestRecord record = new DataGenerator.TestRecord();

wire.readDocument(null, w -> {
record.id = w.read("id").text();
record.timestamp = w.read("timestamp").int64();
record.flags = w.read("flags").int32();
record.active = w.read("active").bool();
record.value = w.read("value").float64();
record.data = w.read("data").bytes();
record.tags = (List<Integer>) w.read("tags").object();
record.metadata = w.read("metadata").marshallableAsMap(String.class, String.class);
});

return record;
} finally {
bytes.releaseLast();
}
}
}
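A minimal round-trip sanity check, outside JMH, using only the Chronicle Wire calls that appear in this new class (the field names and values here are illustrative, not the benchmark's test data):

import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.wire.BinaryWire;
import net.openhft.chronicle.wire.Wire;
import net.openhft.chronicle.wire.WireType;

public class ChronicleWireRoundTrip {
    public static void main(String[] args) {
        Bytes<?> bytes = Bytes.elasticByteBuffer();
        try {
            // Write a tiny document with the same pattern as serializeRecord above.
            Wire writeWire = WireType.BINARY.apply(bytes);
            writeWire.writeDocument(false, w -> {
                w.write("id").text("record-1");
                w.write("timestamp").int64(123456789L);
            });

            // Copy the wire bytes out, as the benchmark does.
            byte[] payload = new byte[(int) bytes.readRemaining()];
            bytes.read(payload);

            // Read the document back, mirroring deserializeRecord above.
            Bytes<?> readBytes = Bytes.wrapForRead(payload);
            try {
                Wire readWire = new BinaryWire(readBytes);
                String[] id = new String[1];
                long[] timestamp = new long[1];
                readWire.readDocument(null, w -> {
                    id[0] = w.read("id").text();
                    timestamp[0] = w.read("timestamp").int64();
                });
                System.out.println(id[0] + " @ " + timestamp[0]);
            } finally {
                readBytes.releaseLast();
            }
        } finally {
            bytes.releaseLast();
        }
    }
}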
ImprintSerializingBenchmark.java
@@ -7,12 +7,9 @@
import com.imprint.error.ImprintException;
import org.openjdk.jmh.infra.Blackhole;

- import java.nio.ByteBuffer;

public class ImprintSerializingBenchmark extends AbstractSerializingBenchmark {

private ImprintRecord imprintRecord1;
- private ImprintRecordBuilder preBuiltRecord; // Pre-built record for testing
private byte[] serializedRecord1;
private byte[] serializedRecord2;
private static final SchemaId SCHEMA_ID = new SchemaId(1, 1);
@@ -26,14 +23,13 @@ public void setup(DataGenerator.TestRecord testRecord, DataGenerator.TestRecord
super.setup(testRecord, testRecord2);
try {
this.imprintRecord1 = buildRecord(testRecord).build();
- this.preBuiltRecord = buildRecord(testRecord); // Pre-built for testing
ImprintRecord imprintRecord2 = buildRecord(testRecord2).build();

- ByteBuffer buf1 = this.imprintRecord1.serializeToBuffer();
+ var buf1 = this.imprintRecord1.serializeToBuffer();
this.serializedRecord1 = new byte[buf1.remaining()];
buf1.get(this.serializedRecord1);

- ByteBuffer buf2 = imprintRecord2.serializeToBuffer();
+ var buf2 = imprintRecord2.serializeToBuffer();
this.serializedRecord2 = new byte[buf2.remaining()];
buf2.get(this.serializedRecord2);
} catch (ImprintException e) {
@@ -56,18 +52,11 @@ private ImprintRecordBuilder buildRecord(DataGenerator.TestRecord pojo) throws I

@Override
public void serialize(Blackhole bh) {
- // Test 3: Just field addition (POJO → Builder)
try {
- var builder = buildRecord(this.testData);
- bh.consume(builder); // Consume builder to prevent dead code elimination
- } catch (ImprintException ignored) {
+ bh.consume(buildRecord(DataGenerator.createTestRecord()).buildToBuffer());
+ } catch (ImprintException e) {
+ throw new RuntimeException(e);
}

- // Test 2: Just serialization (Builder → Bytes)
- // try{
- // bh.consume(preBuiltRecord.buildToBuffer());
- // } catch (ImprintException ignored) {
- // }
}

@Override
@@ -82,8 +71,7 @@ public void deserialize(Blackhole bh) {
@Override
public void projectAndSerialize(Blackhole bh) {
try {
- // Should use zero-copy projection directly from existing record
- ImprintRecord projected = this.imprintRecord1.project(0, 1, 6);
+ var projected = this.imprintRecord1.project(0, 1, 6);
bh.consume(projected.serializeToBuffer());
} catch (ImprintException e) {
throw new RuntimeException(e);