Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 61 additions & 1 deletion .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,34 @@ jobs:
with:
python-version: "3.13"

rust-lint:
name: Rust static analysis
runs-on: ubuntu-latest
defaults:
run:
working-directory: rust
steps:
- uses: actions/checkout@v6

- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt, clippy

- name: Cache Rust build artefacts
uses: Swatinem/rust-cache@v2
with:
workspaces: rust

- name: Check formatting
run: cargo fmt --check

- name: Clippy
run: cargo clippy -- -D warnings

- name: Test
run: cargo test

check-build:
name: Check packaging metadata
uses: less-action/reusables/.github/workflows/python-test-build.yaml@f26f5812fa62d31b9a4e0af9f75f19f7f028425c
Expand All @@ -37,6 +65,14 @@ jobs:
steps:
- uses: actions/checkout@v6

- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable

- name: Cache Rust build artefacts
uses: Swatinem/rust-cache@v2
with:
workspaces: rust

- name: Set up Python
uses: actions/setup-python@v6
with:
Expand Down Expand Up @@ -72,6 +108,14 @@ jobs:
steps:
- uses: actions/checkout@v6

- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable

- name: Cache Rust build artefacts
uses: Swatinem/rust-cache@v2
with:
workspaces: rust

- name: Set up Python
uses: actions/setup-python@v6
with:
Expand All @@ -94,6 +138,14 @@ jobs:
steps:
- uses: actions/checkout@v6

- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable

- name: Cache Rust build artefacts
uses: Swatinem/rust-cache@v2
with:
workspaces: rust

- name: Set up Python
uses: actions/setup-python@v6
with:
Expand All @@ -116,6 +168,14 @@ jobs:
steps:
- uses: actions/checkout@v6

- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable

- name: Cache Rust build artefacts
uses: Swatinem/rust-cache@v2
with:
workspaces: rust

- name: Start Kafka backend
run: docker compose up -d

Expand Down Expand Up @@ -196,6 +256,6 @@ jobs:
- name: Install dependencies
run: pip install --require-hashes -r docs/requirements.txt
- name: Install pushed version
run: pip install --no-dependencies .
run: pip install .
- name: Build docs
run: sphinx-build -Wb html docs docs/_build
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -142,3 +142,6 @@ dmypy.json
/schema/
error-codes.txt
src/kio/_version.py

# Maturin
src/kio/lib_kio_native.dylib.dSYM/
14 changes: 11 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,15 @@ repos:
args:
- -disable-indent-size

- repo: local
hooks:
- id: cargo-fmt
name: cargo fmt
entry: cargo fmt --manifest-path rust/Cargo.toml --
language: system
types: [rust]
pass_filenames: false

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.15.6"
hooks:
Expand All @@ -43,7 +52,7 @@ repos:
- typing-extensions==4.10.0
- pytest==7.4.3
- pydantic==1.10.13
- hypothesis==6.99.12
- hypothesis==6.151.9
- types-setuptools==78.1.0.20250329
- types-requests==2.31.0.20240311

Expand Down Expand Up @@ -72,6 +81,5 @@ repos:
- id: check-manifest
args: ["--no-build-isolation"]
additional_dependencies:
- setuptools==80.9.0
- maturin==1.12.5
- setuptools-scm==9.2.2
- wheel==0.45.1
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ exclude *.yaml
exclude *.yml
exclude Makefile
exclude setup.cfg
exclude build_kio.py
Copy link

Copilot AI Apr 16, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

pyproject.toml sets the PEP 517 build backend to build_kio (from build_kio.py), but MANIFEST.in excludes build_kio.py. This will break builds from an sdist because the backend module won't be present when pip tries to import it. Include build_kio.py in the sdist/wheel inputs (remove this exclude or explicitly include it).

Suggested change
exclude build_kio.py
include build_kio.py

Copilot uses AI. Check for mistakes.
recursive-include src py.typed
recursive-include src *.py
include LICENSE
Expand Down
14 changes: 14 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
# Enable globstar to allow ** to match recursive directories.
SHELL := /usr/bin/env bash -O globstar

.PHONY: fetch-schema-src
fetch-schema-src:
python3 -m codegen.fetch_schema
Expand All @@ -11,3 +14,14 @@ generate-schema:

.PHONY: build-schema
build-schema: fetch-schema-src generate-schema

.PHONY: clean
clean:
rm -rf {**/,}*.egg-info {**/,}/__pycache__ build dist src/{**/,}*.so .coverage rust/target src/kio/_version.py src/kio/lib_kio_native.dylib.dSYM

.PHONY: nuke
nuke: clean
rm -rf .hypothesis .mypy_cache .import_linter_cache .pytest_cache .ruff_cache

build:
maturin develop
134 changes: 134 additions & 0 deletions benchmarks/parsing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
"""
Large flexible `MetadataResponse` parse micro-benchmark.

Reproduce and compare branches (after rebuilding the extension, e.g. ``maturin develop --release``)::

PYTHONPATH=src python benchmarks/parsing.py --fast -o /tmp/kio-parse-main.json # on main
PYTHONPATH=src python benchmarks/parsing.py --fast -o /tmp/kio-parse-branch.json # on your branch
python -m pyperf compare_to /tmp/kio-parse-main.json /tmp/kio-parse-branch.json

Use ``--rigorous`` instead of ``--fast`` when you need stable numbers; run outside a sandbox
if ``pyperf`` fails collecting load average metadata.
"""

from __future__ import annotations

import datetime
import io
import uuid

# ``copy.replace`` exists only on Python 3.13+; ``dataclasses.replace`` is the
# portable spelling (and matches benchmarks/parsing_memory.py).
from dataclasses import replace
Copy link

Copilot AI Apr 16, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

copy doesn't provide replace; replace is from dataclasses. As written, this benchmark script will fail to import at runtime. Switch the import to from dataclasses import replace (consistent with benchmarks/parsing_memory.py).

Suggested change
from copy import replace
from dataclasses import replace

Copilot uses AI. Check for mistakes.

from kio.schema.errors import ErrorCode
from kio.schema.metadata.v12 import MetadataResponse
from kio.schema.metadata.v12.response import MetadataResponseBroker
from kio.schema.metadata.v12.response import MetadataResponsePartition
from kio.schema.metadata.v12.response import MetadataResponseTopic
from kio.schema.types import BrokerId
from kio.schema.types import TopicName
from kio.serial import entity_reader
from kio.serial import entity_writer
from kio.static.primitive import i32
from kio.static.primitive import i32Timedelta

# Resolve the (de)serializers once at import time so the timed region below
# measures parsing only, not entity_writer/entity_reader construction.
write_metadata_response = entity_writer(MetadataResponse)
read_metadata_response = entity_reader(MetadataResponse)

def _make_broker(n: int) -> MetadataResponseBroker:
    # One synthetic broker per node id; all other fields are fixed.
    return MetadataResponseBroker(
        node_id=BrokerId(n),
        host="foo.bar",
        port=i32(1234),
        rack=None,
    )


def _make_partition(p: int) -> MetadataResponsePartition:
    # Synthetic partition with fixed leader/replica metadata.
    return MetadataResponsePartition(
        error_code=ErrorCode.delegation_token_expired,
        partition_index=i32(p),
        leader_id=BrokerId(2345),
        leader_epoch=i32(6445678),
        replica_nodes=(BrokerId(12345), BrokerId(7651)),
        isr_nodes=(),
        offline_replicas=(),
    )


def _make_topic(n: int) -> MetadataResponseTopic:
    # Synthetic topic carrying twelve partitions and a random topic id.
    return MetadataResponseTopic(
        error_code=ErrorCode.kafka_storage_error,
        name=TopicName(f"topic {n}"),
        topic_id=uuid.uuid4(),
        is_internal=False,
        partitions=tuple(_make_partition(p) for p in range(12)),
        topic_authorized_operations=i32(765443),
    )


# Large flexible response fixture: 20 brokers, 1000 topics x 12 partitions.
instance = MetadataResponse(
    throttle_time=i32Timedelta.parse(datetime.timedelta(milliseconds=123)),
    brokers=tuple(_make_broker(n) for n in range(20)),
    cluster_id="556",
    controller_id=BrokerId(3),
    topics=tuple(_make_topic(n) for n in range(1_000)),
)

# Pre-serialize ten responses (distinguished only by controller_id) into one
# contiguous byte string; the benchmark below measures parsing of this fixed
# payload.  A loop replaces the ten copy-pasted write statements.
with io.BytesIO() as _sink:
    for _controller in range(1001, 1011):
        write_metadata_response(
            _sink, replace(instance, controller_id=BrokerId(_controller))
        )
    # Rebind ``buffer`` to the raw bytes before the BytesIO is closed.
    buffer = _sink.getvalue()


def perform_parsing(
    loops: int,
) -> float:
    """
    Time ``loops`` iterations of parsing the module-level ``buffer``.

    Each iteration reads the ten concatenated ``MetadataResponse`` messages
    back to back (the reads are written out individually rather than looped)
    and then asserts the whole buffer was consumed.

    Relies on ``pyperf`` being bound at module level by the ``__main__`` block
    before this function runs — presumably safe because pyperf workers
    re-execute the script; TODO(review): confirm.

    :param loops: Number of timed iterations, supplied by the pyperf runner.
    :return: Accumulated wall-clock seconds spent inside the timed region.
    """
    loop_range = range(loops)
    accumulated = 0.0

    for _ in loop_range:
        offset = 0
        # Only the region between the two perf_counter() calls is timed; the
        # offset bookkeeping is included in that region.
        t0 = pyperf.perf_counter()
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        _, size = read_metadata_response(buffer, offset)
        offset += size
        accumulated += pyperf.perf_counter() - t0
        # Guards against silent under-reads: all serialized bytes must be used.
        assert offset == len(buffer), "buffer not exhausted after read"

    return accumulated


if __name__ == "__main__":
    # Ad-hoc profiling scaffolding, kept for manual use:
    # from scalene import scalene_profiler
    # scalene_profiler.start()
    # perform_parsing(1000)
    # scalene_profiler.stop()

    # perform_parsing(1000)

    # Imported here so importing this module does not require pyperf.
    import pyperf

    runner = pyperf.Runner()
    # NOTE(review): each timed loop in perform_parsing performs 10 reads, but
    # ``inner_loops`` is declared as 10000 — confirm the intended
    # normalization factor.
    runner.bench_time_func("roundtrip", perform_parsing, inner_loops=10000)
Loading
Loading