Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/dissect-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@ jobs:
ci:
uses: fox-it/dissect-workflow-templates/.github/workflows/dissect-ci-template.yml@main
secrets: inherit
with:
run-benchmarks: true


publish:
if: ${{ github.ref_name == 'main' || github.ref_type == 'tag' }}
Expand Down
20 changes: 19 additions & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,31 @@
from __future__ import annotations

import importlib.util

import pytest

from dissect.cstruct.cstruct import cstruct

# Whether pytest-benchmark is importable in this environment; used below to
# register the `benchmark` marker ourselves and to skip benchmark-marked tests.
HAS_BENCHMARK = importlib.util.find_spec("pytest_benchmark") is not None


def pytest_configure(config: pytest.Config) -> None:
    """Register the ``benchmark`` marker manually when pytest-benchmark is absent.

    Without this, running the suite without pytest-benchmark (or pytest-codspeed)
    installed would emit unknown-marker warnings for every benchmark test.
    """
    if HAS_BENCHMARK:
        # The plugin registers its own marker; nothing to do.
        return

    config.addinivalue_line("markers", "benchmark: mark test for benchmarking (requires pytest-benchmark)")


def pytest_runtest_setup(item: pytest.Item) -> None:
    """Skip tests carrying the ``benchmark`` marker when pytest-benchmark is unavailable."""
    if HAS_BENCHMARK:
        return

    if item.get_closest_marker("benchmark") is not None:
        pytest.skip("pytest-benchmark is not installed")


@pytest.fixture
def cs() -> cstruct:
    """Provide a fresh, empty cstruct instance for each test."""
    parser = cstruct()
    return parser


# NOTE: the pasted diff left both the old and new decorator lines stacked here;
# a fixture must have exactly one @pytest.fixture decorator, so only the updated
# one (with human-readable test IDs) is kept.
@pytest.fixture(params=[True, False], ids=["compiled", "interpreted"])
def compiled(request: pytest.FixtureRequest) -> bool:
    """Parametrize dependent tests over both the compiled and interpreted backends.

    Returns:
        True for the "compiled" parametrization, False for "interpreted".
    """
    return request.param
87 changes: 87 additions & 0 deletions tests/test_benchmark.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
from __future__ import annotations

from typing import TYPE_CHECKING

import pytest

if TYPE_CHECKING:
from pytest_benchmark.fixture import BenchmarkFixture

from dissect.cstruct.cstruct import cstruct


@pytest.mark.benchmark
def test_benchmark_basic(cs: cstruct, compiled: bool, benchmark: BenchmarkFixture) -> None:
    """Benchmark the parsing of a simple struct with both the compiled and interpreted backends."""
    definition = """
    struct test {
        uint32 a;
        uint64 b;
        uint16 c;
        uint8 d;
    };
    """
    cs.load(definition, compiled=compiled)

    buf = b"\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x04"
    benchmark(lambda: cs.test(buf))


@pytest.mark.benchmark
def test_benchmark_union(cs: cstruct, compiled: bool, benchmark: BenchmarkFixture) -> None:
    """Benchmark the parsing of a simple union with both the compiled and interpreted backends."""
    definition = """
    union test {
        uint32 a;
        uint64 b;
        uint16 c;
        uint8 d;
    };
    """
    cs.load(definition, compiled=compiled)

    buf = b"\x01\x02\x03\x04\x05\x06\x07\x08"
    benchmark(lambda: cs.test(buf))


@pytest.mark.benchmark
def test_benchmark_attribute_access(cs: cstruct, benchmark: BenchmarkFixture) -> None:
    """Benchmark the attribute access of a parsed struct."""
    definition = """
    struct test {
        uint32 a;
        uint64 b;
        uint16 c;
        uint8 d;
    };
    """
    cs.load(definition)
    parsed = cs.test(b"\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x04")

    def read_all_fields() -> tuple:
        # Touch every field so each benchmark round measures four attribute lookups.
        return (parsed.a, parsed.b, parsed.c, parsed.d)

    benchmark(read_all_fields)


@pytest.mark.benchmark
def test_benchmark_getattr_constants(cs: cstruct, benchmark: BenchmarkFixture) -> None:
    """Benchmark the resolving of constants on the cstruct instance."""
    definition = """
    #define CONST1 1
    """
    cs.load(definition)

    def lookup_constant() -> object:
        return cs.CONST1

    benchmark(lookup_constant)


@pytest.mark.benchmark
def test_benchmark_getattr_types(cs: cstruct, benchmark: BenchmarkFixture) -> None:
    """Benchmark the resolving of types on the cstruct instance."""

    def lookup_type() -> object:
        return cs.uint8

    benchmark(lookup_type)


@pytest.mark.benchmark
def test_benchmark_getattr_typedefs(cs: cstruct, benchmark: BenchmarkFixture) -> None:
    """Benchmark the resolving of typedefs on the cstruct instance."""
    definition = """
    typedef uint8 my_uint8;
    """
    cs.load(definition)

    def lookup_typedef() -> object:
        return cs.my_uint8

    benchmark(lookup_typedef)
12 changes: 11 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,20 @@ deps =
coverage
dependency_groups = test
commands =
pytest --basetemp="{envtmpdir}" --import-mode="append" {posargs:--color=yes --cov=dissect --cov-report=term-missing -v tests}
coverage report
coverage xml

[testenv:benchmark]
deps =
pytest-benchmark
pytest-codspeed
dependency_groups = test
passenv =
CODSPEED_ENV
commands =
pytest --basetemp="{envtmpdir}" --import-mode="append" -m benchmark {posargs:--color=yes -v tests}

[testenv:build]
package = skip
dependency_groups = build
Expand Down