Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
77 changes: 48 additions & 29 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -76,49 +76,40 @@ NOTE: Some compilers may optimize away unused debug types. For example, with `gc

# 🛠️ Quick Start

## 1️⃣ CFFI-style `cdef`
## Load an ISF (Linux/ELF DWARF)

```python
from dwarffi import DFFI

ffi = DFFI()
ffi.cdef("""
struct sensor_data {
uint32_t timestamp;
int16_t readings[3];
uint8_t status;
};
""")

sensor = ffi.new("struct sensor_data", {
"timestamp": 1234567,
"readings": [10, -5, 20],
"status": 0x01
})

print(f"Bytes: {ffi.to_bytes(sensor).hex()}")
print(f"Reading[1]: {sensor.readings[1]}") # -5
```
# Accepts .json or .json.xz
ffi = DFFI("ubuntu:5.4.0-26-generic:64.json.xz")

---

## 2️⃣ Load an ISF (Linux/ELF DWARF)
list_head_type = ffi.typeof("list_head")
print("list_head sizeof:", ffi.sizeof(list_head_type))
print(list_head_type)

```python
from dwarffi import DFFI
''' prints out:
struct list_head (size: 16 bytes) {
[+0 ] pointer next;
[+8 ] pointer prev;
}
'''

# Accepts .json or .json.xz
ffi = DFFI("vmlinux_isf.json.xz")
# make a new complex type
proc = ffi.new("struct task_struct", init={"pid": 1234, "comm": b"my_process"})

task = ffi.typeof("struct task_struct")
print("task_struct sizeof:", ffi.sizeof(task))

ffi.inspect_layout("struct task_struct")
print(proc.pid) # 1234
print(bytes(proc.comm)) # b'my_process\x00\x00\x00\x00\x00\x00'
print(ffi.string(proc.comm)) # b'my_process'
```

Download this example .json.xz [here](https://panda.re/volatility3_profiles/ubuntu:5.4.0-26-generic:64.json.xz).

---

## 3️⃣ Load an ISF (Windows PDB-derived / Volatility-style)
## Load an ISF (Windows PDB-derived / Volatility-style)

```python
from dwarffi import DFFI
Expand All @@ -141,6 +132,34 @@ ffi.inspect_layout("struct _UNICODE_STRING")

---

## CFFI-style `cdef`

dwarffi also supports inline C definitions, which are compiled to DWARF and converted to ISF on the fly. This is ideal for quick prototyping, or for when you have a small struct definition that isn't already in your ISF.

```python
from dwarffi import DFFI

ffi = DFFI()
ffi.cdef("""
struct sensor_data {
uint32_t timestamp;
int16_t readings[3];
uint8_t status;
};
""")

sensor = ffi.new("struct sensor_data", {
"timestamp": 1234567,
"readings": [10, -5, 20],
"status": 0x01
})

print(f"Bytes: {ffi.to_bytes(sensor).hex()}")
print(f"Reading[1]: {sensor.readings[1]}") # -5
```

---


# 🧩 Advanced Usage

## Anonymous Unions
Expand Down
49 changes: 46 additions & 3 deletions src/dwarffi/instances.py
Original file line number Diff line number Diff line change
Expand Up @@ -724,9 +724,52 @@ def __setattr__(self, name: str, new_value: Any) -> None:
_, field_offset, resolved_info, resolved_obj = flat_fields[name]

if resolved_info["kind"] == "array":
raise NotImplementedError(
f"Direct assignment to array field '{name}' is not supported."
)
# Allow direct assignment only for byte/char arrays
if isinstance(new_value, str):
data = new_value.encode("utf-8")
elif isinstance(new_value, (bytes, bytearray, memoryview)):
data = bytes(new_value)
else:
raise NotImplementedError(
f"Direct assignment to array field '{name}' is not supported."
)

# Resolve element type and ensure it's 1 byte
subtype_info = resolved_info.get("subtype")
if subtype_info is None:
raise ValueError(f"Array field '{name}' missing subtype info.")

elem_size = self._instance_vtype_accessor.get_type_size(subtype_info)
if elem_size != 1:
raise NotImplementedError(
f"Direct assignment to non-byte array field '{name}' is not supported."
)

count = resolved_info.get("count", 0)
if count <= 0:
return

# Compute array start in underlying buffer
start = self._instance_offset + field_offset
end = start + count

# Write like a C string: truncate, NUL-terminate, zero-fill
buf = self._instance_buffer
mv = memoryview(buf)

# zero-fill
mv[start:end] = b"\x00" * count

# copy payload
payload = data[: max(0, count - 1)]
mv[start : start + len(payload)] = payload

# Invalidate cache for this field if present
try:
del self._instance_cache[name]
except KeyError:
pass
return
self._write_data(resolved_info, resolved_obj, field_offset, new_value, name)

# Try/except is faster than checking 'in' for cache invalidation
Expand Down
Loading