diff --git a/tonsdk/boc/__init__.py b/tonsdk/boc/__init__.py
index 7bf6bec..a69fabf 100644
--- a/tonsdk/boc/__init__.py
+++ b/tonsdk/boc/__init__.py
@@ -2,6 +2,7 @@
 from ._builder import Builder, begin_cell
 from ._dict_builder import DictBuilder, begin_dict
 from ._slice import Slice
+from ._string_utils import string_to_cell, cell_to_string, read_string_tail, write_string_tail
 
 __all__ = [
     'Cell', 'Slice',
@@ -9,4 +10,5 @@
     'DictBuilder', 'begin_dict',
     'deserialize_cell_data', 'parse_boc_header',
+    'string_to_cell', 'cell_to_string', 'read_string_tail', 'write_string_tail'
 ]
diff --git a/tonsdk/utils/_data.py b/tonsdk/utils/_data.py
new file mode 100644
index 0000000..e044969
--- /dev/null
+++ b/tonsdk/utils/_data.py
@@ -0,0 +1,57 @@
+# https://github.com/ton-blockchain/TEPs/blob/master/text/0064-token-data-standard.md#data-serialization
+from typing import Union
+
+from tonsdk.boc import begin_cell, Builder, Slice
+
+
+class SnakeData:
+    prefix = 0x00
+    prefix_len = 8
+
+    @classmethod
+    def write(cls, builder: Builder, data: Union[bytes, bytearray], prefixed=False):
+        if prefixed:
+            builder.store_uint(cls.prefix, cls.prefix_len)
+
+        # reference implementation of the snake serialization: fill the current cell
+        # with as many bytes as fit, then continue in a new cell referenced from it
+        builders = []
+
+        while len(data) > 0:
+            max_bytes = builder.builder_rembits >> 3
+            chunk, data = data[:max_bytes], data[max_bytes:]
+
+            builder.store_bytes(chunk)
+            builders.append(builder)
+
+            builder = begin_cell()
+
+        if len(builders) > 1:
+            last_builder = builders[-1]
+
+            for builder in reversed(builders[:-1]):
+                builder.store_ref(last_builder.end_cell())
+                last_builder = builder
+
+        # an empty payload yields the original (possibly prefix-only) builder
+        return builders[0] if builders else builder
+
+    @classmethod
+    def read(cls, cs: Slice, prefixed=False):
+        data = bytearray()
+
+        if prefixed:
+            # consume the prefix so it is not returned as part of the payload
+            assert cs.load_uint(cls.prefix_len) == cls.prefix
+
+        # walk the chain of cells, collecting the byte payload of each one
+        while True:
+            data.extend(cs.load_bytes(cs.slice_bits >> 3))
+
+            if cs.slice_refs > 0:
+                cs = cs.load_ref().begin_parse()
+                continue
+
+            break
+
+        return data
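
Usage sketch (not part of the patch): a minimal round trip with the new SnakeData helper, assuming the extended Builder/Cell/Slice API this file relies on (begin_cell, end_cell, begin_parse, load_bytes, etc.) and a payload large enough to span several cells.

from tonsdk.boc import begin_cell
from tonsdk.utils._data import SnakeData  # private module added by this patch

# ~520 bytes of UTF-8 text, more than the ~127 bytes a single cell can hold
payload = ("TON snake-format example, " * 20).encode("utf-8")

# write: the returned builder is the root of the cell chain
root_cell = SnakeData.write(begin_cell(), payload, prefixed=True).end_cell()

# read: follow the refs back into one contiguous byte string
restored = SnakeData.read(root_cell.begin_parse(), prefixed=True)
assert bytes(restored) == payload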