diff --git a/conformance/src/txn_execute.zig b/conformance/src/txn_execute.zig index 3f984e0c79..b9f24ec742 100644 --- a/conformance/src/txn_execute.zig +++ b/conformance/src/txn_execute.zig @@ -387,7 +387,7 @@ fn executeTxnContext( .update_sysvar_deps = update_sysvar_deps, }, ); - try update_sysvar.updateClock(allocator, .{ + _ = try update_sysvar.updateClock(allocator, .{ .feature_set = &feature_set, .epoch_schedule = &epoch_schedule, .epoch_stakes = epoch_stakes_map.getPtr(epoch), @@ -611,7 +611,7 @@ fn executeTxnContext( .update_sysvar_deps = update_sysvar_deps, }, ); - try update_sysvar.updateClock(allocator, .{ + _ = try update_sysvar.updateClock(allocator, .{ .feature_set = &feature_set, .epoch_schedule = &epoch_schedule, .epoch_stakes = epoch_stakes_map.getPtr(epoch), diff --git a/src/cmd.zig b/src/cmd.zig index 4288a1847e..70d8953d71 100644 --- a/src/cmd.zig +++ b/src/cmd.zig @@ -1675,6 +1675,11 @@ fn validator( .account_reader = account_store.reader(), }); + try app_base.rpc_hooks.set(allocator, sig.rpc.hook_contexts.Ledger{ + .ledger = &ledger, + .slot_tracker = &replay_service_state.replay_state.slot_tracker, + }); + const replay_thread = try replay_service_state.spawnService( &app_base, if (maybe_vote_sockets) |*vs| vs else null, diff --git a/src/core/ReservedAccounts.zig b/src/core/ReservedAccounts.zig index a1d7c5a7d3..b7f1363c75 100644 --- a/src/core/ReservedAccounts.zig +++ b/src/core/ReservedAccounts.zig @@ -37,10 +37,19 @@ pub fn initForSlot( slot: Slot, ) Allocator.Error!ReservedAccounts { var reserved_accounts = try init(allocator); + errdefer reserved_accounts.deinit(allocator); reserved_accounts.update(feature_set, slot); return reserved_accounts; } +pub fn initAllActivated(allocator: Allocator) Allocator.Error!ReservedAccounts { + var reserved_accounts = ReservedAccounts{ .map = .empty }; + errdefer reserved_accounts.deinit(allocator); + try reserved_accounts.map.ensureTotalCapacity(allocator, ACCOUNTS.len); + for (ACCOUNTS) |account| 
reserved_accounts.map.putAssumeCapacity(account.pubkey, {}); + return reserved_accounts; +} + pub fn update( self: *ReservedAccounts, feature_set: *const FeatureSet, @@ -55,7 +64,7 @@ pub fn update( } } -const ACCOUNTS: []const struct { pubkey: Pubkey, feature: ?Feature } = &.{ +const ACCOUNTS = [_]struct { pubkey: Pubkey, feature: ?Feature }{ // zig fmt: off .{ .pubkey = sig.runtime.program.address_lookup_table.ID, .feature = .add_new_reserved_account_keys }, .{ .pubkey = sig.runtime.program.bpf_loader.v1.ID, .feature = null }, diff --git a/src/core/features.zon b/src/core/features.zon index 6a9510268e..8081937056 100644 --- a/src/core/features.zon +++ b/src/core/features.zon @@ -253,4 +253,5 @@ .{ .name = "increase_cpi_account_info_limit", .pubkey = "H6iVbVaDZgDphcPbcZwc5LoznMPWQfnJ1AM7L1xzqvt5" }, .{ .name = "vote_state_v4", .pubkey = "Gx4XFcrVMt4HUvPzTpTSVkdDVgcDSjKhDN1RqRS6KDuZ" }, .{ .name = "enable_bls12_381_syscall", .pubkey = "b1sraWPVFdcUizB2LV5wQTeMuK8M313bi5bHjco5eVU" }, + .{ .name = "remove_simple_vote_from_cost_model", .pubkey = "2GCrNXbzmt4xrwdcKS2RdsLzsgu4V5zHAemW57pcHT6a" }, } diff --git a/src/core/hash.zig b/src/core/hash.zig index fbbbda9fc9..32c8363689 100644 --- a/src/core/hash.zig +++ b/src/core/hash.zig @@ -125,6 +125,10 @@ pub const Hash = extern struct { }; } + pub fn jsonStringify(self: Hash, write_stream: anytype) !void { + try write_stream.write(self.base58String().constSlice()); + } + /// Intended to be used in tests. 
pub fn initRandom(random: std.Random) Hash { var data: [SIZE]u8 = undefined; diff --git a/src/core/transaction.zig b/src/core/transaction.zig index 4de12429d4..8c2e8cb6f3 100644 --- a/src/core/transaction.zig +++ b/src/core/transaction.zig @@ -419,9 +419,13 @@ pub const Message = struct { pub fn isWritable( self: Message, index: usize, - lookups: LookupTableAccounts, + maybe_lookups: ?LookupTableAccounts, reserved_accounts: *const ReservedAccounts, ) bool { + const lookups = maybe_lookups orelse LookupTableAccounts{ + .writable = &.{}, + .readonly = &.{}, + }; const pubkey = blk: { if (index < self.account_keys.len) { if (index >= self.signature_count) { diff --git a/src/ledger/Ledger.zig b/src/ledger/Ledger.zig index 128c01ac1c..de9b223f61 100644 --- a/src/ledger/Ledger.zig +++ b/src/ledger/Ledger.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const builtin = @import("builtin"); const sig = @import("../sig.zig"); const lib = @import("lib.zig"); @@ -53,6 +54,21 @@ pub fn init( }; } +pub fn initForTest( + allocator: Allocator, +) !struct { Ledger, std.testing.TmpDir } { + if (!builtin.is_test) @compileError("only used in tests"); + var tmp = std.testing.tmpDir(.{}); + try tmp.dir.makeDir("ledger"); + const path = try tmp.dir.realpathAlloc(allocator, "ledger"); + defer allocator.free(path); + + return .{ + try Ledger.init(allocator, .FOR_TESTS, path, null), + tmp, + }; +} + pub fn reader(self: *Ledger) Reader { return .{ .ledger = self, diff --git a/src/ledger/Reader.zig b/src/ledger/Reader.zig index 14416f405d..bc09462ef9 100644 --- a/src/ledger/Reader.zig +++ b/src/ledger/Reader.zig @@ -1519,7 +1519,6 @@ pub const VersionedConfirmedBlock = struct { pub fn deinit(self: @This(), allocator: Allocator) void { for (self.transactions) |it| it.deinit(allocator); - for (self.rewards) |it| it.deinit(allocator); allocator.free(self.transactions); allocator.free(self.rewards); } @@ -1548,7 +1547,7 @@ const ConfirmedTransactionWithStatusMeta = struct { block_time: 
?UnixTimestamp, }; -const TransactionWithStatusMeta = union(enum) { +pub const TransactionWithStatusMeta = union(enum) { // Very old transactions may be missing metadata missing_metadata: Transaction, // Versioned stored transaction always have metadata @@ -2498,6 +2497,7 @@ test getTransactionStatus { .loaded_addresses = .{}, .return_data = .{}, .compute_units_consumed = 1000, + .cost_units = null, }; // insert transaction status and root it @@ -2689,6 +2689,7 @@ test getConfirmedSignaturesForAddress { .loaded_addresses = .{}, .return_data = .{}, .compute_units_consumed = 1000, + .cost_units = null, }; try write_batch.put(schema.transaction_status, .{ sig1, slot }, status_meta); diff --git a/src/ledger/ResultWriter.zig b/src/ledger/ResultWriter.zig index 7b5604c53d..1edd5cb8b4 100644 --- a/src/ledger/ResultWriter.zig +++ b/src/ledger/ResultWriter.zig @@ -37,15 +37,18 @@ pub fn writeTransactionStatus( self: *const ResultWriter, slot: Slot, signature: Signature, - writeable_keys: ArrayList(Pubkey), - readonly_keys: ArrayList(Pubkey), + writeable_keys: []const Pubkey, + readonly_keys: []const Pubkey, status: TransactionStatusMeta, transaction_index: usize, ) !void { - try self.ledger.db.put(schema.transaction_status, .{ signature, slot }, status); + var write_batch = try self.ledger.db.initWriteBatch(); + defer write_batch.deinit(); + + try write_batch.put(schema.transaction_status, .{ signature, slot }, status); inline for (.{ writeable_keys, readonly_keys }, .{ true, false }) |keys, writeable| { - for (keys.items) |address| { - try self.ledger.db.put( + for (keys) |address| { + try write_batch.put( schema.address_signatures, .{ .address = address, @@ -57,6 +60,8 @@ pub fn writeTransactionStatus( ); } } + + try self.ledger.db.commit(&write_batch); } /// agave: insert_bank_hash diff --git a/src/ledger/benchmarks.zig b/src/ledger/benchmarks.zig index fa78f3a548..af20ac441e 100644 --- a/src/ledger/benchmarks.zig +++ b/src/ledger/benchmarks.zig @@ -19,7 +19,7 @@ fn 
createRewards(allocator: std.mem.Allocator, count: usize) !Rewards { var rewards: Rewards = Rewards.init(allocator); for (0..count) |i| { try rewards.append(Reward{ - .pubkey = &Pubkey.initRandom(rand).data, + .pubkey = Pubkey.initRandom(rand), .lamports = @intCast(42 + i), .post_balance = std.math.maxInt(u64), .reward_type = RewardType.fee, @@ -160,7 +160,7 @@ pub const BenchmarkLedger = struct { var indices = try std.array_list.Managed(u32).initCapacity(allocator, num_reads); defer indices.deinit(); for (0..num_reads) |_| { - indices.appendAssumeCapacity(rng.random().uintAtMost(u32, @intCast(total_shreds))); + indices.appendAssumeCapacity(rng.random().uintAtMost(u32, @intCast(total_shreds - 1))); } const reader = state.reader(); @@ -247,7 +247,7 @@ pub const BenchmarkLedger = struct { var indices = try std.array_list.Managed(u32).initCapacity(allocator, total_shreds); defer indices.deinit(); for (0..total_shreds) |_| { - indices.appendAssumeCapacity(rng.random().uintAtMost(u32, @intCast(total_shreds))); + indices.appendAssumeCapacity(rng.random().uintAtMost(u32, @intCast(total_shreds - 1))); } const reader = state.reader(); @@ -347,8 +347,8 @@ pub const BenchmarkLedger = struct { _ = try result_writer.writeTransactionStatus( slot, signature, - w_keys, - r_keys, + w_keys.items, + r_keys.items, status, tx_idx, ); diff --git a/src/ledger/tests.zig b/src/ledger/tests.zig index 73326d0212..00b4106068 100644 --- a/src/ledger/tests.zig +++ b/src/ledger/tests.zig @@ -416,6 +416,7 @@ pub fn insertDataForBlockTest( .loaded_addresses = .{}, .return_data = .{}, .compute_units_consumed = compute_units_consumed, + .cost_units = null, }; try db.put(schema.transaction_status, .{ signature, slot }, status); try db.put(schema.transaction_status, .{ signature, slot + 1 }, status); @@ -435,6 +436,7 @@ pub fn insertDataForBlockTest( .loaded_addresses = .{}, .return_data = .{}, .compute_units_consumed = compute_units_consumed, + .cost_units = null, }, }); } diff --git 
a/src/ledger/transaction_status.zig b/src/ledger/transaction_status.zig index 75c100f04d..19961dbe40 100644 --- a/src/ledger/transaction_status.zig +++ b/src/ledger/transaction_status.zig @@ -3,6 +3,7 @@ const sig = @import("../sig.zig"); const Allocator = std.mem.Allocator; const InstructionErrorEnum = sig.core.instruction.InstructionErrorEnum; +const Pubkey = sig.core.Pubkey; const RewardType = sig.replay.rewards.RewardType; pub const TransactionStatusMeta = struct { @@ -30,6 +31,10 @@ pub const TransactionStatusMeta = struct { return_data: ?TransactionReturnData, /// The amount of BPF instructions that were executed in order to complete this transaction. compute_units_consumed: ?u64, + /// The total cost units for this transaction, used for block scheduling/packing. + /// This is the sum of: signature_cost + write_lock_cost + data_bytes_cost + + /// programs_execution_cost + loaded_accounts_data_size_cost. + cost_units: ?u64, pub const EMPTY_FOR_TEST = TransactionStatusMeta{ .status = null, @@ -44,23 +49,24 @@ pub const TransactionStatusMeta = struct { .loaded_addresses = .{}, .return_data = null, .compute_units_consumed = null, + .cost_units = null, }; pub fn deinit(self: @This(), allocator: Allocator) void { allocator.free(self.pre_balances); allocator.free(self.post_balances); if (self.log_messages) |log_messages| allocator.free(log_messages); - inline for (.{ - self.inner_instructions, - self.pre_token_balances, - self.post_token_balances, - self.rewards, - }) |maybe_slice| { + if (self.inner_instructions) |inner| { + for (inner) |item| item.deinit(allocator); + allocator.free(inner); + } + inline for (.{ self.pre_token_balances, self.post_token_balances }) |maybe_slice| { if (maybe_slice) |slice| { for (slice) |item| item.deinit(allocator); allocator.free(slice); } } + if (self.rewards) |rewards| allocator.free(rewards); self.loaded_addresses.deinit(allocator); if (self.return_data) |it| it.deinit(allocator); } @@ -105,16 +111,13 @@ pub const 
CompiledInstruction = struct { pub const TransactionTokenBalance = struct { account_index: u8, - mint: []const u8, + mint: Pubkey, ui_token_amount: UiTokenAmount, - owner: []const u8, - program_id: []const u8, + owner: Pubkey, + program_id: Pubkey, pub fn deinit(self: @This(), allocator: Allocator) void { self.ui_token_amount.deinit(allocator); - allocator.free(self.mint); - allocator.free(self.owner); - allocator.free(self.program_id); } }; @@ -133,24 +136,20 @@ pub const UiTokenAmount = struct { pub const Rewards = std.array_list.Managed(Reward); pub const Reward = struct { - pubkey: []const u8, + pubkey: Pubkey, lamports: i64, /// Account balance in lamports after `lamports` was applied post_balance: u64, reward_type: ?RewardType, /// Vote account commission when the reward was credited, only present for voting and staking rewards commission: ?u8, - - pub fn deinit(self: @This(), allocator: Allocator) void { - allocator.free(self.pubkey); - } }; pub const LoadedAddresses = struct { /// List of addresses for writable loaded accounts - writable: []const sig.core.Pubkey = &.{}, + writable: []const Pubkey = &.{}, /// List of addresses for read-only loaded accounts - readonly: []const sig.core.Pubkey = &.{}, + readonly: []const Pubkey = &.{}, pub fn deinit(self: @This(), allocator: Allocator) void { allocator.free(self.writable); @@ -159,7 +158,7 @@ pub const LoadedAddresses = struct { }; pub const TransactionReturnData = struct { - program_id: sig.core.Pubkey = sig.core.Pubkey.ZEROES, + program_id: Pubkey = Pubkey.ZEROES, data: []const u8 = &.{}, pub fn deinit(self: @This(), allocator: Allocator) void { @@ -167,6 +166,239 @@ pub const TransactionReturnData = struct { } }; +/// Builder for creating TransactionStatusMeta from execution results. +/// This is used by the replay system to persist transaction status metadata +/// to the ledger for RPC queries like getBlock and getTransaction. 
+pub const TransactionStatusMetaBuilder = struct { + const runtime = sig.runtime; + const TransactionContext = runtime.transaction_context.TransactionContext; + const LogCollector = runtime.LogCollector; + const InstructionTrace = TransactionContext.InstructionTrace; + const RuntimeInstructionInfo = runtime.InstructionInfo; + const RuntimeTransactionReturnData = runtime.transaction_context.TransactionReturnData; + const ProcessedTransaction = runtime.transaction_execution.ProcessedTransaction; + const ExecutedTransaction = runtime.transaction_execution.ExecutedTransaction; + + /// Build TransactionStatusMeta from a ProcessedTransaction and pre-captured balances. + /// + /// Arguments: + /// - allocator: Used to allocate the returned slices (caller owns the memory) + /// - processed_tx: The result of transaction execution + /// - pre_balances: Lamport balances of accounts before execution (caller must capture these) + /// - post_balances: Lamport balances of accounts after execution (caller must capture these) + /// - loaded_addresses: Addresses loaded from address lookup tables + /// - pre_token_balances: SPL Token balances before execution (optional) + /// - post_token_balances: SPL Token balances after execution (optional) + /// + /// Returns owned TransactionStatusMeta that must be freed with deinit(). 
+ pub fn build( + allocator: Allocator, + processed_tx: ProcessedTransaction, + pre_balances: []const u64, + post_balances: []const u64, + loaded_addresses: LoadedAddresses, + pre_token_balances: ?[]const TransactionTokenBalance, + post_token_balances: ?[]const TransactionTokenBalance, + ) error{OutOfMemory}!TransactionStatusMeta { + // Convert log messages from LogCollector + const log_messages: ?[]const []const u8 = if (processed_tx.outputs) |outputs| blk: { + if (outputs.log_collector) |log_collector| { + break :blk try extractLogMessages(allocator, log_collector); + } + break :blk null; + } else null; + errdefer if (log_messages) |logs| allocator.free(logs); + + // Convert inner instructions from InstructionTrace + const inner_instructions = if (processed_tx.outputs) |outputs| blk: { + if (outputs.instruction_trace) |trace| { + break :blk try convertInstructionTrace(allocator, trace); + } + break :blk null; + } else null; + errdefer if (inner_instructions) |inner| { + for (inner) |item| item.deinit(allocator); + allocator.free(inner); + }; + + // Convert return data + const return_data: ?TransactionReturnData = if (processed_tx.outputs) |outputs| blk: { + if (outputs.return_data) |rd| { + break :blk try convertReturnData(allocator, rd); + } + break :blk null; + } else null; + errdefer if (return_data) |rd| rd.deinit(allocator); + + // Calculate compute units consumed + const compute_units_consumed: ?u64 = if (processed_tx.outputs) |outputs| + outputs.compute_limit - outputs.compute_meter + else + null; + + // Copy balances (caller provided these, we need to own them) + const owned_pre_balances = try allocator.dupe(u64, pre_balances); + errdefer allocator.free(owned_pre_balances); + + const owned_post_balances = try allocator.dupe(u64, post_balances); + errdefer allocator.free(owned_post_balances); + + // Copy loaded addresses + const writable = try allocator.dupe(Pubkey, loaded_addresses.writable); + errdefer allocator.free(writable); + const readonly = try 
allocator.dupe(Pubkey, loaded_addresses.readonly); + errdefer allocator.free(readonly); + const owned_loaded_addresses = LoadedAddresses{ + .writable = writable, + .readonly = readonly, + }; + + return TransactionStatusMeta{ + .status = processed_tx.err, + .fee = processed_tx.fees.total(), + .pre_balances = owned_pre_balances, + .post_balances = owned_post_balances, + .inner_instructions = inner_instructions, + .log_messages = log_messages, + .pre_token_balances = pre_token_balances, + .post_token_balances = post_token_balances, + // NOTE: rewards are not populated at all by agave + // [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/rpc/src/transaction_status_service.rs#L190 + .rewards = null, + .loaded_addresses = owned_loaded_addresses, + .return_data = return_data, + .compute_units_consumed = compute_units_consumed, + .cost_units = processed_tx.cost_units, + }; + } + + /// Extract log messages from a LogCollector. Returns a caller-owned slice + /// of string slices that point into the LogCollector's memory. + fn extractLogMessages( + allocator: Allocator, + log_collector: LogCollector, + ) error{OutOfMemory}![]const []const u8 { + // Count messages first + var count: usize = 0; + var iter = log_collector.iterator(); + while (iter.next()) |_| { + count += 1; + } + + if (count == 0) return &.{}; + + const messages = try allocator.alloc([]const u8, count); + errdefer allocator.free(messages); + + iter = log_collector.iterator(); + var i: usize = 0; + while (iter.next()) |msg| : (i += 1) { + // The log collector returns sentinel-terminated strings, we just store the slice + messages[i] = msg; + } + + return messages; + } + + /// Convert InstructionTrace to InnerInstructions array. + /// The trace contains all CPI calls; we need to group them by top-level instruction index. 
+ fn convertInstructionTrace( + allocator: Allocator, + trace: InstructionTrace, + ) error{OutOfMemory}![]const InnerInstructions { + if (trace.len == 0) return &.{}; + + // Group instructions by their top-level instruction index (depth == 1 starts a new group) + // Instructions at depth > 1 are inner instructions of the most recent depth == 1 instruction + + var result = try std.ArrayList(InnerInstructions).initCapacity(allocator, trace.len); + errdefer { + for (result.items) |item| item.deinit(allocator); + result.deinit(allocator); + } + + var current_inner = try std.ArrayList(InnerInstruction).initCapacity(allocator, trace.len); + defer { + for (current_inner.items) |ix| ix.deinit(allocator); + current_inner.deinit(allocator); + } + + var current_top_level_index: u8 = 0; + var top_level_count: u8 = 0; + var has_top_level: bool = false; + + for (trace.slice()) |entry| { + if (entry.depth == 1) { + // This is a top-level instruction - flush previous group if any + if (has_top_level and current_inner.items.len > 0) { + try result.append(allocator, InnerInstructions{ + .index = current_top_level_index, + .instructions = try current_inner.toOwnedSlice(allocator), + }); + } + current_inner.clearRetainingCapacity(); + current_top_level_index = top_level_count; + top_level_count += 1; + has_top_level = true; + } else if (entry.depth > 1) { + // This is an inner instruction (CPI) + const inner = try convertToInnerInstruction(allocator, entry.ixn_info, entry.depth); + errdefer inner.deinit(allocator); + try current_inner.append(allocator, inner); + } + } + + // Flush final group + if (has_top_level and current_inner.items.len > 0) { + try result.append(allocator, InnerInstructions{ + .index = current_top_level_index, + .instructions = try current_inner.toOwnedSlice(allocator), + }); + } + + return try result.toOwnedSlice(allocator); + } + + /// Convert a single instruction from InstructionInfo to InnerInstruction format. 
+ fn convertToInnerInstruction( + allocator: Allocator, + ixn_info: RuntimeInstructionInfo, + depth: u8, + ) error{OutOfMemory}!InnerInstruction { + // Build account indices array + const accounts = try allocator.alloc(u8, ixn_info.account_metas.items.len); + errdefer allocator.free(accounts); + + for (ixn_info.account_metas.items, 0..) |meta, i| { + accounts[i] = @intCast(meta.index_in_transaction); + } + + // Copy instruction data + const data = try allocator.dupe(u8, ixn_info.instruction_data); + errdefer allocator.free(data); + + return InnerInstruction{ + .instruction = CompiledInstruction{ + .program_id_index = @intCast(ixn_info.program_meta.index_in_transaction), + .accounts = accounts, + .data = data, + }, + .stack_height = depth, + }; + } + + /// Convert runtime TransactionReturnData to ledger TransactionReturnData. + fn convertReturnData( + allocator: Allocator, + rd: RuntimeTransactionReturnData, + ) error{OutOfMemory}!TransactionReturnData { + return TransactionReturnData{ + .program_id = rd.program_id, + .data = try allocator.dupe(u8, rd.data.slice()), + }; + } +}; + pub const TransactionError = union(enum(u32)) { /// An account is already being processed in another transaction in a way /// that does not support parallelism @@ -295,4 +527,530 @@ pub const TransactionError = union(enum(u32)) { else => {}, } } + + /// Serialize to JSON matching Agave's serde format for UiTransactionError. 
+ /// - Unit variants: "VariantName" + /// - Tuple variants: {"VariantName": value} + /// - Struct variants: {"VariantName": {"field": value}} + /// - InstructionError: {"InstructionError": [index, error]} + pub fn jsonStringify(self: @This(), jw: anytype) !void { + switch (self) { + .InstructionError => |payload| { + try jw.beginObject(); + try jw.objectField("InstructionError"); + try jw.beginArray(); + try jw.write(payload.@"0"); + switch (payload.@"1") { + .BorshIoError => try jw.write("BorshIoError"), + inline else => |inner_payload, tag| { + if (@TypeOf(inner_payload) == void) { + try jw.write(@tagName(tag)); + } else { + try jw.beginObject(); + try jw.objectField(@tagName(tag)); + try jw.write(inner_payload); + try jw.endObject(); + } + }, + } + try jw.endArray(); + try jw.endObject(); + }, + inline else => |payload, tag| { + if (@TypeOf(payload) == void) { + try jw.write(@tagName(tag)); + } else { + try jw.beginObject(); + try jw.objectField(@tagName(tag)); + try jw.write(payload); + try jw.endObject(); + } + }, + } + } }; + +test "TransactionError jsonStringify" { + const expectJsonStringify = struct { + fn run(expected: []const u8, value: TransactionError) !void { + const actual = try std.json.Stringify.valueAlloc(std.testing.allocator, value, .{}); + defer std.testing.allocator.free(actual); + try std.testing.expectEqualStrings(expected, actual); + } + }.run; + + // InstructionError with Custom inner error (matches Agave test) + try expectJsonStringify( + \\{"InstructionError":[42,{"Custom":3735928559}]} + , + .{ .InstructionError = .{ 42, .{ .Custom = 0xdeadbeef } } }, + ); + + // Struct variant: InsufficientFundsForRent (matches Agave test) + try expectJsonStringify( + \\{"InsufficientFundsForRent":{"account_index":42}} + , + .{ .InsufficientFundsForRent = .{ .account_index = 42 } }, + ); + + // Single-value tuple variant: DuplicateInstruction (matches Agave test) + try expectJsonStringify( + \\{"DuplicateInstruction":42} + , + .{ .DuplicateInstruction 
= 42 }, + ); + + // Unit variant (matches Agave test) + try expectJsonStringify( + \\"InsufficientFundsForFee" + , + .InsufficientFundsForFee, + ); + + // InstructionError with BorshIoError (serialized as unit variant per Agave v3) + try expectJsonStringify( + \\{"InstructionError":[0,"BorshIoError"]} + , + .{ .InstructionError = .{ 0, .{ .BorshIoError = @constCast("Unknown") } } }, + ); + + // Additional unit variants (Agave compatibility) + try expectJsonStringify( + \\"AccountInUse" + , + .AccountInUse, + ); + try expectJsonStringify( + \\"AccountNotFound" + , + .AccountNotFound, + ); + try expectJsonStringify( + \\"ProgramAccountNotFound" + , + .ProgramAccountNotFound, + ); + + // Struct variant: ProgramExecutionTemporarilyRestricted (matches Agave format) + try expectJsonStringify( + \\{"ProgramExecutionTemporarilyRestricted":{"account_index":7}} + , + .{ .ProgramExecutionTemporarilyRestricted = .{ .account_index = 7 } }, + ); + + // InstructionError with void inner error (e.g. GenericError has no payload) + try expectJsonStringify( + \\{"InstructionError":[1,"GenericError"]} + , + .{ .InstructionError = .{ 1, .GenericError } }, + ); + + // InstructionError with ComputationalBudgetExceeded (void inner) + try expectJsonStringify( + \\{"InstructionError":[0,"ComputationalBudgetExceeded"]} + , + .{ .InstructionError = .{ 0, .ComputationalBudgetExceeded } }, + ); +} + +test "TransactionStatusMetaBuilder.extractLogMessages" { + const allocator = std.testing.allocator; + const LogCollector = sig.runtime.LogCollector; + + // Test with messages + { + var log_collector = try LogCollector.init(allocator, 10_000); + defer log_collector.deinit(allocator); + + try log_collector.log(allocator, "Program log: Hello", .{}); + try log_collector.log(allocator, "Program consumed {d} CUs", .{@as(u64, 5000)}); + + const messages = try TransactionStatusMetaBuilder.extractLogMessages( + allocator, + log_collector, + ); + defer allocator.free(messages); + + try 
std.testing.expectEqual(@as(usize, 2), messages.len); + try std.testing.expectEqualStrings("Program log: Hello", messages[0]); + try std.testing.expectEqualStrings("Program consumed 5000 CUs", messages[1]); + } + + // Test with empty log collector + { + var log_collector = try LogCollector.init(allocator, 10_000); + defer log_collector.deinit(allocator); + + const messages = try TransactionStatusMetaBuilder.extractLogMessages( + allocator, + log_collector, + ); + // Empty returns a static empty slice, no need to free + try std.testing.expectEqual(@as(usize, 0), messages.len); + } +} + +test "TransactionStatusMetaBuilder.convertReturnData" { + const allocator = std.testing.allocator; + const RuntimeReturnData = sig.runtime.transaction_context.TransactionReturnData; + + const program_id = Pubkey{ .data = [_]u8{0xAB} ** 32 }; + var rt_return_data = RuntimeReturnData{ + .program_id = program_id, + }; + // Add some data to the bounded array + rt_return_data.data.appendSliceAssumeCapacity("hello world"); + + const result = try TransactionStatusMetaBuilder.convertReturnData(allocator, rt_return_data); + defer result.deinit(allocator); + + try std.testing.expect(result.program_id.equals(&program_id)); + try std.testing.expectEqualStrings("hello world", result.data); +} + +test "TransactionStatusMetaBuilder.convertToInnerInstruction" { + const allocator = std.testing.allocator; + const InstructionInfo = sig.runtime.InstructionInfo; + + // Build a mock InstructionInfo + var account_metas: InstructionInfo.AccountMetas = .{}; + try account_metas.append(allocator, .{ + .pubkey = Pubkey{ .data = [_]u8{0x11} ** 32 }, + .index_in_transaction = 3, + .is_signer = true, + .is_writable = true, + }); + try account_metas.append(allocator, .{ + .pubkey = Pubkey{ .data = [_]u8{0x22} ** 32 }, + .index_in_transaction = 7, + .is_signer = false, + .is_writable = false, + }); + defer account_metas.deinit(allocator); + + const ixn_info = InstructionInfo{ + .program_meta = .{ + .pubkey = Pubkey{ 
.data = [_]u8{0xFF} ** 32 }, + .index_in_transaction = 5, + }, + .account_metas = account_metas, + .dedupe_map = @splat(0xff), + .instruction_data = &[_]u8{ 0x01, 0x02, 0x03 }, + .owned_instruction_data = false, + }; + + const inner = try TransactionStatusMetaBuilder.convertToInnerInstruction(allocator, ixn_info, 2); + defer inner.deinit(allocator); + + // Check program_id_index maps to the program's index_in_transaction + try std.testing.expectEqual(@as(u8, 5), inner.instruction.program_id_index); + // Check account indices + try std.testing.expectEqual(@as(usize, 2), inner.instruction.accounts.len); + try std.testing.expectEqual(@as(u8, 3), inner.instruction.accounts[0]); + try std.testing.expectEqual(@as(u8, 7), inner.instruction.accounts[1]); + // Check instruction data is copied + try std.testing.expectEqualSlices(u8, &[_]u8{ 0x01, 0x02, 0x03 }, inner.instruction.data); + // Check stack height + try std.testing.expectEqual(@as(?u32, 2), inner.stack_height); +} + +test "TransactionStatusMetaBuilder.convertInstructionTrace" { + const allocator = std.testing.allocator; + const InstructionInfo = sig.runtime.InstructionInfo; + const TransactionContext = sig.runtime.transaction_context.TransactionContext; + const InstructionTrace = TransactionContext.InstructionTrace; + + // Create a trace with 2 top-level instructions, where the second has a CPI + var trace = InstructionTrace{}; + + // Top-level instruction 0 (no inner instructions) + const metas0: InstructionInfo.AccountMetas = .{}; + trace.appendAssumeCapacity(.{ + .depth = 1, + .ixn_info = .{ + .program_meta = .{ .pubkey = Pubkey.ZEROES, .index_in_transaction = 0 }, + .account_metas = metas0, + .dedupe_map = @splat(0xff), + .instruction_data = &.{}, + .owned_instruction_data = false, + }, + }); + + // Top-level instruction 1 + const metas1: InstructionInfo.AccountMetas = .{}; + trace.appendAssumeCapacity(.{ + .depth = 1, + .ixn_info = .{ + .program_meta = .{ .pubkey = Pubkey.ZEROES, .index_in_transaction = 1 }, 
+ .account_metas = metas1, + .dedupe_map = @splat(0xff), + .instruction_data = &.{}, + .owned_instruction_data = false, + }, + }); + + // CPI from instruction 1 (depth=2) + var metas2: InstructionInfo.AccountMetas = .{}; + try metas2.append(allocator, .{ + .pubkey = Pubkey{ .data = [_]u8{0xAA} ** 32 }, + .index_in_transaction = 4, + .is_signer = false, + .is_writable = true, + }); + defer metas2.deinit(allocator); + + trace.appendAssumeCapacity(.{ + .depth = 2, + .ixn_info = .{ + .program_meta = .{ .pubkey = Pubkey.ZEROES, .index_in_transaction = 2 }, + .account_metas = metas2, + .dedupe_map = @splat(0xff), + .instruction_data = &[_]u8{0x42}, + .owned_instruction_data = false, + }, + }); + + const result = try TransactionStatusMetaBuilder.convertInstructionTrace(allocator, trace); + defer { + for (result) |item| item.deinit(allocator); + allocator.free(result); + } + + // Only the second top-level has inner instructions (the CPI). + // The index should be 1, matching the top-level instruction position. 
+ try std.testing.expectEqual(@as(usize, 1), result.len); + try std.testing.expectEqual(@as(u8, 1), result[0].index); + try std.testing.expectEqual(@as(usize, 1), result[0].instructions.len); + try std.testing.expectEqual(@as(u8, 2), result[0].instructions[0].instruction.program_id_index); + try std.testing.expectEqual(@as(?u32, 2), result[0].instructions[0].stack_height); + try std.testing.expectEqualSlices(u8, &[_]u8{0x42}, result[0].instructions[0].instruction.data); +} + +test "TransactionStatusMetaBuilder.convertInstructionTrace - empty trace" { + const allocator = std.testing.allocator; + const TransactionContext = sig.runtime.transaction_context.TransactionContext; + const InstructionTrace = TransactionContext.InstructionTrace; + + const trace = InstructionTrace{}; + const result = try TransactionStatusMetaBuilder.convertInstructionTrace(allocator, trace); + // Empty trace returns static empty slice + try std.testing.expectEqual(@as(usize, 0), result.len); +} + +test "TransactionStatusMetaBuilder.build - successful transaction" { + const allocator = std.testing.allocator; + const LogCollector = sig.runtime.LogCollector; + const TransactionContext = sig.runtime.transaction_context.TransactionContext; + const RuntimeReturnData = sig.runtime.transaction_context.TransactionReturnData; + const ExecutedTransaction = sig.runtime.transaction_execution.ExecutedTransaction; + const ProcessedTransaction = sig.runtime.transaction_execution.ProcessedTransaction; + + // Create LogCollector - defer must come before status defer so log_collector + // outlives the status (log messages point into the collector's pool). 
+ var log_collector = try LogCollector.init(allocator, 10_000); + defer log_collector.deinit(allocator); + try log_collector.log(allocator, "Program log: success", .{}); + + // Create return data + var return_data = RuntimeReturnData{ .program_id = Pubkey{ .data = [_]u8{0xDD} ** 32 } }; + return_data.data.appendSliceAssumeCapacity("result"); + + const processed = ProcessedTransaction{ + .fees = .{ .transaction_fee = 5_000, .prioritization_fee = 1_000 }, + .rent = 0, + .writes = .{}, + .err = null, + .loaded_accounts_data_size = 0, + .outputs = ExecutedTransaction{ + .err = null, + .log_collector = log_collector, + .instruction_trace = TransactionContext.InstructionTrace{}, + .return_data = return_data, + .compute_limit = 200_000, + .compute_meter = 150_000, + .accounts_data_len_delta = 0, + }, + .pre_balances = .{}, + .pre_token_balances = .{}, + .cost_units = 42_000, + }; + + const pre_balances = [_]u64{ 1_000_000, 500_000 }; + const post_balances = [_]u64{ 995_000, 505_000 }; + + const status = try TransactionStatusMetaBuilder.build( + allocator, + processed, + &pre_balances, + &post_balances, + .{}, + null, + null, + ); + defer status.deinit(allocator); + + // Verify fee (5000 + 1000 = 6000) + try std.testing.expectEqual(@as(u64, 6_000), status.fee); + // Verify no error + try std.testing.expectEqual(@as(?TransactionError, null), status.status); + // Verify balances were copied + try std.testing.expectEqual(@as(usize, 2), status.pre_balances.len); + try std.testing.expectEqual(@as(u64, 1_000_000), status.pre_balances[0]); + try std.testing.expectEqual(@as(usize, 2), status.post_balances.len); + try std.testing.expectEqual(@as(u64, 505_000), status.post_balances[1]); + // Verify log messages were extracted + try std.testing.expect(status.log_messages != null); + try std.testing.expectEqual(@as(usize, 1), status.log_messages.?.len); + try std.testing.expectEqualStrings("Program log: success", status.log_messages.?[0]); + // Verify return data was converted + try 
std.testing.expect(status.return_data != null); + try std.testing.expectEqualStrings("result", status.return_data.?.data); + try std.testing.expect( + status.return_data.?.program_id.equals(&Pubkey{ .data = [_]u8{0xDD} ** 32 }), + ); + // Verify compute units consumed (200_000 - 150_000 = 50_000) + try std.testing.expectEqual(@as(?u64, 50_000), status.compute_units_consumed); + // Verify cost_units + try std.testing.expectEqual(@as(?u64, 42_000), status.cost_units); +} + +test "TransactionStatusMetaBuilder.build - transaction with no outputs" { + const allocator = std.testing.allocator; + const ProcessedTransaction = sig.runtime.transaction_execution.ProcessedTransaction; + + // A transaction that failed before execution (no outputs) + const processed = ProcessedTransaction{ + .fees = .{ .transaction_fee = 5_000, .prioritization_fee = 0 }, + .rent = 0, + .writes = .{}, + .err = .AccountNotFound, + .loaded_accounts_data_size = 0, + .outputs = null, + .pre_balances = .{}, + .pre_token_balances = .{}, + .cost_units = 0, + }; + + const pre_balances = [_]u64{1_000_000}; + const post_balances = [_]u64{1_000_000}; + + const status = try TransactionStatusMetaBuilder.build( + allocator, + processed, + &pre_balances, + &post_balances, + .{}, + null, + null, + ); + defer status.deinit(allocator); + + // Error should be set + try std.testing.expect(status.status != null); + // No log messages, inner instructions, return data, or compute units when outputs is null + try std.testing.expectEqual(@as(?[]const []const u8, null), status.log_messages); + try std.testing.expectEqual(@as(?[]const InnerInstructions, null), status.inner_instructions); + try std.testing.expectEqual(@as(?TransactionReturnData, null), status.return_data); + try std.testing.expectEqual(@as(?u64, null), status.compute_units_consumed); +} + +test "TransactionStatusMetaBuilder.build - outputs with null sub-fields" { + const allocator = std.testing.allocator; + const ExecutedTransaction = 
sig.runtime.transaction_execution.ExecutedTransaction; + const ProcessedTransaction = sig.runtime.transaction_execution.ProcessedTransaction; + + // Outputs exist but log_collector, instruction_trace, and return_data are all null + const processed = ProcessedTransaction{ + .fees = .{ .transaction_fee = 5_000, .prioritization_fee = 0 }, + .rent = 0, + .writes = .{}, + .err = null, + .loaded_accounts_data_size = 0, + .outputs = ExecutedTransaction{ + .err = null, + .log_collector = null, + .instruction_trace = null, + .return_data = null, + .compute_limit = 200_000, + .compute_meter = 100_000, + .accounts_data_len_delta = 0, + }, + .pre_balances = .{}, + .pre_token_balances = .{}, + .cost_units = 0, + }; + + const pre_balances = [_]u64{1_000_000}; + const post_balances = [_]u64{995_000}; + + const status = try TransactionStatusMetaBuilder.build( + allocator, + processed, + &pre_balances, + &post_balances, + .{}, + null, + null, + ); + defer status.deinit(allocator); + + // Outputs exist so compute_units_consumed should be calculated + try std.testing.expectEqual(@as(?u64, 100_000), status.compute_units_consumed); + // But sub-fields are null since their sources were null + try std.testing.expectEqual(@as(?[]const []const u8, null), status.log_messages); + try std.testing.expectEqual(@as(?[]const InnerInstructions, null), status.inner_instructions); + try std.testing.expectEqual(@as(?TransactionReturnData, null), status.return_data); +} + +test "TransactionStatusMetaBuilder.build - with loaded addresses" { + const allocator = std.testing.allocator; + const ExecutedTransaction = sig.runtime.transaction_execution.ExecutedTransaction; + const ProcessedTransaction = sig.runtime.transaction_execution.ProcessedTransaction; + + const processed = ProcessedTransaction{ + .fees = .{ .transaction_fee = 5_000, .prioritization_fee = 0 }, + .rent = 0, + .writes = .{}, + .err = null, + .loaded_accounts_data_size = 0, + .outputs = ExecutedTransaction{ + .err = null, + .log_collector 
= null, + .instruction_trace = null, + .return_data = null, + .compute_limit = 200_000, + .compute_meter = 200_000, + .accounts_data_len_delta = 0, + }, + .pre_balances = .{}, + .pre_token_balances = .{}, + .cost_units = 0, + }; + + const writable_keys = [_]Pubkey{Pubkey{ .data = [_]u8{0xAA} ** 32 }}; + const readonly_keys = [_]Pubkey{ + Pubkey{ .data = [_]u8{0xBB} ** 32 }, + Pubkey{ .data = [_]u8{0xCC} ** 32 }, + }; + + const status = try TransactionStatusMetaBuilder.build( + allocator, + processed, + &.{}, + &.{}, + .{ .writable = &writable_keys, .readonly = &readonly_keys }, + null, + null, + ); + defer status.deinit(allocator); + + // Loaded addresses should be copied + try std.testing.expectEqual(@as(usize, 1), status.loaded_addresses.writable.len); + try std.testing.expectEqual(@as(usize, 2), status.loaded_addresses.readonly.len); + try std.testing.expect( + status.loaded_addresses.writable[0].equals(&Pubkey{ .data = [_]u8{0xAA} ** 32 }), + ); + try std.testing.expect( + status.loaded_addresses.readonly[1].equals(&Pubkey{ .data = [_]u8{0xCC} ** 32 }), + ); +} diff --git a/src/replay/Committer.zig b/src/replay/Committer.zig index bf06a60c2f..296b5b1654 100644 --- a/src/replay/Committer.zig +++ b/src/replay/Committer.zig @@ -4,18 +4,27 @@ const replay = @import("lib.zig"); const tracy = @import("tracy"); const Allocator = std.mem.Allocator; +const ArrayListUnmanaged = std.ArrayListUnmanaged; const Channel = sig.sync.Channel; const Logger = sig.trace.Logger("replay.committer"); const Hash = sig.core.Hash; +const Pubkey = sig.core.Pubkey; const Slot = sig.core.Slot; const Transaction = sig.core.Transaction; const ResolvedTransaction = replay.resolve_lookup.ResolvedTransaction; +const Account = sig.core.Account; const LoadedAccount = sig.runtime.account_loader.LoadedAccount; const ProcessedTransaction = sig.runtime.transaction_execution.ProcessedTransaction; +const TransactionStatusMeta = sig.ledger.transaction_status.TransactionStatusMeta; +const 
TransactionStatusMetaBuilder = sig.ledger.transaction_status.TransactionStatusMetaBuilder; +const LoadedAddresses = sig.ledger.transaction_status.LoadedAddresses; +const Ledger = sig.ledger.Ledger; +const SlotAccountStore = sig.accounts_db.SlotAccountStore; +const spl_token = sig.runtime.spl_token; const ParsedVote = sig.consensus.vote_listener.vote_parser.ParsedVote; const parseSanitizedVoteTransaction = @@ -29,6 +38,10 @@ status_cache: *sig.core.StatusCache, stakes_cache: *sig.core.StakesCache, new_rate_activation_epoch: ?sig.core.Epoch, replay_votes_sender: ?*Channel(ParsedVote), +/// Ledger for persisting transaction status metadata (optional for backwards compatibility) +ledger: ?*Ledger, +/// Account store for looking up accounts (e.g. mint accounts for token balance resolution) +account_store: ?SlotAccountStore, pub fn commitTransactions( self: Committer, @@ -54,7 +67,7 @@ pub fn commitTransactions( var transaction_fees: u64 = 0; var priority_fees: u64 = 0; - for (transactions, tx_results) |transaction, *result| { + for (transactions, tx_results, 0..) |transaction, *result, transaction_index| { const message_hash = &result.@"0"; const tx_result = &result.@"1"; @@ -121,7 +134,19 @@ pub fn commitTransactions( slot, ); } - // NOTE: we'll need to store the actual status at some point, probably for rpc. + + // Write transaction status to ledger for RPC (getBlock, getTransaction) + if (self.ledger) |ledger| { + try writeTransactionStatus( + temp_allocator, + ledger, + slot, + transaction, + tx_result.*, + transaction_index, + self.account_store, + ); + } } _ = self.slot_state.collected_transaction_fees.fetchAdd(transaction_fees, .monotonic); @@ -140,6 +165,267 @@ pub fn commitTransactions( } } +/// Build and write TransactionStatusMeta to the ledger for a single transaction. 
+fn writeTransactionStatus( + allocator: Allocator, + ledger: *Ledger, + slot: Slot, + transaction: ResolvedTransaction, + tx_result: ProcessedTransaction, + transaction_index: usize, + account_store: ?SlotAccountStore, +) !void { + const status_write_zone = tracy.Zone.init(@src(), .{ .name = "writeTransactionStatus" }); + defer status_write_zone.deinit(); + + const signature = transaction.transaction.signatures[0]; + const num_accounts = transaction.accounts.len; + + // Use pre-balances captured during execution + // If pre_balances is empty (account loading failed), use zeros + const pre_balances = try allocator.alloc(u64, num_accounts); + defer allocator.free(pre_balances); + if (tx_result.pre_balances.len == num_accounts) { + @memcpy(pre_balances, tx_result.pre_balances.constSlice()); + } else { + // Account loading failed - pre-balances not available + @memset(pre_balances, 0); + } + + // Compute post-balances: start with pre-balances, then update from writes + var post_balances = try allocator.alloc(u64, num_accounts); + defer allocator.free(post_balances); + @memcpy(post_balances, pre_balances); + + // Update post-balances with values from written accounts + for (tx_result.writes.constSlice()) |*written_account| { + // Find the index of this account in the transaction + for (transaction.accounts.items(.pubkey), 0..) 
|pubkey, idx| { + if (pubkey.equals(&written_account.pubkey)) { + post_balances[idx] = written_account.account.lamports; + break; + } + } + } + + const num_static_addresses = transaction.transaction.msg.account_keys.len; + + // Count loaded addresses + var num_loaded_writable: usize = 0; + var num_loaded_readonly: usize = 0; + for (transaction.transaction.msg.address_lookups) |lookup| { + num_loaded_writable += lookup.writable_indexes.len; + num_loaded_readonly += lookup.readonly_indexes.len; + } + + // Populate loaded addresses and address_signatures index keys + var writable_keys = try ArrayListUnmanaged(Pubkey).initCapacity( + allocator, + num_static_addresses + num_loaded_writable, + ); + defer writable_keys.deinit(allocator); + var readonly_keys = try ArrayListUnmanaged(Pubkey).initCapacity( + allocator, + num_static_addresses + num_loaded_readonly, + ); + defer readonly_keys.deinit(allocator); + var loaded_writable_keys = try ArrayListUnmanaged(Pubkey).initCapacity( + allocator, + num_loaded_writable, + ); + defer loaded_writable_keys.deinit(allocator); + var loaded_readonly_keys = try ArrayListUnmanaged(Pubkey).initCapacity( + allocator, + num_loaded_readonly, + ); + defer loaded_readonly_keys.deinit(allocator); + for ( + transaction.accounts.items(.pubkey), + transaction.accounts.items(.is_writable), + 0.., + ) |pubkey, is_writable, index| { + const is_loaded = index >= num_static_addresses; + + if (is_writable) { + writable_keys.appendAssumeCapacity(pubkey); + if (is_loaded) loaded_writable_keys.appendAssumeCapacity(pubkey); + } else { + readonly_keys.appendAssumeCapacity(pubkey); + if (is_loaded) loaded_readonly_keys.appendAssumeCapacity(pubkey); + } + } + + const loaded_addresses = LoadedAddresses{ + .writable = loaded_writable_keys.items, + .readonly = loaded_readonly_keys.items, + }; + + // Collect token balances + // Build a mint decimals cache from writes (for mints modified in this tx) + var mint_cache = spl_token.MintDecimalsCache.init(allocator); 
+ defer mint_cache.deinit(); + + // Populate cache with any mints found in the transaction writes + for (tx_result.writes.constSlice()) |*written_account| { + const acc = written_account.account; + const pubkey = written_account.pubkey; + if (acc.data.len >= spl_token.MINT_ACCOUNT_SIZE) { + if (spl_token.ParsedMint.parse(acc.data[0..spl_token.MINT_ACCOUNT_SIZE])) |mint| { + mint_cache.put(pubkey, mint.decimals) catch {}; + } + } + } + + // Resolve pre-token balances using FallbackAccountReader (writes first, then account store) + const mint_reader = FallbackAccountReader{ + .writes = tx_result.writes.constSlice(), + .account_store_reader = if (account_store) |store| store.reader() else null, + }; + const pre_token_balances = spl_token.resolveTokenBalances( + allocator, + tx_result.pre_token_balances, + &mint_cache, + FallbackAccountReader, + mint_reader, + ) catch null; + errdefer if (pre_token_balances) |balances| { + for (balances) |b| b.deinit(allocator); + allocator.free(balances); + }; + + // Compute post-token balances from writes + const post_raw_token_balances = collectPostTokenBalances(transaction, tx_result); + const post_token_balances = spl_token.resolveTokenBalances( + allocator, + post_raw_token_balances, + &mint_cache, + FallbackAccountReader, + mint_reader, + ) catch null; + errdefer if (post_token_balances) |balances| { + for (balances) |b| b.deinit(allocator); + allocator.free(balances); + }; + + // Build TransactionStatusMeta + const status = try TransactionStatusMetaBuilder.build( + allocator, + tx_result, + pre_balances, + post_balances, + loaded_addresses, + pre_token_balances, + post_token_balances, + ); + defer status.deinit(allocator); + + // Write to ledger + const result_writer = ledger.resultWriter(); + try result_writer.writeTransactionStatus( + slot, + signature, + writable_keys.items, + readonly_keys.items, + status, + transaction_index, + ); +} + +/// Collect post-execution token balances from transaction writes. 
+fn collectPostTokenBalances( + transaction: ResolvedTransaction, + tx_result: ProcessedTransaction, +) spl_token.RawTokenBalances { + var result = spl_token.RawTokenBalances{}; + + for (tx_result.writes.constSlice()) |*written_account| { + // Skip non-token accounts + if (!spl_token.isTokenProgram(written_account.account.owner)) continue; + + // Skip if data is too short for a token account + if (written_account.account.data.len < spl_token.TOKEN_ACCOUNT_SIZE) continue; + + // Try to parse as token account + const parsed = spl_token.ParsedTokenAccount.parse( + written_account.account.data[0..spl_token.TOKEN_ACCOUNT_SIZE], + ) orelse continue; + + // Find the account index in the transaction + var account_index: ?u8 = null; + for (transaction.accounts.items(.pubkey), 0..) |pubkey, idx| { + if (pubkey.equals(&written_account.pubkey)) { + account_index = @intCast(idx); + break; + } + } + + if (account_index) |idx| { + result.append(.{ + .account_index = idx, + .mint = parsed.mint, + .owner = parsed.owner, + .amount = parsed.amount, + .program_id = written_account.account.owner, + }) catch {}; // this is ok since tx_result.writes and result.len are the same + } + } + + return result; +} + +/// Account reader that checks transaction writes first, then falls back to the +/// account store. This ensures mint accounts can be found even when they weren't +/// modified by the transaction (the common case for token transfers). +/// [agave] Agave uses account_loader.load_account() which has full store access. +const FallbackAccountReader = struct { + writes: []const LoadedAccount, + account_store_reader: ?sig.accounts_db.SlotAccountReader, + + /// Stub account type returned by this reader. + /// Allocates and owns the data buffer. 
+ const StubAccount = struct { + data: DataHandle, + + const DataHandle = struct { + slice: []const u8, + + pub fn constSlice(self: DataHandle) []const u8 { + return self.slice; + } + }; + + pub fn deinit(self: StubAccount, allocator: Allocator) void { + allocator.free(self.data.slice); + } + }; + + pub fn get(self: FallbackAccountReader, allocator: Allocator, pubkey: Pubkey) !?StubAccount { + // Check transaction writes first + for (self.writes) |*account| { + if (account.pubkey.equals(&pubkey)) { + const data_copy = try allocator.dupe(u8, account.account.data); + errdefer allocator.free(data_copy); + return StubAccount{ + .data = .{ .slice = data_copy }, + }; + } + } + + // Fall back to account store (e.g. for mint accounts not modified in this tx) + if (self.account_store_reader) |reader| { + const account = try reader.get(allocator, pubkey) orelse return null; + defer account.deinit(allocator); + const data_copy = try account.data.readAllAllocate(allocator); + errdefer allocator.free(data_copy); + return StubAccount{ .data = .{ + .slice = data_copy, + } }; + } + + return null; + } +}; + fn isSimpleVoteTransaction(tx: Transaction) bool { const msg = tx.msg; if (msg.instructions.len == 0) return false; diff --git a/src/replay/execution.zig b/src/replay/execution.zig index 7bfc7c9e19..74dc6f03ab 100644 --- a/src/replay/execution.zig +++ b/src/replay/execution.zig @@ -542,6 +542,8 @@ fn prepareSlot( .stakes_cache = &slot_info.state.stakes_cache, .new_rate_activation_epoch = new_rate_activation_epoch, .replay_votes_sender = state.replay_votes_channel, + .ledger = state.ledger, + .account_store = svm_gateway.params.account_store, }; const verify_ticks_params = replay.execution.VerifyTicksParams{ @@ -1112,6 +1114,8 @@ pub const TestState = struct { .stakes_cache = &self.stakes_cache, .new_rate_activation_epoch = null, .replay_votes_sender = self.replay_votes_channel, + .ledger = null, + .account_store = null, }; } diff --git a/src/replay/freeze.zig 
b/src/replay/freeze.zig index a8093fc4d9..86bfac6b7e 100644 --- a/src/replay/freeze.zig +++ b/src/replay/freeze.zig @@ -5,6 +5,7 @@ const tracy = @import("tracy"); const core = sig.core; const features = sig.core.features; +const rewards = sig.replay.rewards; const Allocator = std.mem.Allocator; const assert = std.debug.assert; @@ -49,6 +50,7 @@ pub const FreezeParams = struct { constants: *const SlotConstants, slot: Slot, blockhash: Hash, + ledger: *sig.ledger.Ledger, ) FreezeParams { return .{ .logger = logger, @@ -83,6 +85,9 @@ pub const FreezeParams = struct { .collector_id = constants.collector_id, .collected_transaction_fees = state.collected_transaction_fees.load(.monotonic), .collected_priority_fees = state.collected_priority_fees.load(.monotonic), + .ledger = ledger, + .reward_status = &state.reward_status, + .block_height = constants.block_height, }, }; } @@ -132,6 +137,8 @@ const FinalizeStateParams = struct { account_reader: SlotAccountReader, capitalization: *std.atomic.Value(u64), blockhash_queue: *sig.sync.RwMux(sig.core.BlockhashQueue), + ledger: *sig.ledger.Ledger, + reward_status: *const rewards.EpochRewardStatus, // data params rent: Rent, @@ -141,6 +148,7 @@ const FinalizeStateParams = struct { collector_id: Pubkey, collected_transaction_fees: u64, collected_priority_fees: u64, + block_height: u64, }; /// Updates some accounts and other shared state to finish up the slot execution. @@ -171,6 +179,9 @@ fn finalizeState(allocator: Allocator, params: FinalizeStateParams) !void { params.collector_id, params.collected_transaction_fees, params.collected_priority_fees, + params.ledger, + params.reward_status, + params.block_height, ); // Run incinerator @@ -192,6 +203,9 @@ fn finalizeState(allocator: Allocator, params: FinalizeStateParams) !void { } /// Burn and payout the appropriate portions of collected fees. +/// Records all rewards (fee, vote, staking) and num_partitions to the blockstore. 
+/// Matches Agave's fee distribution in `runtime/src/bank/fee_distribution.rs` +/// and reward recording in `get_rewards_and_num_partitions`. fn distributeTransactionFees( allocator: Allocator, account_store: AccountStore, @@ -202,6 +216,9 @@ fn distributeTransactionFees( collector_id: Pubkey, collected_transaction_fees: u64, collected_priority_fees: u64, + ledger: *sig.ledger.Ledger, + epoch_reward_status: *const rewards.EpochRewardStatus, + block_height: u64, ) !void { const zone = tracy.Zone.init(@src(), .{ .name = "distributeTransactionFees" }); defer zone.deinit(); @@ -211,7 +228,7 @@ fn distributeTransactionFees( const payout = total_fees -| burn; if (payout > 0) blk: { - const post_balance = tryPayoutFees( + const payout_result = tryPayoutFees( allocator, account_store, account_reader, @@ -229,14 +246,100 @@ fn distributeTransactionFees( }, else => return err, }; - // TODO: record rewards returned by tryPayoutFees - _ = post_balance; + + const fee_reward: sig.ledger.meta.Reward = .{ + .pubkey = collector_id, + .lamports = @intCast(payout_result.payout_amount), + .post_balance = payout_result.post_balance, + .reward_type = .fee, + .commission = null, + }; + + const keyed_rewards, const num_partitions = try getRewardsAndNumPartitions( + allocator, + epoch_reward_status, + block_height, + fee_reward, + ); + defer allocator.free(keyed_rewards); + + try ledger.db.put(sig.ledger.schema.schema.rewards, slot, .{ + .rewards = keyed_rewards, + .num_partitions = num_partitions, + }); } _ = capitalization.fetchSub(burn, .monotonic); } +/// Collect all rewards for this slot and determine num_partitions. +/// Matches Agave's `get_rewards_and_num_partitions`. +/// +/// On an epoch boundary block: returns vote rewards + fee reward + num_partitions. +/// On a distribution block: returns distributed stake rewards + fee reward. +/// On other blocks: returns just the fee reward. 
+fn getRewardsAndNumPartitions( + allocator: Allocator, + epoch_reward_status: *const rewards.EpochRewardStatus, + block_height: u64, + fee_reward: sig.ledger.meta.Reward, +) !struct { []const sig.ledger.meta.Reward, ?u64 } { + switch (epoch_reward_status.*) { + .active => |active| { + // The epoch boundary block is the one right before distribution starts. + // calculation.zig sets: distribution_starting_blockheight = block_height + 1 + const is_epoch_boundary = (block_height + 1 == active.distribution_start_block_height); + + if (is_epoch_boundary) { + // Epoch boundary: record vote rewards + fee reward + num_partitions. + const vote_entries = active.all_vote_rewards.entries; + const num_partitions: u64 = if (active.partitioned_indices) |pi| + pi.entries.len + else + 0; + + const all_rewards = try allocator.alloc( + sig.ledger.meta.Reward, + 1 + vote_entries.len, + ); + all_rewards[0] = fee_reward; + for (vote_entries, 1..) |vr, i| { + all_rewards[i] = .{ + .pubkey = vr.vote_pubkey, + .lamports = @intCast(vr.rewards.lamports), + .post_balance = vr.rewards.post_balance, + .reward_type = .voting, + .commission = vr.rewards.commission, + }; + } + return .{ all_rewards, num_partitions }; + } else { + // Distribution block: record distributed stake rewards + fee reward. + const all_rewards = try allocator.alloc( + sig.ledger.meta.Reward, + 1 + active.distributed_rewards.items.len, + ); + all_rewards[0] = fee_reward; + @memcpy(all_rewards[1..], active.distributed_rewards.items); + return .{ all_rewards, null }; + } + }, + .inactive => {}, + } + + // Non-epoch-reward block: just the fee reward. + const all_rewards = try allocator.alloc(sig.ledger.meta.Reward, 1); + all_rewards[0] = fee_reward; + return .{ all_rewards, null }; +} + /// Attempt to pay the payout to the collector. +/// Returns the payout amount and post-balance on success. 
+const PayoutResult = struct { + payout_amount: u64, + post_balance: u64, +}; + fn tryPayoutFees( allocator: Allocator, account_store: AccountStore, @@ -245,7 +348,7 @@ fn tryPayoutFees( slot: Slot, collector_id: Pubkey, payout: u64, -) !u64 { +) !PayoutResult { var fee_collector_account = if (try account_reader.get(allocator, collector_id)) |old_account| blk: { defer old_account.deinit(allocator); @@ -272,7 +375,10 @@ fn tryPayoutFees( // duplicates fee_collector_account, so we need to free it. try account_store.put(slot, collector_id, fee_collector_account); - return fee_collector_account.lamports; + return PayoutResult{ + .payout_amount = payout, + .post_balance = fee_collector_account.lamports, + }; } pub const HashSlotParams = struct { @@ -538,6 +644,12 @@ test "freezeSlot: trivial e2e merkle hash test" { tp.shutdown(); tp.deinit(); } + var ledger, var ledger_dir = try sig.ledger.Ledger.initForTest(allocator); + defer { + ledger.deinit(); + ledger_dir.cleanup(); + } + try freezeSlot(allocator, .init( .FOR_TESTS, account_store, @@ -546,6 +658,7 @@ test "freezeSlot: trivial e2e merkle hash test" { &constants, 0, .ZEROES, + &ledger, )); try std.testing.expectEqual( @@ -595,6 +708,12 @@ test "freezeSlot: trivial e2e lattice hash test" { var state: SlotState = .GENESIS; defer state.deinit(allocator); + var ledger, var ledger_dir = try sig.ledger.Ledger.initForTest(allocator); + defer { + ledger.deinit(); + ledger_dir.cleanup(); + } + try freezeSlot(allocator, .init( .FOR_TESTS, account_store, @@ -603,6 +722,7 @@ test "freezeSlot: trivial e2e lattice hash test" { &constants, 0, .ZEROES, + &ledger, )); try std.testing.expectEqual( diff --git a/src/replay/rewards/calculation.zig b/src/replay/rewards/calculation.zig index 1d7cfa9001..426c61b65d 100644 --- a/src/replay/rewards/calculation.zig +++ b/src/replay/rewards/calculation.zig @@ -31,6 +31,7 @@ const StakeRewards = sig.replay.rewards.StakeRewards; const PointValue = sig.replay.rewards.inflation_rewards.PointValue; 
const PartitionedStakeReward = sig.replay.rewards.PartitionedStakeReward; const PartitionedStakeRewards = sig.replay.rewards.PartitionedStakeRewards; +const PartitionedVoteRewards = sig.replay.rewards.PartitionedVoteRewards; const PartitionedVoteReward = sig.replay.rewards.PartitionedVoteReward; const redeemRewards = sig.replay.rewards.inflation_rewards.redeemRewards; @@ -71,7 +72,7 @@ pub fn beginPartitionedRewards( epoch_schedule, ); - const distributed_rewards, const point_value, const stake_rewards = + const distributed_rewards, const point_value, const stake_rewards, const vote_rewards = try calculateRewardsAndDistributeVoteRewards( allocator, slot, @@ -98,7 +99,9 @@ pub fn beginPartitionedRewards( slot_state.reward_status = .{ .active = .{ .distribution_start_block_height = distribution_starting_blockheight, .all_stake_rewards = stake_rewards, + .all_vote_rewards = vote_rewards, .partitioned_indices = null, + .distributed_rewards = .empty, } }; const blockhash_queue, var blockhash_queue_lg = slot_state.blockhash_queue.readWithLock(); @@ -194,6 +197,7 @@ fn calculateRewardsAndDistributeVoteRewards( u64, PointValue, PartitionedStakeRewards, + PartitionedVoteRewards, } { // TODO: Lookup in rewards calculation cache var rewards_for_partitioning = try calculateRewardsForPartitioning( @@ -220,9 +224,6 @@ fn calculateRewardsAndDistributeVoteRewards( new_warmup_and_cooldown_rate_epoch, ); - // TODO: Update vote rewards - // Looks like this is for metadata, and not protocol defining - std.debug.assert(rewards_for_partitioning.point_value.rewards >= rewards_for_partitioning.vote_rewards.total_vote_rewards_lamports + rewards_for_partitioning.stake_rewards.total_stake_rewards_lamports); @@ -233,10 +234,12 @@ fn calculateRewardsAndDistributeVoteRewards( ); rewards_for_partitioning.stake_rewards.stake_rewards.acquire(); + rewards_for_partitioning.vote_rewards.vote_rewards.acquire(); return .{ rewards_for_partitioning.vote_rewards.total_vote_rewards_lamports, 
rewards_for_partitioning.point_value, rewards_for_partitioning.stake_rewards.stake_rewards, + rewards_for_partitioning.vote_rewards.vote_rewards, }; } @@ -744,6 +747,7 @@ test calculateRewardsAndDistributeVoteRewards { slot_store, ); defer result[2].deinit(allocator); + defer result[3].deinit(allocator); const updated_vote_account = try slot_store.reader().get( allocator, diff --git a/src/replay/rewards/distribution.zig b/src/replay/rewards/distribution.zig index 6d62c91cd0..a15b82d638 100644 --- a/src/replay/rewards/distribution.zig +++ b/src/replay/rewards/distribution.zig @@ -19,6 +19,8 @@ const PartitionedStakeRewards = sig.replay.rewards.PartitionedStakeRewards; const PartitionedIndices = sig.replay.rewards.PartitionedIndices; const StakeReward = sig.replay.rewards.StakeReward; +const Reward = sig.ledger.transaction_status.Reward; + const EpochRewards = sig.runtime.sysvar.EpochRewards; const Rent = sig.runtime.sysvar.Rent; @@ -38,8 +40,8 @@ pub fn distributePartitionedEpochRewards( slot_store: SlotAccountStore, new_rate_activation_epoch: ?Epoch, ) !void { - var stake_rewards = switch (epoch_reward_status.*) { - .active => |active| active, + const stake_rewards = switch (epoch_reward_status.*) { + .active => |*active| active, .inactive => return, }; @@ -63,7 +65,6 @@ pub fn distributePartitionedEpochRewards( ); stake_rewards.partitioned_indices = try .init(allocator, partition_indices); - epoch_reward_status.* = .{ .active = stake_rewards }; } const partition_rewards, const partition_indices = .{ @@ -92,6 +93,7 @@ pub fn distributePartitionedEpochRewards( stakes_cache, slot_store, new_rate_activation_epoch, + &stake_rewards.distributed_rewards, ); } @@ -129,6 +131,7 @@ fn distributeEpochRewardsInPartition( stakes_cache: *StakesCache, slot_store: SlotAccountStore, new_rate_activation_epoch: ?Epoch, + distributed_rewards: *std.ArrayListUnmanaged(Reward), ) !void { const lamports_distributed, const lamports_burnt, const updated_stake_rewards = try 
storeStakeAccountsInPartition( @@ -168,8 +171,28 @@ fn distributeEpochRewardsInPartition( }, ); - // NOTE: Used for metadata - // updateRewardHistoryInPartition(updated_stake_rewards); + try addStakeRewardsToDistributedRewards( + allocator, + updated_stake_rewards, + distributed_rewards, + ); +} + +fn addStakeRewardsToDistributedRewards( + allocator: Allocator, + stake_rewards: []const StakeReward, + distributed_rewards: *std.ArrayListUnmanaged(Reward), +) !void { + try distributed_rewards.ensureTotalCapacity(allocator, stake_rewards.len); + for (stake_rewards) |sr| { + distributed_rewards.appendAssumeCapacity(.{ + .pubkey = sr.stake_pubkey, + .lamports = @intCast(sr.stake_reward_info.lamports), + .post_balance = sr.stake_reward_info.post_balance, + .reward_type = .staking, + .commission = sr.stake_reward_info.commission, + }); + } } fn storeStakeAccountsInPartition( @@ -415,6 +438,11 @@ test distributePartitionedEpochRewards { &[_]PartitionedStakeReward{partitioned_reward}, ), ), + .all_vote_rewards = try sig.replay.rewards.PartitionedVoteRewards.init( + allocator, + &[_]sig.replay.rewards.PartitionedVoteReward{}, + ), + .distributed_rewards = .empty, }, }; defer epoch_reward_status.deinit(allocator); @@ -538,6 +566,9 @@ test distributeEpochRewardsInPartition { ); defer partitioned_rewards.deinit(allocator); + var distributed_rewards: std.ArrayListUnmanaged(Reward) = .empty; + defer distributed_rewards.deinit(allocator); + const epoch_rewards = sig.runtime.sysvar.EpochRewards{ .distribution_starting_block_height = 0, .num_partitions = 1, @@ -565,6 +596,7 @@ test distributeEpochRewardsInPartition { &stakes_cache, slot_store, null, + &distributed_rewards, ); } diff --git a/src/replay/rewards/lib.zig b/src/replay/rewards/lib.zig index 3df2d13e94..1e399d03d6 100644 --- a/src/replay/rewards/lib.zig +++ b/src/replay/rewards/lib.zig @@ -9,6 +9,7 @@ const Stake = sig.runtime.program.stake.StakeStateV2.Stake; const VoteAccount = sig.core.stakes.VoteAccount; const 
AccountSharedData = sig.runtime.AccountSharedData; +const Reward = sig.ledger.transaction_status.Reward; pub const calculation = @import("calculation.zig"); pub const distribution = @import("distribution.zig"); @@ -189,31 +190,45 @@ pub const EpochRewardStatus = union(enum) { active: struct { distribution_start_block_height: u64, all_stake_rewards: PartitionedStakeRewards, + all_vote_rewards: PartitionedVoteRewards, partitioned_indices: ?PartitionedIndices, + /// Per-slot rewards from the most recent partition distribution. + distributed_rewards: std.ArrayListUnmanaged(Reward), }, inactive, - pub fn deinit(self: EpochRewardStatus, allocator: Allocator) void { - switch (self) { - .active => |active| { + pub fn deinit(self: *EpochRewardStatus, allocator: Allocator) void { + switch (self.*) { + .active => |*active| { active.all_stake_rewards.deinit(allocator); + active.all_vote_rewards.deinit(allocator); if (active.partitioned_indices) |pi| pi.deinit(allocator); + active.distributed_rewards.deinit(allocator); }, .inactive => {}, } } pub fn clone(self: EpochRewardStatus) EpochRewardStatus { - return switch (self) { - .active => |active| .{ .active = .{ + const active = switch (self) { + .active => |active| active, + .inactive => return .inactive, + }; + + return .{ + .active = .{ .distribution_start_block_height = active.distribution_start_block_height, .all_stake_rewards = active.all_stake_rewards.getAcquire(), + .all_vote_rewards = active.all_vote_rewards.getAcquire(), .partitioned_indices = if (active.partitioned_indices) |pi| pi.getAcquire() else null, - } }, - .inactive => .inactive, + // Each slot owns its own distributed_rewards list. The parent's + // buffer must not be shared, as the parent may be freed (rooted) + // while this slot is still alive, causing use-after-free. 
+ .distributed_rewards = .empty, + }, }; } }; diff --git a/src/replay/service.zig b/src/replay/service.zig index d894401fe2..ceec2ff511 100644 --- a/src/replay/service.zig +++ b/src/replay/service.zig @@ -31,6 +31,8 @@ const SlotTree = replay.trackers.SlotTree; const GossipVerifiedVoteHash = sig.consensus.vote_listener.GossipVerifiedVoteHash; const ThresholdConfirmedSlot = sig.consensus.vote_listener.ThresholdConfirmedSlot; +const schema = sig.ledger.schema.schema; + const updateSysvarsForNewSlot = replay.update_sysvar.updateSysvarsForNewSlot; pub const Logger = sig.trace.Logger("replay"); @@ -408,7 +410,7 @@ pub fn trackNewSlots( ), ); - try updateSysvarsForNewSlot( + const clock = try updateSysvarsForNewSlot( allocator, account_store, epoch_tracker, @@ -417,6 +419,8 @@ pub fn trackNewSlots( slot, hard_forks, ); + try ledger.db.put(schema.blocktime, slot, clock.unix_timestamp); + try ledger.db.put(schema.block_height, slot, constants.block_height); try slot_tracker.put(allocator, slot, .{ .constants = constants, .state = state }); try slot_tree.record(allocator, slot, constants.parent_slot); @@ -450,9 +454,6 @@ pub fn newSlotFromParent( var state = try SlotState.fromFrozenParent(allocator, parent_state); errdefer state.deinit(allocator); - const epoch_reward_status = parent_state.reward_status.clone(); - errdefer epoch_reward_status.deinit(allocator); - var ancestors = try parent_constants.ancestors.clone(allocator); errdefer ancestors.deinit(allocator); @@ -576,6 +577,7 @@ fn freezeCompletedSlots(state: *ReplayState, results: []const ReplayResult) !boo slot_info.constants, slot, last_entry_hash, + state.ledger, )); processed_a_slot = true; } else { diff --git a/src/replay/update_sysvar.zig b/src/replay/update_sysvar.zig index 92116dcbec..8576e71c27 100644 --- a/src/replay/update_sysvar.zig +++ b/src/replay/update_sysvar.zig @@ -60,7 +60,7 @@ pub fn updateSysvarsForNewSlot( state: *sig.core.SlotState, slot: Slot, hard_forks: *const sig.core.HardForks, -) !void { 
+) !Clock { const epoch = epoch_tracker.epoch_schedule.getEpoch(slot); const parent_slots_epoch = epoch_tracker.epoch_schedule.getEpoch(constants.parent_slot); const epoch_info = try epoch_tracker.getEpochInfo(slot); @@ -80,7 +80,7 @@ pub fn updateSysvarsForNewSlot( .update_sysvar_deps = sysvar_deps, }); - try updateClock( + const clock = try updateClock( allocator, .{ .feature_set = &constants.feature_set, @@ -101,6 +101,7 @@ pub fn updateSysvarsForNewSlot( hard_forks, sysvar_deps, ); + return clock; } pub fn fillMissingSysvarCacheEntries( @@ -179,7 +180,7 @@ pub const UpdateClockDeps = struct { update_sysvar_deps: UpdateSysvarAccountDeps, }; -pub fn updateClock(allocator: Allocator, deps: UpdateClockDeps) !void { +pub fn updateClock(allocator: Allocator, deps: UpdateClockDeps) !Clock { const clock = try nextClock( allocator, deps.feature_set, @@ -194,6 +195,7 @@ pub fn updateClock(allocator: Allocator, deps: UpdateClockDeps) !void { deps.parent_slots_epoch, ); try updateSysvarAccount(Clock, allocator, clock, deps.update_sysvar_deps); + return clock; } pub fn updateLastRestartSlot( @@ -874,17 +876,20 @@ test "update all sysvars" { var stakes_cache = StakesCache.EMPTY; defer stakes_cache.deinit(allocator); - try updateClock(allocator, .{ - .feature_set = &feature_set, - .epoch_schedule = &epoch_schedule, - .epoch_stakes = &epoch_stakes, - .stakes_cache = &stakes_cache, - .epoch = epoch_schedule.getEpoch(slot), - .parent_slots_epoch = null, - .genesis_creation_time = 0, - .ns_per_slot = 0, - .update_sysvar_deps = update_sysvar_deps, - }); + _ = try updateClock( + allocator, + .{ + .feature_set = &feature_set, + .epoch_schedule = &epoch_schedule, + .epoch_stakes = &epoch_stakes, + .stakes_cache = &stakes_cache, + .epoch = epoch_schedule.getEpoch(slot), + .parent_slots_epoch = null, + .genesis_creation_time = 0, + .ns_per_slot = 0, + .update_sysvar_deps = update_sysvar_deps, + }, + ); const new_sysvar, const new_account = (try getSysvarAndAccount(Clock, allocator, 
account_reader)).?; @@ -897,7 +902,6 @@ test "update all sysvars" { epoch_schedule.getLeaderScheduleEpoch(slot), new_sysvar.leader_schedule_epoch, ); - try std.testing.expectEqual(0, new_sysvar.unix_timestamp); try expectSysvarAccountChange(rent, old_account, new_account); } diff --git a/src/rpc/hook_contexts/Ledger.zig b/src/rpc/hook_contexts/Ledger.zig new file mode 100644 index 0000000000..fea71390f0 --- /dev/null +++ b/src/rpc/hook_contexts/Ledger.zig @@ -0,0 +1,1327 @@ +//! RPC hook context for block-related methods. +//! Requires access to the Ledger and SlotTracker for commitment checks. +const std = @import("std"); +const sig = @import("../../sig.zig"); +const base58 = @import("base58"); +const methods = @import("../methods.zig"); +const parse_instruction = @import("../parse_instruction/lib.zig"); + +const AccountKeys = parse_instruction.AccountKeys; +const Allocator = std.mem.Allocator; +const GetBlock = methods.GetBlock; +const LoadedAddresses = sig.ledger.transaction_status.LoadedAddresses; +const Pubkey = sig.core.Pubkey; +const ReservedAccounts = sig.core.ReservedAccounts; +const Signature = sig.core.Signature; +const TransactionDetails = methods.common.TransactionDetails; +const TransactionEncoding = methods.common.TransactionEncoding; + +const LedgerHookContext = @This(); + +ledger: *sig.ledger.Ledger, +slot_tracker: *const sig.replay.trackers.SlotTracker, + +pub fn getBlock( + self: LedgerHookContext, + arena: Allocator, + params: GetBlock, +) !GetBlock.Response { + const config = params.resolveConfig(); + const commitment = config.getCommitment(); + const transaction_details = config.getTransactionDetails(); + const show_rewards = config.getRewards(); + const encoding = config.getEncoding(); + const max_supported_version = config.getMaxSupportedTransactionVersion(); + + // Reject processed commitment (Agave behavior: only confirmed and finalized supported) + if (commitment == .processed) { + return error.ProcessedNotSupported; + } + + // Get 
block from ledger. + // Finalized path uses getRootedBlock (adds checkLowestCleanupSlot + isRoot checks, + // matching Agave's get_rooted_block). + // Confirmed path uses getCompleteBlock (no cleanup check, slot may not be rooted yet). + const reader = self.ledger.reader(); + const latest_confirmed_slot = self.slot_tracker.getSlotForCommitment(.confirmed); + const block = if (params.slot <= latest_confirmed_slot) reader.getRootedBlock( + arena, + params.slot, + true, + ) catch |err| switch (err) { + // NOTE: we try getCompleteBlock in case SlotTracker has seen the slot + // but ledger has not yet rooted it + error.SlotNotRooted => try reader.getCompleteBlock( + arena, + params.slot, + true, + ), + else => return err, + } else if (commitment == .confirmed) try reader.getCompleteBlock( + arena, + params.slot, + true, + ) else return error.BlockNotAvailable; + + return try encodeBlockWithOptions(arena, block, encoding, .{ + .tx_details = transaction_details, + .show_rewards = show_rewards, + .max_supported_version = max_supported_version, + }); +} + +/// Encode transactions and/or signatures based on the requested options. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L332 +fn encodeBlockWithOptions( + arena: Allocator, + block: sig.ledger.Reader.VersionedConfirmedBlock, + encoding: TransactionEncoding, + options: struct { + tx_details: TransactionDetails, + show_rewards: bool, + max_supported_version: ?u8, + }, +) !GetBlock.Response { + const transactions, const signatures = blk: switch (options.tx_details) { + .none => break :blk .{ null, null }, + .full => { + const transactions = try arena.alloc( + GetBlock.Response.EncodedTransactionWithStatusMeta, + block.transactions.len, + ); + + for (block.transactions, 0..) 
|tx_with_meta, i| { + transactions[i] = try encodeTransactionWithStatusMeta( + arena, + .{ .complete = tx_with_meta }, + encoding, + options.max_supported_version, + options.show_rewards, + ); + } + + break :blk .{ transactions, null }; + }, + .signatures => { + const sigs = try arena.alloc(Signature, block.transactions.len); + + for (block.transactions, 0..) |tx_with_meta, i| { + if (tx_with_meta.transaction.signatures.len == 0) { + return error.InvalidTransaction; + } + sigs[i] = tx_with_meta.transaction.signatures[0]; + } + + break :blk .{ null, sigs }; + }, + .accounts => { + const transactions = try arena.alloc( + GetBlock.Response.EncodedTransactionWithStatusMeta, + block.transactions.len, + ); + + for (block.transactions, 0..) |tx_with_meta, i| { + transactions[i] = try buildJsonAccounts( + arena, + .{ .complete = tx_with_meta }, + options.max_supported_version, + options.show_rewards, + ); + } + + break :blk .{ transactions, null }; + }, + }; + + return .{ + .blockhash = block.blockhash, + .previousBlockhash = block.previous_blockhash, + .parentSlot = block.parent_slot, + .transactions = transactions, + .signatures = signatures, + .rewards = if (options.show_rewards) try convertRewards( + arena, + block.rewards, + ) else null, + .numRewardPartitions = block.num_partitions, + .blockTime = block.block_time, + .blockHeight = block.block_height, + }; +} + +/// Validates that the transaction version is supported by the provided max version +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L496 +fn validateVersion( + version: sig.core.transaction.Version, + max_supported_version: ?u8, +) !?GetBlock.Response.EncodedTransactionWithStatusMeta.TransactionVersion { + if (max_supported_version) |max_version| switch (version) { + .legacy => return .legacy, + // TODO: update this to use the version number + // that would be stored inside the version enum + .v0 => if (max_version >= 0) { + return .{ 
.number = 0 }; + } else return error.UnsupportedTransactionVersion, + } else switch (version) { + .legacy => return null, + .v0 => return error.UnsupportedTransactionVersion, + } +} + +/// Encode a transaction with its metadata for the RPC response. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L452 +fn encodeTransactionWithStatusMeta( + arena: Allocator, + tx_with_meta: sig.ledger.Reader.TransactionWithStatusMeta, + encoding: TransactionEncoding, + max_supported_version: ?u8, + show_rewards: bool, +) !GetBlock.Response.EncodedTransactionWithStatusMeta { + return switch (tx_with_meta) { + .missing_metadata => |tx| .{ + .version = null, + .transaction = try encodeTransactionWithoutMeta( + arena, + tx, + encoding, + ), + .meta = null, + }, + .complete => |vtx| try encodeVersionedTransactionWithStatusMeta( + arena, + vtx, + encoding, + max_supported_version, + show_rewards, + ), + }; +} + +/// Encode a transaction missing metadata +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L708 +fn encodeTransactionWithoutMeta( + arena: Allocator, + transaction: sig.core.Transaction, + encoding: TransactionEncoding, +) !GetBlock.Response.EncodedTransaction { + switch (encoding) { + .binary => { + const bincode_bytes = try sig.bincode.writeAlloc(arena, transaction, .{}); + + var base58_str = try arena.alloc(u8, base58.encodedMaxSize(bincode_bytes.len)); + const encoded_len = base58.Table.BITCOIN.encode( + base58_str, + bincode_bytes, + ); + + return .{ .legacy_binary = base58_str[0..encoded_len] }; + }, + .base58 => { + const bincode_bytes = try sig.bincode.writeAlloc(arena, transaction, .{}); + + var base58_str = try arena.alloc(u8, base58.encodedMaxSize(bincode_bytes.len)); + const encoded_len = base58.Table.BITCOIN.encode( + base58_str, + bincode_bytes, + ); + + return .{ .binary = .{ base58_str[0..encoded_len], .base58 } }; + 
}, + .base64 => { + const bincode_bytes = try sig.bincode.writeAlloc(arena, transaction, .{}); + + const encoded_len = std.base64.standard.Encoder.calcSize(bincode_bytes.len); + const base64_buf = try arena.alloc(u8, encoded_len); + _ = std.base64.standard.Encoder.encode(base64_buf, bincode_bytes); + + return .{ .binary = .{ base64_buf, .base64 } }; + }, + .json, .jsonParsed => |enc| return .{ .json = .{ + .signatures = try arena.dupe(Signature, transaction.signatures), + .message = try encodeLegacyTransactionMessage( + arena, + transaction.msg, + enc, + ), + } }, + } +} + +/// Encode a full versioned transaction +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L520 +fn encodeVersionedTransactionWithStatusMeta( + arena: Allocator, + tx_with_meta: sig.ledger.Reader.VersionedTransactionWithStatusMeta, + encoding: TransactionEncoding, + max_supported_version: ?u8, + show_rewards: bool, +) !GetBlock.Response.EncodedTransactionWithStatusMeta { + const version = try validateVersion( + tx_with_meta.transaction.version, + max_supported_version, + ); + return .{ + .transaction = try encodeVersionedTransactionWithMeta( + arena, + tx_with_meta.transaction, + tx_with_meta.meta, + encoding, + ), + .meta = switch (encoding) { + .jsonParsed => try parseUiTransactionStatusMeta( + arena, + tx_with_meta.meta, + tx_with_meta.transaction.msg.account_keys, + show_rewards, + ), + else => try parseUiTransactionStatusMetaFromLedger( + arena, + tx_with_meta.meta, + show_rewards, + ), + }, + .version = version, + }; +} + +/// Parse a ledger transaction status meta directly into a UiTransactionStatusMeta (matches agave's From implementation) +/// [agave] https://github.com/anza-xyz/agave/blob/1c084acb9195fab0981b9876bcb409cabaf35d5c/transaction-status-client-types/src/lib.rs#L380 +fn parseUiTransactionStatusMetaFromLedger( + arena: Allocator, + meta: sig.ledger.meta.TransactionStatusMeta, + show_rewards: bool, +) 
!GetBlock.Response.UiTransactionStatusMeta { + // Build status field + const status: GetBlock.Response.UiTransactionResultStatus = if (meta.status) |err| + .{ .Ok = null, .Err = err } + else + .{ .Ok = .{}, .Err = null }; + + // Convert inner instructions + const inner_instructions = if (meta.inner_instructions) |iis| + try convertInnerInstructions(arena, iis) + else + &.{}; + + // Convert token balances + const pre_token_balances = if (meta.pre_token_balances) |balances| + try convertTokenBalances(arena, balances) + else + &.{}; + + const post_token_balances = if (meta.post_token_balances) |balances| + try convertTokenBalances(arena, balances) + else + &.{}; + + // Convert loaded addresses + const loaded_addresses = try LedgerHookContext.convertLoadedAddresses( + arena, + meta.loaded_addresses, + ); + + // Convert return data + const return_data = if (meta.return_data) |rd| + try convertReturnData(arena, rd) + else + null; + + const rewards: ?[]GetBlock.Response.UiReward = if (show_rewards) rewards: { + if (meta.rewards) |rewards| { + const converted = try arena.alloc(GetBlock.Response.UiReward, rewards.len); + for (rewards, 0..) 
|reward, i| { + converted[i] = try GetBlock.Response.UiReward.fromLedgerReward(reward); + } + break :rewards converted; + } else break :rewards &.{}; + } else null; + + return .{ + .err = meta.status, + .status = status, + .fee = meta.fee, + .preBalances = try arena.dupe(u64, meta.pre_balances), + .postBalances = try arena.dupe(u64, meta.post_balances), + .innerInstructions = .{ .value = inner_instructions }, + .logMessages = .{ .value = meta.log_messages orelse &.{} }, + .preTokenBalances = .{ .value = pre_token_balances }, + .postTokenBalances = .{ .value = post_token_balances }, + .rewards = if (rewards) |r| .{ .value = r } else .none, + .loadedAddresses = .{ .value = loaded_addresses }, + .returnData = if (return_data) |rd| .{ .value = rd } else .skip, + .computeUnitsConsumed = if (meta.compute_units_consumed) |cuc| .{ + .value = cuc, + } else .skip, + .costUnits = if (meta.cost_units) |cu| .{ .value = cu } else .skip, + }; +} + +/// Encode a transaction with its metadata +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L632 +fn encodeVersionedTransactionWithMeta( + arena: Allocator, + transaction: sig.core.Transaction, + meta: sig.ledger.transaction_status.TransactionStatusMeta, + encoding: TransactionEncoding, +) !GetBlock.Response.EncodedTransaction { + switch (encoding) { + .binary => { + const bincode_bytes = try sig.bincode.writeAlloc(arena, transaction, .{}); + + var base58_str = try arena.alloc(u8, base58.encodedMaxSize(bincode_bytes.len)); + const encoded_len = base58.Table.BITCOIN.encode( + base58_str, + bincode_bytes, + ); + + return .{ .legacy_binary = base58_str[0..encoded_len] }; + }, + .base58 => { + const bincode_bytes = try sig.bincode.writeAlloc(arena, transaction, .{}); + + var base58_str = try arena.alloc(u8, base58.encodedMaxSize(bincode_bytes.len)); + const encoded_len = base58.Table.BITCOIN.encode( + base58_str, + bincode_bytes, + ); + + return .{ .binary = .{ 
base58_str[0..encoded_len], .base58 } }; + }, + .base64 => { + const bincode_bytes = try sig.bincode.writeAlloc(arena, transaction, .{}); + + const encoded_len = std.base64.standard.Encoder.calcSize(bincode_bytes.len); + const base64_buf = try arena.alloc(u8, encoded_len); + _ = std.base64.standard.Encoder.encode(base64_buf, bincode_bytes); + + return .{ .binary = .{ base64_buf, .base64 } }; + }, + .json => return try jsonEncodeVersionedTransaction( + arena, + transaction, + ), + .jsonParsed => return .{ .json = .{ + .signatures = try arena.dupe(Signature, transaction.signatures), + .message = switch (transaction.version) { + .legacy => try encodeLegacyTransactionMessage( + arena, + transaction.msg, + .jsonParsed, + ), + .v0 => try jsonEncodeV0TransactionMessageWithMeta( + arena, + transaction.msg, + meta, + .jsonParsed, + ), + }, + } }, + } +} + +/// Encode a transaction to JSON format with its metadata +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L663 +fn jsonEncodeVersionedTransaction( + arena: Allocator, + transaction: sig.core.Transaction, +) !GetBlock.Response.EncodedTransaction { + return .{ .json = .{ + .signatures = try arena.dupe(Signature, transaction.signatures), + .message = switch (transaction.version) { + .legacy => try encodeLegacyTransactionMessage(arena, transaction.msg, .json), + .v0 => try jsonEncodeV0TransactionMessage(arena, transaction.msg), + }, + } }; +} + +/// Encode a legacy transaction message +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L743 +fn encodeLegacyTransactionMessage( + arena: Allocator, + message: sig.core.transaction.Message, + encoding: TransactionEncoding, +) !GetBlock.Response.UiMessage { + switch (encoding) { + .jsonParsed => { + var reserved_account_keys = try ReservedAccounts.initAllActivated(arena); + const account_keys = AccountKeys.init( + message.account_keys, + 
null, + ); + + var instructions = try arena.alloc( + parse_instruction.UiInstruction, + message.instructions.len, + ); + for (message.instructions, 0..) |ix, i| { + instructions[i] = try parse_instruction.parseUiInstruction( + arena, + .{ + .program_id_index = ix.program_index, + .accounts = ix.account_indexes, + .data = ix.data, + }, + &account_keys, + 1, + ); + } + return .{ .parsed = .{ + .account_keys = try parseLegacyMessageAccounts( + arena, + message, + &reserved_account_keys, + ), + .recent_blockhash = message.recent_blockhash, + .instructions = instructions, + .address_table_lookups = null, + } }; + }, + else => { + var instructions = try arena.alloc( + parse_instruction.UiCompiledInstruction, + message.instructions.len, + ); + for (message.instructions, 0..) |ix, i| { + instructions[i] = .{ + .programIdIndex = ix.program_index, + .accounts = try arena.dupe(u8, ix.account_indexes), + .data = blk: { + var ret = try arena.alloc(u8, base58.encodedMaxSize(ix.data.len)); + break :blk ret[0..base58.Table.BITCOIN.encode(ret, ix.data)]; + }, + .stackHeight = 1, + }; + } + + return .{ .raw = .{ + .header = .{ + .numRequiredSignatures = message.signature_count, + .numReadonlySignedAccounts = message.readonly_signed_count, + .numReadonlyUnsignedAccounts = message.readonly_unsigned_count, + }, + .account_keys = try arena.dupe(Pubkey, message.account_keys), + .recent_blockhash = message.recent_blockhash, + .instructions = instructions, + .address_table_lookups = null, + } }; + }, + } +} + +/// Encode a v0 transaction message to JSON format +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L859 +fn jsonEncodeV0TransactionMessage( + arena: Allocator, + message: sig.core.transaction.Message, +) !GetBlock.Response.UiMessage { + var instructions = try arena.alloc( + parse_instruction.UiCompiledInstruction, + message.instructions.len, + ); + for (message.instructions, 0..) 
|ix, i| { + instructions[i] = .{ + .programIdIndex = ix.program_index, + .accounts = try arena.dupe(u8, ix.account_indexes), + .data = blk: { + var ret = try arena.alloc(u8, base58.encodedMaxSize(ix.data.len)); + break :blk ret[0..base58.Table.BITCOIN.encode(ret, ix.data)]; + }, + .stackHeight = 1, + }; + } + + var address_table_lookups = try arena.alloc( + GetBlock.Response.AddressTableLookup, + message.address_lookups.len, + ); + for (message.address_lookups, 0..) |lookup, i| { + address_table_lookups[i] = .{ + .accountKey = lookup.table_address, + .writableIndexes = try arena.dupe(u8, lookup.writable_indexes), + .readonlyIndexes = try arena.dupe(u8, lookup.readonly_indexes), + }; + } + + return .{ .raw = .{ + .header = .{ + .numRequiredSignatures = message.signature_count, + .numReadonlySignedAccounts = message.readonly_signed_count, + .numReadonlyUnsignedAccounts = message.readonly_unsigned_count, + }, + .account_keys = try arena.dupe(Pubkey, message.account_keys), + .recent_blockhash = message.recent_blockhash, + .instructions = instructions, + .address_table_lookups = address_table_lookups, + } }; +} + +/// Encode a v0 transaction message with metadata to JSON format +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L824 +fn jsonEncodeV0TransactionMessageWithMeta( + arena: Allocator, + message: sig.core.transaction.Message, + meta: sig.ledger.transaction_status.TransactionStatusMeta, + encoding: TransactionEncoding, +) !GetBlock.Response.UiMessage { + switch (encoding) { + .jsonParsed => { + var reserved_account_keys = try ReservedAccounts.initAllActivated(arena); + const account_keys = AccountKeys.init( + message.account_keys, + meta.loaded_addresses, + ); + + var instructions = try arena.alloc( + parse_instruction.UiInstruction, + message.instructions.len, + ); + for (message.instructions, 0..) 
|ix, i| { + instructions[i] = try parse_instruction.parseUiInstruction( + arena, + .{ + .program_id_index = ix.program_index, + .accounts = ix.account_indexes, + .data = ix.data, + }, + &account_keys, + 1, + ); + } + + var address_table_lookups = try arena.alloc( + GetBlock.Response.AddressTableLookup, + message.address_lookups.len, + ); + for (message.address_lookups, 0..) |lookup, i| { + address_table_lookups[i] = .{ + .accountKey = lookup.table_address, + .writableIndexes = try arena.dupe(u8, lookup.writable_indexes), + .readonlyIndexes = try arena.dupe(u8, lookup.readonly_indexes), + }; + } + + return .{ .parsed = .{ + .account_keys = try parseV0MessageAccounts( + arena, + message, + account_keys, + &reserved_account_keys, + ), + .recent_blockhash = message.recent_blockhash, + .instructions = instructions, + .address_table_lookups = address_table_lookups, + } }; + }, + else => |_| return try jsonEncodeV0TransactionMessage( + arena, + message, + ), + } +} + +/// Parse account keys for a legacy transaction message +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_accounts.rs#L7 +fn parseLegacyMessageAccounts( + arena: Allocator, + message: sig.core.transaction.Message, + reserved_account_keys: *const ReservedAccounts, +) ![]const GetBlock.Response.ParsedAccount { + var accounts = try arena.alloc( + GetBlock.Response.ParsedAccount, + message.account_keys.len, + ); + for (message.account_keys, 0..) 
|account_key, i| { + accounts[i] = .{ + .pubkey = account_key, + .writable = message.isWritable( + i, + null, + reserved_account_keys, + ), + .signer = message.isSigner(i), + .source = .transaction, + }; + } + return accounts; +} + +/// Parse account keys for a versioned transaction message +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_accounts.rs#L21 +fn parseV0MessageAccounts( + arena: Allocator, + message: sig.core.transaction.Message, + account_keys: AccountKeys, + reserved_account_keys: *const ReservedAccounts, +) ![]const GetBlock.Response.ParsedAccount { + const loaded_addresses: LoadedAddresses = account_keys.dynamic_keys orelse .{ + .writable = &.{}, + .readonly = &.{}, + }; + const total_len = account_keys.len(); + var accounts = try arena.alloc(GetBlock.Response.ParsedAccount, total_len); + + for (0..total_len) |i| { + const account_key = account_keys.get(i).?; + accounts[i] = .{ + .pubkey = account_key, + .writable = message.isWritable(i, .{ + .writable = loaded_addresses.writable, + .readonly = loaded_addresses.readonly, + }, reserved_account_keys), + .signer = message.isSigner(i), + .source = if (i < message.account_keys.len) .transaction else .lookupTable, + }; + } + return accounts; +} + +/// Parse transaction and its metadata into the UiTransactionStatusMeta format for the jsonParsed encoding +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L200 +fn parseUiTransactionStatusMeta( + arena: Allocator, + meta: sig.ledger.transaction_status.TransactionStatusMeta, + static_keys: []const Pubkey, + show_rewards: bool, +) !GetBlock.Response.UiTransactionStatusMeta { + const account_keys = AccountKeys.init( + static_keys, + meta.loaded_addresses, + ); + + // Build status field + const status: GetBlock.Response.UiTransactionResultStatus = if (meta.status) |err| + .{ .Ok = null, .Err = err } + else + .{ .Ok = 
.{}, .Err = null }; + + // Convert inner instructions + const inner_instructions: []const parse_instruction.UiInnerInstructions = blk: { + if (meta.inner_instructions) |iis| { + var inner_instructions = try arena.alloc( + parse_instruction.UiInnerInstructions, + iis.len, + ); + for (iis, 0..) |ii, i| { + inner_instructions[i] = try parse_instruction.parseUiInnerInstructions( + arena, + ii, + &account_keys, + ); + } + break :blk inner_instructions; + } else break :blk &.{}; + }; + + // Convert token balances + const pre_token_balances = if (meta.pre_token_balances) |balances| + try convertTokenBalances(arena, balances) + else + &.{}; + + const post_token_balances = if (meta.post_token_balances) |balances| + try convertTokenBalances(arena, balances) + else + &.{}; + + // Convert return data + const return_data = if (meta.return_data) |rd| + try convertReturnData(arena, rd) + else + null; + + // Duplicate log messages (original memory will be freed with block.deinit) + const log_messages: []const []const u8 = if (meta.log_messages) |logs| blk: { + const duped = try arena.alloc([]const u8, logs.len); + for (logs, 0..) 
|log, i| { + duped[i] = try arena.dupe(u8, log); + } + break :blk duped; + } else &.{}; + + const rewards = if (show_rewards) try convertRewards( + arena, + meta.rewards, + ) else &.{}; + + return .{ + .err = meta.status, + .status = status, + .fee = meta.fee, + .preBalances = try arena.dupe(u64, meta.pre_balances), + .postBalances = try arena.dupe(u64, meta.post_balances), + .innerInstructions = .{ .value = inner_instructions }, + .logMessages = .{ .value = log_messages }, + .preTokenBalances = .{ .value = pre_token_balances }, + .postTokenBalances = .{ .value = post_token_balances }, + .rewards = .{ .value = rewards }, + .loadedAddresses = .skip, + .returnData = if (return_data) |rd| .{ .value = rd } else .skip, + .computeUnitsConsumed = if (meta.compute_units_consumed) |cuc| .{ + .value = cuc, + } else .skip, + .costUnits = if (meta.cost_units) |cu| .{ .value = cu } else .skip, + }; +} + +/// Encode a transaction for transactionDetails=accounts +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L477 +fn buildJsonAccounts( + arena: Allocator, + tx_with_meta: sig.ledger.Reader.TransactionWithStatusMeta, + max_supported_version: ?u8, + show_rewards: bool, +) !GetBlock.Response.EncodedTransactionWithStatusMeta { + switch (tx_with_meta) { + .missing_metadata => |tx| return .{ + .version = null, + .transaction = try buildTransactionJsonAccounts( + arena, + tx, + ), + .meta = null, + }, + .complete => |vtx| return try buildJsonAccountsWithMeta( + arena, + vtx, + max_supported_version, + show_rewards, + ), + } +} + +/// Parse json accounts for a transaction without metadata +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L733 +fn buildTransactionJsonAccounts( + arena: Allocator, + transaction: sig.core.Transaction, +) !GetBlock.Response.EncodedTransaction { + var reserved_account_keys = try 
ReservedAccounts.initAllActivated(arena); + return .{ .accounts = .{ + .signatures = try arena.dupe(Signature, transaction.signatures), + .accountKeys = try parseLegacyMessageAccounts( + arena, + transaction.msg, + &reserved_account_keys, + ), + } }; +} + +/// Parse json accounts for a versioned transaction with metadata +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L555 +fn buildJsonAccountsWithMeta( + arena: Allocator, + tx_with_meta: sig.ledger.Reader.VersionedTransactionWithStatusMeta, + max_supported_version: ?u8, + show_rewards: bool, +) !GetBlock.Response.EncodedTransactionWithStatusMeta { + const version = try validateVersion( + tx_with_meta.transaction.version, + max_supported_version, + ); + const reserved_account_keys = try ReservedAccounts.initAllActivated( + arena, + ); + + const account_keys = switch (tx_with_meta.transaction.version) { + .legacy => try parseLegacyMessageAccounts( + arena, + tx_with_meta.transaction.msg, + &reserved_account_keys, + ), + .v0 => try parseV0MessageAccounts( + arena, + tx_with_meta.transaction.msg, + AccountKeys.init( + tx_with_meta.transaction.msg.account_keys, + tx_with_meta.meta.loaded_addresses, + ), + &reserved_account_keys, + ), + }; + + return .{ + .transaction = .{ .accounts = .{ + .signatures = try arena.dupe(Signature, tx_with_meta.transaction.signatures), + .accountKeys = account_keys, + } }, + .meta = try buildSimpleUiTransactionStatusMeta( + arena, + tx_with_meta.meta, + show_rewards, + ), + .version = version, + }; +} + +/// Build a simplified UiTransactionStatusMeta with only the fields required for transactionDetails=accounts +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L168 +fn buildSimpleUiTransactionStatusMeta( + arena: Allocator, + meta: sig.ledger.transaction_status.TransactionStatusMeta, + show_rewards: bool, +) 
!GetBlock.Response.UiTransactionStatusMeta { + return .{ + .err = meta.status, + .status = if (meta.status) |err| + .{ .Ok = null, .Err = err } + else + .{ .Ok = .{}, .Err = null }, + .fee = meta.fee, + .preBalances = try arena.dupe(u64, meta.pre_balances), + .postBalances = try arena.dupe(u64, meta.post_balances), + .innerInstructions = .skip, + .logMessages = .skip, + .preTokenBalances = .{ .value = if (meta.pre_token_balances) |balances| + try LedgerHookContext.convertTokenBalances(arena, balances) + else + &.{} }, + .postTokenBalances = .{ .value = if (meta.post_token_balances) |balances| + try LedgerHookContext.convertTokenBalances(arena, balances) + else + &.{} }, + .rewards = if (show_rewards) rewards: { + if (meta.rewards) |rewards| { + const converted = try arena.alloc(GetBlock.Response.UiReward, rewards.len); + for (rewards, 0..) |reward, i| { + converted[i] = try GetBlock.Response.UiReward.fromLedgerReward(reward); + } + break :rewards .{ .value = converted }; + } else break :rewards .{ .value = &.{} }; + } else .skip, + .loadedAddresses = .skip, + .returnData = .skip, + .computeUnitsConsumed = .skip, + .costUnits = .skip, + }; +} + +/// Convert inner instructions to wire format. +fn convertInnerInstructions( + arena: Allocator, + inner_instructions: []const sig.ledger.transaction_status.InnerInstructions, +) ![]const parse_instruction.UiInnerInstructions { + const result = try arena.alloc( + parse_instruction.UiInnerInstructions, + inner_instructions.len, + ); + + for (inner_instructions, 0..) |ii, i| { + const instructions = try arena.alloc( + parse_instruction.UiInstruction, + ii.instructions.len, + ); + + for (ii.instructions, 0..) 
|inner_ix, j| { + const data_str = blk: { + var ret = try arena.alloc( + u8, + base58.encodedMaxSize(inner_ix.instruction.data.len), + ); + break :blk ret[0..base58.Table.BITCOIN.encode( + ret, + inner_ix.instruction.data, + )]; + }; + + instructions[j] = .{ .compiled = .{ + .programIdIndex = inner_ix.instruction.program_id_index, + .accounts = try arena.dupe(u8, inner_ix.instruction.accounts), + .data = data_str, + .stackHeight = inner_ix.stack_height, + } }; + } + + result[i] = .{ + .index = ii.index, + .instructions = instructions, + }; + } + + return result; +} + +/// Convert token balances to wire format. +fn convertTokenBalances( + arena: Allocator, + balances: []const sig.ledger.transaction_status.TransactionTokenBalance, +) ![]const GetBlock.Response.UiTransactionTokenBalance { + const result = try arena.alloc( + GetBlock.Response.UiTransactionTokenBalance, + balances.len, + ); + + for (balances, 0..) |b, i| { + result[i] = .{ + .accountIndex = b.account_index, + .mint = b.mint, + .owner = b.owner, + .programId = b.program_id, + .uiTokenAmount = .{ + .amount = try arena.dupe(u8, b.ui_token_amount.amount), + .decimals = b.ui_token_amount.decimals, + .uiAmount = b.ui_token_amount.ui_amount, + .uiAmountString = try arena.dupe(u8, b.ui_token_amount.ui_amount_string), + }, + }; + } + + return result; +} + +/// Convert loaded addresses to wire format. +fn convertLoadedAddresses( + arena: Allocator, + loaded: LoadedAddresses, +) !GetBlock.Response.UiLoadedAddresses { + return .{ + .writable = try arena.dupe(Pubkey, loaded.writable), + .readonly = try arena.dupe(Pubkey, loaded.readonly), + }; +} + +/// Convert return data to wire format. 
+fn convertReturnData( + arena: Allocator, + return_data: sig.ledger.transaction_status.TransactionReturnData, +) !GetBlock.Response.UiTransactionReturnData { + // Base64 encode the return data + const encoded_len = std.base64.standard.Encoder.calcSize(return_data.data.len); + const base64_data = try arena.alloc(u8, encoded_len); + _ = std.base64.standard.Encoder.encode(base64_data, return_data.data); + + return .{ + .programId = return_data.program_id, + .data = .{ base64_data, .base64 }, + }; +} + +/// Convert internal reward format to RPC response format. +fn convertRewards( + arena: Allocator, + internal_rewards: ?[]const sig.ledger.meta.Reward, +) ![]const GetBlock.Response.UiReward { + if (internal_rewards == null) return &.{}; + const rewards_value = internal_rewards orelse return &.{}; + const rewards = try arena.alloc(GetBlock.Response.UiReward, rewards_value.len); + + for (rewards_value, 0..) |r, i| { + rewards[i] = try GetBlock.Response.UiReward.fromLedgerReward(r); + } + return rewards; +} + +fn convertBlockRewards( + arena: Allocator, + block_rewards: *const sig.replay.rewards.BlockRewards, +) ![]const GetBlock.Response.UiReward { + const items = block_rewards.items(); + const rewards = try arena.alloc(GetBlock.Response.UiReward, items.len); + + for (items, 0..) |r, i| { + rewards[i] = .{ + .pubkey = r.pubkey, + .lamports = r.reward_info.lamports, + .postBalance = r.reward_info.post_balance, + .rewardType = switch (r.reward_info.reward_type) { + .fee => .Fee, + .rent => .Rent, + .staking => .Staking, + .voting => .Voting, + }, + .commission = r.reward_info.commission, + }; + } + return rewards; +} + +test "validateVersion: legacy with max_supported_version" { + const result = try LedgerHookContext.validateVersion(.legacy, 0); + try std.testing.expect(result != null); + try std.testing.expect(result.? 
== .legacy);
+}
+
+test "validateVersion: v0 with max_supported_version >= 0" {
+    const result = try LedgerHookContext.validateVersion(.v0, 0);
+    try std.testing.expect(result != null);
+    try std.testing.expectEqual(@as(u8, 0), result.?.number);
+}
+
+test "validateVersion: legacy without max_supported_version returns null" {
+    const result = try LedgerHookContext.validateVersion(.legacy, null);
+    try std.testing.expect(result == null);
+}
+
+test "validateVersion: v0 without max_supported_version errors" {
+    const result = LedgerHookContext.validateVersion(.v0, null);
+    try std.testing.expectError(error.UnsupportedTransactionVersion, result);
+}
+
+test "buildSimpleUiTransactionStatusMeta: basic" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    const result = try LedgerHookContext.buildSimpleUiTransactionStatusMeta(allocator, meta, false);
+
+    // Basic fields
+    try std.testing.expectEqual(@as(u64, 0), result.fee);
+    try std.testing.expect(result.err == null);
+    // innerInstructions and logMessages should be skipped for accounts mode
+    try std.testing.expect(result.innerInstructions == .skip);
+    try std.testing.expect(result.logMessages == .skip);
+    // show_rewards false → skip
+    try std.testing.expect(result.rewards == .skip);
+}
+
+test "buildSimpleUiTransactionStatusMeta: show_rewards true with empty rewards" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    const result = try LedgerHookContext.buildSimpleUiTransactionStatusMeta(allocator, meta, true);
+
+    // show_rewards true but meta.rewards is null → empty value
+    try std.testing.expect(result.rewards == .value);
+}
+
+test "encodeLegacyTransactionMessage: 
json encoding" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const msg = sig.core.transaction.Message{
+        .signature_count = 1,
+        .readonly_signed_count = 0,
+        .readonly_unsigned_count = 1,
+        .account_keys = &.{ Pubkey.ZEROES, Pubkey{ .data = [_]u8{0xFF} ** 32 } },
+        .recent_blockhash = sig.core.Hash.ZEROES,
+        .instructions = &.{},
+        .address_lookups = &.{},
+    };
+
+    const result = try LedgerHookContext.encodeLegacyTransactionMessage(allocator, msg, .json);
+    // Result should be a raw message
+    const raw = result.raw;
+
+    try std.testing.expectEqual(@as(u8, 1), raw.header.numRequiredSignatures);
+    try std.testing.expectEqual(@as(u8, 0), raw.header.numReadonlySignedAccounts);
+    try std.testing.expectEqual(@as(u8, 1), raw.header.numReadonlyUnsignedAccounts);
+    try std.testing.expectEqual(@as(usize, 2), raw.account_keys.len);
+    try std.testing.expectEqual(@as(usize, 0), raw.instructions.len);
+    // Legacy should have no address table lookups
+    try std.testing.expect(raw.address_table_lookups == null);
+}
+
+test "jsonEncodeV0TransactionMessage: with address lookups" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const msg = sig.core.transaction.Message{
+        .signature_count = 1,
+        .readonly_signed_count = 0,
+        .readonly_unsigned_count = 0,
+        .account_keys = &.{Pubkey.ZEROES},
+        .recent_blockhash = sig.core.Hash.ZEROES,
+        .instructions = &.{},
+        .address_lookups = &.{.{
+            .table_address = Pubkey{ .data = [_]u8{0xAA} ** 32 },
+            .writable_indexes = &[_]u8{ 0, 1 },
+            .readonly_indexes = &[_]u8{2},
+        }},
+    };
+
+    const result = try LedgerHookContext.jsonEncodeV0TransactionMessage(allocator, msg);
+    const raw = result.raw;
+
+    try std.testing.expectEqual(@as(usize, 1), raw.account_keys.len);
+    // V0 should have address table lookups
+    try std.testing.expect(raw.address_table_lookups 
!= null);
+    try std.testing.expectEqual(@as(usize, 1), raw.address_table_lookups.?.len);
+    try std.testing.expectEqualSlices(
+        u8,
+        &.{ 0, 1 },
+        raw.address_table_lookups.?[0].writableIndexes,
+    );
+    try std.testing.expectEqualSlices(u8, &.{2}, raw.address_table_lookups.?[0].readonlyIndexes);
+
+    // Clean up
+    allocator.free(raw.account_keys);
+    for (raw.address_table_lookups.?) |atl| {
+        allocator.free(atl.writableIndexes);
+        allocator.free(atl.readonlyIndexes);
+    }
+    allocator.free(raw.address_table_lookups.?);
+}
+
+test "encodeLegacyTransactionMessage: base64 encoding" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const msg = sig.core.transaction.Message{
+        .signature_count = 1,
+        .readonly_signed_count = 0,
+        .readonly_unsigned_count = 1,
+        .account_keys = &.{ Pubkey{ .data = [_]u8{0x11} ** 32 }, Pubkey.ZEROES },
+        .recent_blockhash = sig.core.Hash.ZEROES,
+        .instructions = &.{},
+        .address_lookups = &.{},
+    };
+
+    // Non-json encodings fall through to the else branch producing raw messages
+    const result = try LedgerHookContext.encodeLegacyTransactionMessage(allocator, msg, .base64);
+    const raw = result.raw;
+
+    try std.testing.expectEqual(@as(u8, 1), raw.header.numRequiredSignatures);
+    try std.testing.expectEqual(@as(usize, 2), raw.account_keys.len);
+    try std.testing.expect(raw.address_table_lookups == null);
+
+    allocator.free(raw.account_keys);
+}
+
+test "encodeTransactionWithoutMeta: base64 encoding" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const tx = sig.core.Transaction.EMPTY;
+
+    const result = try LedgerHookContext.encodeTransactionWithoutMeta(allocator, tx, .base64);
+    const binary = result.binary;
+
+    try std.testing.expect(binary[1] == .base64);
+    // base64 encoded data should be non-empty (even empty tx has some bincode overhead)
+    try 
std.testing.expect(binary[0].len > 0);
+}
+
+test "encodeTransactionWithoutMeta: json encoding" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const tx = sig.core.Transaction.EMPTY;
+
+    const result = try LedgerHookContext.encodeTransactionWithoutMeta(allocator, tx, .json);
+    const json = result.json;
+
+    // Should produce a json result with signatures and message
+    try std.testing.expectEqual(@as(usize, 0), json.signatures.len);
+    // Message should be a raw (non-parsed) message for legacy
+    const raw = json.message.raw;
+    try std.testing.expectEqual(@as(u8, 0), raw.header.numRequiredSignatures);
+    try std.testing.expect(raw.address_table_lookups == null);
+}
+
+test "encodeTransactionWithoutMeta: base58 encoding" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const tx = sig.core.Transaction.EMPTY;
+
+    const result = try LedgerHookContext.encodeTransactionWithoutMeta(allocator, tx, .base58);
+    const binary = result.binary;
+
+    try std.testing.expect(binary[1] == .base58);
+    try std.testing.expect(binary[0].len > 0);
+}
+
+test "encodeTransactionWithoutMeta: legacy binary encoding" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const tx = sig.core.Transaction.EMPTY;
+
+    const result = try LedgerHookContext.encodeTransactionWithoutMeta(allocator, tx, .binary);
+    const legacy_binary = result.legacy_binary;
+
+    try std.testing.expect(legacy_binary.len > 0);
+}
+
+test "parseUiTransactionStatusMetaFromLedger: always includes loadedAddresses" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    const result = try 
parseUiTransactionStatusMetaFromLedger(
+        allocator,
+        meta,
+        true,
+    );
+    defer {
+        allocator.free(result.preBalances);
+        allocator.free(result.postBalances);
+        if (result.loadedAddresses == .value) {
+            allocator.free(result.loadedAddresses.value.writable);
+            allocator.free(result.loadedAddresses.value.readonly);
+        }
+    }
+    // loadedAddresses should always have a value
+    try std.testing.expect(result.loadedAddresses == .value);
+}
+
+test "parseUiTransactionStatusMetaFromLedger: show_rewards false skips rewards" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    const result = try parseUiTransactionStatusMetaFromLedger(
+        allocator,
+        meta,
+        false,
+    );
+    defer {
+        allocator.free(result.preBalances);
+        allocator.free(result.postBalances);
+    }
+    // Rewards should be .none (serialized as null) when show_rewards is false
+    try std.testing.expect(result.rewards == .none);
+}
+
+test "parseUiTransactionStatusMetaFromLedger: show_rewards true includes rewards" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+    const meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    const result = try parseUiTransactionStatusMetaFromLedger(
+        allocator,
+        meta,
+        true,
+    );
+    defer {
+        allocator.free(result.preBalances);
+        allocator.free(result.postBalances);
+    }
+    // Rewards should be present (as value) when show_rewards is true
+    try std.testing.expect(result.rewards != .skip);
+}
+
+test "parseUiTransactionStatusMetaFromLedger: compute_units_consumed present" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    var meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    meta.compute_units_consumed = 42_000;
+    const 
result = try parseUiTransactionStatusMetaFromLedger(
+        allocator,
+        meta,
+        false,
+    );
+    try std.testing.expect(result.computeUnitsConsumed == .value);
+    try std.testing.expectEqual(@as(u64, 42_000), result.computeUnitsConsumed.value);
+}
+
+test "parseUiTransactionStatusMetaFromLedger: compute_units_consumed absent" {
+    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    const meta = sig.ledger.transaction_status.TransactionStatusMeta.EMPTY_FOR_TEST;
+    const result = try parseUiTransactionStatusMetaFromLedger(
+        allocator,
+        meta,
+        false,
+    );
+    try std.testing.expect(result.computeUnitsConsumed == .skip);
+}
diff --git a/src/rpc/hook_contexts/lib.zig b/src/rpc/hook_contexts/lib.zig
new file mode 100644
index 0000000000..4347d266df
--- /dev/null
+++ b/src/rpc/hook_contexts/lib.zig
@@ -0,0 +1 @@
+pub const Ledger = @import("Ledger.zig");
diff --git a/src/rpc/lib.zig b/src/rpc/lib.zig
index dbe76ca405..7cfea7245d 100644
--- a/src/rpc/lib.zig
+++ b/src/rpc/lib.zig
@@ -1,6 +1,8 @@
 pub const client = @import("client.zig");
+pub const hook_contexts = @import("hook_contexts/lib.zig");
 pub const http = @import("http.zig");
 pub const methods = @import("methods.zig");
+pub const parse_instruction = @import("parse_instruction/lib.zig");
 pub const request = @import("request.zig");
 pub const response = @import("response.zig");
 pub const server = @import("server/lib.zig");
diff --git a/src/rpc/methods.zig b/src/rpc/methods.zig
index ac392d4378..4ffa90a0bb 100644
--- a/src/rpc/methods.zig
+++ b/src/rpc/methods.zig
@@ -11,10 +11,13 @@ const std = @import("std");
 const sig = @import("../sig.zig");
 const rpc = @import("lib.zig");
+const base58 = @import("base58");
+const parse_instruction = @import("parse_instruction/lib.zig");
 const Allocator = std.mem.Allocator;
 const ParseOptions = std.json.ParseOptions;
+const Hash = sig.core.Hash;
 const Pubkey = sig.core.Pubkey;
 const Signature = 
sig.core.Signature; const Slot = sig.core.Slot; @@ -302,18 +305,601 @@ pub const GetHealth = struct { }; pub const GetBlock = struct { - config: ?Config = null, + /// The slot to get the block for (first positional argument) + slot: Slot, + encoding_or_config: ?EncodingOrConfig = null, pub const Config = struct { + /// Only `confirmed` and `finalized` are supported. `processed` is rejected. commitment: ?common.Commitment = null, - encoding: ?enum { json, jsonParsed, base58, base64 } = null, - transactionDetails: ?[]const u8 = null, - maxSupportedTransactionVersion: ?u64 = null, + encoding: ?common.TransactionEncoding = null, + transactionDetails: ?common.TransactionDetails = null, + maxSupportedTransactionVersion: ?u8 = null, rewards: ?bool = null, + + pub fn getCommitment(self: Config) common.Commitment { + return self.commitment orelse Commitment.finalized; + } + + pub fn getEncoding(self: Config) common.TransactionEncoding { + return self.encoding orelse common.TransactionEncoding.json; + } + + pub fn getTransactionDetails(self: Config) common.TransactionDetails { + return self.transactionDetails orelse common.TransactionDetails.full; + } + + pub fn getMaxSupportedTransactionVersion(self: Config) u8 { + return self.maxSupportedTransactionVersion orelse 0; + } + + pub fn getRewards(self: Config) bool { + return self.rewards orelse true; + } + }; + + /// RPC spec allows either a config or just an encoding + /// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/rpc-client-types/src/config.rs#L233 + pub const EncodingOrConfig = union(enum) { + encoding: common.TransactionEncoding, + config: Config, + + pub fn jsonParseFromValue( + allocator: std.mem.Allocator, + source: std.json.Value, + options: std.json.ParseOptions, + ) std.json.ParseFromValueError!EncodingOrConfig { + return switch (source) { + .string => |s| .{ + .encoding = std.meta.stringToEnum(common.TransactionEncoding, s) orelse + return error.InvalidEnumTag, + }, + 
.object => .{ .config = try std.json.innerParseFromValue( + Config, + allocator, + source, + options, + ) }, + else => error.UnexpectedToken, + }; + } + + pub fn jsonStringify(self: EncodingOrConfig, jw: anytype) !void { + switch (self) { + .encoding => |enc| try jw.write(@tagName(enc)), + .config => |c| try jw.write(c), + } + } }; - // TODO: response - pub const Response = noreturn; + pub fn resolveConfig(self: GetBlock) Config { + const eoc = self.encoding_or_config orelse return Config{}; + return switch (eoc) { + .encoding => |enc| Config{ + .encoding = enc, + }, + .config => |c| c, + }; + } + + /// Response for getBlock RPC method (UiConfirmedBlock equivalent) + pub const Response = struct { + /// The blockhash of the previous block + previousBlockhash: Hash, + /// The blockhash of this block + blockhash: Hash, + /// The slot of the parent block + parentSlot: u64, + /// Transactions in the block (present when transactionDetails is full or accounts) + transactions: ?[]const EncodedTransactionWithStatusMeta = null, + /// Transaction signatures (present when transactionDetails is signatures) + signatures: ?[]const Signature = null, + /// Block rewards (present when rewards=true, which is the default) + rewards: ?[]const UiReward = null, + /// Number of reward partitions (if applicable) + numRewardPartitions: ?u64 = null, + /// Estimated production time as Unix timestamp (seconds since epoch) + blockTime: ?i64 = null, + /// Block height + blockHeight: ?u64 = null, + + pub fn jsonStringify(self: Response, jw: anytype) !void { + try jw.beginObject(); + if (self.blockHeight) |h| { + try jw.objectField("blockHeight"); + try jw.write(h); + } + if (self.blockTime) |t| { + try jw.objectField("blockTime"); + try jw.write(t); + } + try jw.objectField("blockhash"); + try jw.write(self.blockhash); + try jw.objectField("parentSlot"); + try jw.write(self.parentSlot); + try jw.objectField("previousBlockhash"); + try jw.write(self.previousBlockhash); + if (self.rewards) |r| { + 
try jw.objectField("rewards"); + try jw.write(r); + } + if (self.transactions) |txs| { + try jw.objectField("transactions"); + try jw.write(txs); + } + if (self.signatures) |sigs| { + try jw.objectField("signatures"); + try jw.write(sigs); + } + try jw.endObject(); + } + + /// Write a `[]const u8` as a JSON array of integers instead of a string. + /// Zig's JSON writer treats `[]const u8` as a string, but Agave's serde + /// serializes `Vec` as an array of integers (e.g. `[0, 1, 4]`). + fn writeU8SliceAsIntArray(slice: []const u8, jw: anytype) !void { + try jw.beginArray(); + for (slice) |byte| { + try jw.write(byte); + } + try jw.endArray(); + } + + /// Encoded transaction with status metadata for RPC response. + pub const EncodedTransactionWithStatusMeta = struct { + /// The transaction - either base64 encoded binary or JSON structure + transaction: EncodedTransaction, + /// Transaction status metadata + meta: ?UiTransactionStatusMeta = null, + /// Transaction version ("legacy" or version number) + version: ?TransactionVersion = null, + + pub const TransactionVersion = union(enum) { + legacy, + number: u8, + + pub fn jsonStringify(self: TransactionVersion, jw: anytype) !void { + switch (self) { + .legacy => try jw.write("legacy"), + .number => |n| try jw.write(n), + } + } + }; + + pub fn jsonStringify(self: EncodedTransactionWithStatusMeta, jw: anytype) !void { + try jw.beginObject(); + if (self.meta) |m| { + try jw.objectField("meta"); + try jw.write(m); + } + try jw.objectField("transaction"); + try jw.write(self.transaction); + if (self.version) |v| { + try jw.objectField("version"); + try v.jsonStringify(jw); + } + try jw.endObject(); + } + }; + + /// Encoded transaction - can be either base64/base58 binary or JSON structure. 
+ /// For base64/base58: serializes as [data, encoding] array + /// For JSON: serializes as object with signatures and message + pub const EncodedTransaction = union(enum) { + legacy_binary: []const u8, + /// Binary encoding: [base64_data, "base64"] or [base58_data, "base58"] + binary: struct { + []const u8, + enum { base58, base64 }, + }, + /// JSON encoding: object with signatures and message + json: struct { + signatures: []const Signature, + message: UiMessage, + }, + accounts: struct { + signatures: []const Signature, + accountKeys: []const ParsedAccount, + }, + + pub fn jsonStringify(self: EncodedTransaction, jw: anytype) !void { + switch (self) { + .legacy_binary => |b| try jw.write(b), + .binary => |b| try jw.write(b), + .json => |j| try jw.write(j), + .accounts => |a| try jw.write(a), + } + } + }; + + pub const UiMessage = union(enum) { + parsed: UiParsedMessage, + raw: UiRawMessage, + + pub fn jsonStringify(self: UiMessage, jw: anytype) !void { + switch (self) { + .parsed => |p| try jw.write(p), + .raw => |r| try jw.write(r), + } + } + }; + + pub const UiParsedMessage = struct { + account_keys: []const ParsedAccount, + recent_blockhash: Hash, + instructions: []const parse_instruction.UiInstruction, + address_table_lookups: ?[]const AddressTableLookup = null, + + pub fn jsonStringify(self: UiParsedMessage, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accountKeys"); + try jw.write(self.account_keys); + try jw.objectField("recentBlockhash"); + try jw.write(self.recent_blockhash); + try jw.objectField("instructions"); + try jw.write(self.instructions); + if (self.address_table_lookups) |atl| { + try jw.objectField("addressTableLookups"); + try jw.write(atl); + } + try jw.endObject(); + } + }; + + pub const MessageHeader = struct { + numRequiredSignatures: u8, + numReadonlySignedAccounts: u8, + numReadonlyUnsignedAccounts: u8, + }; + + pub const UiRawMessage = struct { + header: MessageHeader, + account_keys: []const Pubkey, + 
recent_blockhash: Hash, + instructions: []const parse_instruction.UiCompiledInstruction, + address_table_lookups: ?[]const AddressTableLookup = null, + + pub fn jsonStringify(self: UiRawMessage, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accountKeys"); + try jw.write(self.account_keys); + try jw.objectField("header"); + try jw.write(self.header); + try jw.objectField("recentBlockhash"); + try jw.write(self.recent_blockhash); + try jw.objectField("instructions"); + try jw.write(self.instructions); + if (self.address_table_lookups) |atl| { + try jw.objectField("addressTableLookups"); + try jw.write(atl); + } + try jw.endObject(); + } + }; + + /// JSON-encoded message + pub const EncodedMessage = struct { + accountKeys: []const Pubkey, + header: MessageHeader, + recentBlockhash: Hash, + instructions: []const EncodedInstruction, + addressTableLookups: ?[]const AddressTableLookup = null, + + pub fn jsonStringify(self: EncodedMessage, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accountKeys"); + try jw.write(self.accountKeys); + try jw.objectField("header"); + try jw.write(self.header); + try jw.objectField("recentBlockhash"); + try jw.write(self.recentBlockhash); + try jw.objectField("instructions"); + try jw.write(self.instructions); + if (self.addressTableLookups) |atl| { + try jw.objectField("addressTableLookups"); + try jw.write(atl); + } + try jw.endObject(); + } + }; + + pub const EncodedInstruction = struct { + programIdIndex: u8, + accounts: []const u8, + data: []const u8, + stackHeight: ?u32 = null, + + pub fn jsonStringify(self: EncodedInstruction, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("programIdIndex"); + try jw.write(self.programIdIndex); + try jw.objectField("accounts"); + try writeU8SliceAsIntArray(self.accounts, jw); + try jw.objectField("data"); + try jw.write(self.data); + if (self.stackHeight) |sh| { + try jw.objectField("stackHeight"); + try jw.write(sh); + } + try 
jw.endObject(); + } + }; + + pub const AddressTableLookup = struct { + accountKey: Pubkey, + writableIndexes: []const u8, + readonlyIndexes: []const u8, + + pub fn jsonStringify(self: AddressTableLookup, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accountKey"); + try jw.write(self.accountKey); + try jw.objectField("readonlyIndexes"); + try writeU8SliceAsIntArray(self.readonlyIndexes, jw); + try jw.objectField("writableIndexes"); + try writeU8SliceAsIntArray(self.writableIndexes, jw); + try jw.endObject(); + } + }; + + /// Account key with metadata (for jsonParsed and accounts modes) + pub const ParsedAccount = struct { + pubkey: Pubkey, + writable: bool, + signer: bool, + source: ParsedAccountSource, + + pub fn jsonStringify(self: ParsedAccount, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("pubkey"); + try jw.write(self.pubkey); + try jw.objectField("signer"); + try jw.write(self.signer); + try jw.objectField("source"); + try jw.write(@tagName(self.source)); + try jw.objectField("writable"); + try jw.write(self.writable); + try jw.endObject(); + } + }; + + pub const ParsedAccountSource = enum { + transaction, + lookupTable, + }; + + /// UI representation of transaction status metadata + pub const UiTransactionStatusMeta = struct { + err: ?sig.ledger.transaction_status.TransactionError = null, + status: UiTransactionResultStatus, + fee: u64, + preBalances: []const u64, + postBalances: []const u64, + innerInstructions: JsonSkippable([]const parse_instruction.UiInnerInstructions) = .{ + .value = &.{}, + }, + logMessages: JsonSkippable([]const []const u8) = .{ + .value = &.{}, + }, + preTokenBalances: JsonSkippable([]const UiTransactionTokenBalance) = .{ + .value = &.{}, + }, + postTokenBalances: JsonSkippable([]const UiTransactionTokenBalance) = .{ + .value = &.{}, + }, + rewards: JsonSkippable([]const UiReward) = .{ .value = &.{} }, + loadedAddresses: JsonSkippable(UiLoadedAddresses) = .skip, + returnData: 
JsonSkippable(UiTransactionReturnData) = .skip, + computeUnitsConsumed: JsonSkippable(u64) = .skip, + costUnits: JsonSkippable(u64) = .skip, + + pub fn jsonStringify(self: UiTransactionStatusMeta, jw: anytype) !void { + try jw.beginObject(); + if (self.computeUnitsConsumed != .skip) { + try jw.objectField("computeUnitsConsumed"); + try jw.write(self.computeUnitsConsumed); + } + if (self.costUnits != .skip) { + try jw.objectField("costUnits"); + try jw.write(self.costUnits); + } + try jw.objectField("err"); + try jw.write(self.err); + try jw.objectField("fee"); + try jw.write(self.fee); + if (self.innerInstructions != .skip) { + try jw.objectField("innerInstructions"); + try jw.write(self.innerInstructions); + } + if (self.loadedAddresses != .skip) { + try jw.objectField("loadedAddresses"); + try jw.write(self.loadedAddresses); + } + if (self.logMessages != .skip) { + try jw.objectField("logMessages"); + try jw.write(self.logMessages); + } + try jw.objectField("postBalances"); + try jw.write(self.postBalances); + try jw.objectField("postTokenBalances"); + try jw.write(self.postTokenBalances); + try jw.objectField("preBalances"); + try jw.write(self.preBalances); + try jw.objectField("preTokenBalances"); + try jw.write(self.preTokenBalances); + if (self.returnData != .skip) { + try jw.objectField("returnData"); + try jw.write(self.returnData); + } + if (self.rewards != .skip) { + try jw.objectField("rewards"); + try jw.write(self.rewards); + } + try jw.objectField("status"); + try jw.write(self.status); + try jw.endObject(); + } + }; + + /// Transaction result status for RPC compatibility. + /// Serializes as `{"Ok": null}` on success or `{"Err": <error>}` on failure. 
+ pub const UiTransactionResultStatus = struct { + Ok: ?struct {} = null, + Err: ?sig.ledger.transaction_status.TransactionError = null, + + pub fn jsonStringify(self: UiTransactionResultStatus, jw: anytype) !void { + try jw.beginObject(); + if (self.Err) |err| { + try jw.objectField("Err"); + try jw.write(err); + } else { + try jw.objectField("Ok"); + try jw.write(null); + } + try jw.endObject(); + } + }; + + /// Token balance for RPC response (placeholder) + pub const UiTransactionTokenBalance = struct { + accountIndex: u8, + mint: Pubkey, + owner: ?Pubkey = null, + programId: ?Pubkey = null, + uiTokenAmount: UiTokenAmount, + + pub fn jsonStringify(self: UiTransactionTokenBalance, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accountIndex"); + try jw.write(self.accountIndex); + try jw.objectField("mint"); + try jw.write(self.mint); + if (self.owner) |o| { + try jw.objectField("owner"); + try jw.write(o); + } + if (self.programId) |p| { + try jw.objectField("programId"); + try jw.write(p); + } + try jw.objectField("uiTokenAmount"); + try jw.write(self.uiTokenAmount); + try jw.endObject(); + } + }; + + pub const UiTokenAmount = struct { + amount: []const u8, + decimals: u8, + uiAmount: ?f64 = null, + uiAmountString: []const u8, + + pub fn jsonStringify(self: UiTokenAmount, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("amount"); + try jw.write(self.amount); + try jw.objectField("decimals"); + try jw.write(self.decimals); + if (self.uiAmount) |ua| { + try jw.objectField("uiAmount"); + try writeExactFloat(jw, ua); + } + try jw.objectField("uiAmountString"); + try jw.write(self.uiAmountString); + try jw.endObject(); + } + + /// Write an f64 as a JSON number matching Rust's serde_json output. + /// Zig's std.json serializes 3.0 as "3e0", but serde serializes it as "3.0". 
+ fn writeExactFloat(jw: anytype, value: f64) !void { + var buf: [64]u8 = undefined; + const result = std.fmt.bufPrint(&buf, "{d}", .{value}) catch unreachable; + if (std.mem.indexOf(u8, result, ".") == null) { + try jw.print("{s}.0", .{result}); + } else { + try jw.print("{s}", .{result}); + } + } + }; + + pub const UiLoadedAddresses = struct { + readonly: []const Pubkey, + writable: []const Pubkey, + }; + + pub const UiTransactionReturnData = struct { + programId: Pubkey, + data: struct { []const u8, enum { base64 } }, + + pub fn jsonStringify(self: UiTransactionReturnData, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("programId"); + try jw.write(self.programId); + try jw.objectField("data"); + try jw.beginArray(); + try jw.write(self.data.@"0"); + try jw.write(@tagName(self.data.@"1")); + try jw.endArray(); + try jw.endObject(); + } + }; + + pub const UiReward = struct { + /// The public key of the account that received the reward (base-58 encoded) + pubkey: Pubkey, + /// Number of lamports credited or debited + lamports: i64, + /// Account balance in lamports after the reward was applied + postBalance: u64, + /// Type of reward + rewardType: ?RewardType = null, + /// Vote account commission when reward was credited (for voting/staking rewards) + commission: ?u8 = null, + + pub const RewardType = enum { + Fee, + Rent, + Staking, + Voting, + + pub fn jsonStringify(self: RewardType, jw: anytype) !void { + try jw.write(@tagName(self)); + } + }; + + pub fn jsonStringify(self: UiReward, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("pubkey"); + try jw.write(self.pubkey); + try jw.objectField("lamports"); + try jw.write(self.lamports); + try jw.objectField("postBalance"); + try jw.write(self.postBalance); + try jw.objectField("rewardType"); + try jw.write(self.rewardType); + try jw.objectField("commission"); + try jw.write(self.commission); + try jw.endObject(); + } + + pub fn fromLedgerReward( + reward: sig.ledger.meta.Reward, 
+ ) !UiReward { + return .{ + .pubkey = reward.pubkey, + .lamports = reward.lamports, + .postBalance = reward.post_balance, + .rewardType = if (reward.reward_type) |rt| switch (rt) { + .fee => RewardType.Fee, + .rent => RewardType.Rent, + .staking => RewardType.Staking, + .voting => RewardType.Voting, + } else null, + .commission = reward.commission, + }; + } + }; + }; }; pub const GetBlockCommitment = struct { @@ -714,6 +1300,21 @@ pub const common = struct { /// Shred version shredVersion: ?u16 = null, }; + + pub const TransactionEncoding = enum { + binary, + base58, + base64, + json, + jsonParsed, + }; + + pub const TransactionDetails = enum { + full, + accounts, + signatures, + none, + }; }; pub const RpcHookContext = struct { @@ -895,10 +1496,26 @@ pub const StaticHookContext = struct { genesis_hash: sig.core.Hash, pub fn getGenesisHash( - self: *const @This(), + self: *const StaticHookContext, _: std.mem.Allocator, _: GetGenesisHash, ) !GetGenesisHash.Response { return .{ .hash = self.genesis_hash }; } }; + +fn JsonSkippable(comptime T: type) type { + return union(enum) { + value: T, + none, + skip, + + pub fn jsonStringify(self: JsonSkippable(T), jw: anytype) !void { + switch (self) { + .value => |v| try jw.write(v), + .none => try jw.write(null), + .skip => {}, + } + } + }; +} diff --git a/src/rpc/parse_instruction/AccountKeys.zig b/src/rpc/parse_instruction/AccountKeys.zig new file mode 100644 index 0000000000..f1c515b4d9 --- /dev/null +++ b/src/rpc/parse_instruction/AccountKeys.zig @@ -0,0 +1,120 @@ +const sig = @import("../../sig.zig"); + +const Pubkey = sig.core.Pubkey; + +const AccountKeys = @This(); + +static_keys: []const Pubkey, +dynamic_keys: ?sig.ledger.transaction_status.LoadedAddresses, + +pub fn init( + static_keys: []const Pubkey, + dynamic_keys: ?sig.ledger.transaction_status.LoadedAddresses, +) AccountKeys { + return .{ + .static_keys = static_keys, + .dynamic_keys = dynamic_keys, + }; +} + +pub fn keySegmentIter(self: *const AccountKeys) 
[3][]const Pubkey { + if (self.dynamic_keys) |dynamic_keys| { + return .{ + self.static_keys, + dynamic_keys.writable, + dynamic_keys.readonly, + }; + } else { + return .{ self.static_keys, &.{}, &.{} }; + } +} + +pub fn get(self: *const AccountKeys, index: usize) ?Pubkey { + var index_tracker = index; + for (self.keySegmentIter()) |key_segment| { + if (index_tracker < key_segment.len) { + return key_segment[index_tracker]; + } + index_tracker = index_tracker -| key_segment.len; + } + return null; +} + +pub fn len(self: *const AccountKeys) usize { + var ret: usize = 0; + for (self.keySegmentIter()) |key_segment| { + ret = ret +| key_segment.len; + } + return ret; +} + +pub fn isEmpty(self: *const AccountKeys) bool { + return self.len() == 0; +} + +const testing = @import("std").testing; + +test "static keys only" { + const key0 = Pubkey{ .data = [_]u8{1} ** 32 }; + const key1 = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ key0, key1 }; + + const ak = AccountKeys.init(&static_keys, null); + try testing.expectEqual(@as(usize, 2), ak.len()); + try testing.expect(!ak.isEmpty()); + try testing.expectEqual(key0, ak.get(0).?); + try testing.expectEqual(key1, ak.get(1).?); + try testing.expectEqual(@as(?Pubkey, null), ak.get(2)); +} + +test "with dynamic keys" { + const key0 = Pubkey{ .data = [_]u8{1} ** 32 }; + const writable_key = Pubkey{ .data = [_]u8{3} ** 32 }; + const readonly_key = Pubkey{ .data = [_]u8{4} ** 32 }; + const static_keys = [_]Pubkey{key0}; + const writable = [_]Pubkey{writable_key}; + const readonly = [_]Pubkey{readonly_key}; + + const ak = AccountKeys.init(&static_keys, .{ + .writable = &writable, + .readonly = &readonly, + }); + try testing.expectEqual(@as(usize, 3), ak.len()); + try testing.expectEqual(key0, ak.get(0).?); // static + try testing.expectEqual(writable_key, ak.get(1).?); // writable dynamic + try testing.expectEqual(readonly_key, ak.get(2).?); // readonly dynamic + try testing.expectEqual(@as(?Pubkey, null), 
ak.get(3)); // out of bounds +} + +test "empty" { + const ak = AccountKeys.init(&.{}, null); + try testing.expectEqual(@as(usize, 0), ak.len()); + try testing.expect(ak.isEmpty()); + try testing.expectEqual(@as(?Pubkey, null), ak.get(0)); +} + +test "keySegmentIter without dynamic" { + const key0 = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{key0}; + const ak = AccountKeys.init(&static_keys, null); + + const segments = ak.keySegmentIter(); + try testing.expectEqual(@as(usize, 1), segments[0].len); + try testing.expectEqual(@as(usize, 0), segments[1].len); + try testing.expectEqual(@as(usize, 0), segments[2].len); +} + +test "keySegmentIter with dynamic" { + const static_keys = [_]Pubkey{Pubkey.ZEROES}; + const writable = [_]Pubkey{ Pubkey{ .data = [_]u8{1} ** 32 }, Pubkey{ .data = [_]u8{2} ** 32 } }; + const readonly = [_]Pubkey{Pubkey{ .data = [_]u8{3} ** 32 }}; + + const ak = AccountKeys.init(&static_keys, .{ + .writable = &writable, + .readonly = &readonly, + }); + const segments = ak.keySegmentIter(); + try testing.expectEqual(@as(usize, 1), segments[0].len); + try testing.expectEqual(@as(usize, 2), segments[1].len); + try testing.expectEqual(@as(usize, 1), segments[2].len); +} diff --git a/src/rpc/parse_instruction/lib.zig b/src/rpc/parse_instruction/lib.zig new file mode 100644 index 0000000000..fc94caf18d --- /dev/null +++ b/src/rpc/parse_instruction/lib.zig @@ -0,0 +1,6267 @@ +//! Instruction parsers for jsonParsed encoding mode. +//! +//! Parses compiled instructions from known programs (vote, system, spl-memo) +//! into structured JSON representations matching Agave's output format. +//! Unknown programs fall back to partially decoded representation. 
+ +const std = @import("std"); +const sig = @import("../../sig.zig"); +const base58 = @import("base58"); +pub const AccountKeys = @import("AccountKeys.zig"); + +const Allocator = std.mem.Allocator; +const JsonValue = std.json.Value; +const ObjectMap = std.json.ObjectMap; + +const AddressLookupTableInstruction = sig.runtime.program.address_lookup_table.Instruction; +const BpfUpgradeableLoaderInstruction = sig.runtime.program.bpf_loader.v3.Instruction; +const Hash = sig.core.Hash; +const Pubkey = sig.core.Pubkey; +const StakeAuthorize = sig.runtime.program.stake.state.StakeStateV2.StakeAuthorize; +const StakeInstruction = sig.runtime.program.stake.Instruction; +const StakeLockupArgs = sig.runtime.program.stake.LockupArgs; +const SystemInstruction = sig.runtime.program.system.Instruction; + +/// SPL Associated Token Account program ID +const SPL_ASSOCIATED_TOKEN_ACC_ID: Pubkey = .parse("ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL"); + +/// SPL Memo v1 program ID +const SPL_MEMO_V1_ID: Pubkey = .parse("Memo1UhkJRfHyvLMcVucJwxXeuD728EqVDDwQDxFMNo"); +/// SPL Memo v3 program ID +const SPL_MEMO_V3_ID: Pubkey = .parse("MemoSq4gqABAXKb96qnH8TysNcWxMyWCqXgDLGmfcHr"); + +/// BPF Loader v2 instruction enum (bincode serialized u32) +const BpfLoaderInstruction = union(enum(u32)) { + /// Write program data into a Buffer account. + /// # Account references + /// 0. `[writable]` Account to write to + write: struct { + offset: u32, + bytes: []const u8, + }, + /// Finalize a program (make it executable) + /// # Account references + /// 0. `[writable, signer]` The program account + /// 1. `[]` Rent sysvar + finalize, +}; + +/// Associated Token Account instruction enum (borsh serialized u8) +const AssociatedTokenAccountInstruction = enum(u8) { + /// Create an associated token account for the given wallet address and token mint. + /// Accounts: + /// 0. `[writeable, signer]` Funding account + /// 1. `[writeable]` Associated token account address + /// 2. 
`[]` Wallet address for the account + /// 3. `[]` The token mint + /// 4. `[]` System program + /// 5. `[]` SPL Token program + create = 0, + /// Create an associated token account for the given wallet address and token mint, + /// if it doesn't already exist. + create_idempotent = 1, + /// Recover nested associated token account. + recover_nested = 2, +}; + +pub const ParsableProgram = enum { + addressLookupTable, + splAssociatedTokenAccount, + splMemo, + splToken, + bpfLoader, + bpfUpgradeableLoader, + stake, + system, + vote, + + pub const PARSABLE_PROGRAMS = [_]struct { Pubkey, ParsableProgram }{ + .{ + sig.runtime.program.address_lookup_table.ID, + .addressLookupTable, + }, + .{ + SPL_ASSOCIATED_TOKEN_ACC_ID, + .splAssociatedTokenAccount, + }, + .{ SPL_MEMO_V1_ID, .splMemo }, + .{ SPL_MEMO_V3_ID, .splMemo }, + .{ sig.runtime.program.bpf_loader.v2.ID, .bpfLoader }, + .{ sig.runtime.program.bpf_loader.v3.ID, .bpfUpgradeableLoader }, + .{ sig.runtime.program.stake.ID, .stake }, + .{ sig.runtime.program.system.ID, .system }, + .{ sig.runtime.program.vote.ID, .vote }, + .{ sig.runtime.ids.TOKEN_PROGRAM_ID, .splToken }, + .{ sig.runtime.ids.TOKEN_2022_PROGRAM_ID, .splToken }, + }; + + pub fn fromID(program_id: Pubkey) ?ParsableProgram { + inline for (PARSABLE_PROGRAMS) |entry| { + if (program_id.equals(&entry[0])) return entry[1]; + } + return null; + } +}; + +pub const UiInnerInstructions = struct { + index: u8, + instructions: []const UiInstruction, + + pub fn jsonStringify(self: UiInnerInstructions, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("index"); + try jw.write(self.index); + try jw.objectField("instructions"); + try jw.beginArray(); + for (self.instructions) |ixn| { + try ixn.jsonStringify(jw); + } + try jw.endArray(); + try jw.endObject(); + } +}; + +pub const UiInstruction = union(enum) { + compiled: UiCompiledInstruction, + parsed: *const UiParsedInstruction, + + pub fn jsonStringify(self: UiInstruction, jw: anytype) !void { + 
switch (self) { + .compiled => |c| try c.jsonStringify(jw), + .parsed => |p| try p.jsonStringify(jw), + } + } +}; + +pub const UiParsedInstruction = union(enum) { + parsed: ParsedInstruction, + partially_decoded: UiPartiallyDecodedInstruction, + + pub fn jsonStringify(self: UiParsedInstruction, jw: anytype) !void { + switch (self) { + .parsed => |p| try p.jsonStringify(jw), + .partially_decoded => |pd| try pd.jsonStringify(jw), + } + } +}; + +pub const UiCompiledInstruction = struct { + programIdIndex: u8, + accounts: []const u8, + data: []const u8, + stackHeight: ?u32 = null, + + pub fn jsonStringify(self: UiCompiledInstruction, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accounts"); + try writeByteArrayAsJsonArray(jw, self.accounts); + try jw.objectField("data"); + try jw.write(self.data); + try jw.objectField("programIdIndex"); + try jw.write(self.programIdIndex); + if (self.stackHeight) |sh| { + try jw.objectField("stackHeight"); + try jw.write(sh); + } + try jw.endObject(); + } + + fn writeByteArrayAsJsonArray(jw: anytype, bytes: []const u8) @TypeOf(jw.*).Error!void { + try jw.beginArray(); + for (bytes) |b| { + try jw.write(b); + } + try jw.endArray(); + } +}; + +pub const UiPartiallyDecodedInstruction = struct { + programId: []const u8, + accounts: []const []const u8, + data: []const u8, + stackHeight: ?u32 = null, + + pub fn jsonStringify(self: UiPartiallyDecodedInstruction, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("accounts"); + try jw.write(self.accounts); + try jw.objectField("data"); + try jw.write(self.data); + try jw.objectField("programId"); + try jw.write(self.programId); + if (self.stackHeight) |sh| { + try jw.objectField("stackHeight"); + try jw.write(sh); + } + try jw.endObject(); + } +}; + +/// A parsed or partially-decoded instruction for jsonParsed mode. 
+/// In jsonParsed mode, known programs produce structured parsed output, +/// while unknown programs fall back to partially decoded representation. +pub const ParsedInstruction = struct { + /// Program name: "vote", "system", "spl-memo" + program: []const u8, + /// Program ID as base58 string + program_id: []const u8, + /// Pre-serialized JSON for the "parsed" field. + /// For vote/system: `{"type":"...", "info":{...}}` + /// For spl-memo: `""` + parsed: std.json.Value, + /// Stack height + stack_height: ?u32 = null, + + pub fn jsonStringify(self: ParsedInstruction, jw: anytype) !void { + try jw.beginObject(); + try jw.objectField("parsed"); + try jw.write(self.parsed); + try jw.objectField("program"); + try jw.write(self.program); + try jw.objectField("programId"); + try jw.write(self.program_id); + if (self.stack_height) |sh| { + try jw.objectField("stackHeight"); + try jw.write(sh); + } + try jw.endObject(); + } +}; + +fn allocParsed( + arena: Allocator, + value: UiParsedInstruction, +) !UiInstruction { + const ptr = try arena.create(UiParsedInstruction); + ptr.* = value; + return .{ .parsed = ptr }; +} + +pub fn parseUiInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, + stack_height: ?u32, +) !UiInstruction { + const ixn_idx: usize = @intCast(instruction.program_id_index); + const program_id = account_keys.get(ixn_idx).?; + return parseInstruction( + arena, + program_id, + instruction, + account_keys, + stack_height, + ) catch { + return allocParsed(arena, .{ .partially_decoded = try makeUiPartiallyDecodedInstruction( + arena, + instruction, + account_keys, + stack_height, + ) }); + }; +} + +pub fn parseUiInnerInstructions( + arena: Allocator, + inner_instructions: sig.ledger.transaction_status.InnerInstructions, + account_keys: *const AccountKeys, +) !UiInnerInstructions { + var instructions = try arena.alloc(UiInstruction, inner_instructions.instructions.len); + for 
(inner_instructions.instructions, 0..) |ixn, i| { + instructions[i] = try parseUiInstruction( + arena, + ixn.instruction, + account_keys, + ixn.stack_height, + ); + } + return .{ + .index = inner_instructions.index, + .instructions = instructions, + }; +} + +/// Try to parse a compiled instruction into a structured parsed instruction. +/// Falls back to partially decoded representation on failure. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_instruction.rs#L95 +pub fn parseInstruction( + arena: Allocator, + program_id: Pubkey, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, + stack_height: ?u32, +) !UiInstruction { + const program_name = ParsableProgram.fromID(program_id) orelse return error.ProgramNotParsable; + + switch (program_name) { + .addressLookupTable => { + return allocParsed(arena, .{ .parsed = .{ + .program = "address-lookup-table", + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseAddressLookupTableInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .splAssociatedTokenAccount => { + return allocParsed(arena, .{ .parsed = .{ + .program = "spl-associated-token-account", + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseAssociatedTokenInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .splMemo => { + return allocParsed(arena, .{ .parsed = .{ + .program = "spl-memo", + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseMemoInstruction(arena, instruction.data), + .stack_height = stack_height, + } }); + }, + .splToken => { + return allocParsed(arena, .{ .parsed = .{ + .program = "spl-token", + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try 
parseTokenInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .bpfLoader => { + return allocParsed(arena, .{ .parsed = .{ + .program = "bpf-loader", + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseBpfLoaderInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .bpfUpgradeableLoader => { + return allocParsed(arena, .{ .parsed = .{ + .program = "bpf-upgradeable-loader", + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseBpfUpgradeableLoaderInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .stake => { + return allocParsed(arena, .{ .parsed = .{ + .program = @tagName(program_name), + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseStakeInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .system => { + return allocParsed(arena, .{ .parsed = .{ + .program = @tagName(program_name), + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseSystemInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + .vote => { + return allocParsed(arena, .{ .parsed = .{ + .program = @tagName(program_name), + .program_id = try arena.dupe(u8, program_id.base58String().constSlice()), + .parsed = try parseVoteInstruction( + arena, + instruction, + account_keys, + ), + .stack_height = stack_height, + } }); + }, + } +} + +/// Fallback decoded representation of a compiled instruction +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/lib.rs#L96 +pub fn makeUiPartiallyDecodedInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const 
AccountKeys, + stack_height: ?u32, +) !UiPartiallyDecodedInstruction { + const program_id_index: usize = @intCast(instruction.program_id_index); + const program_id_str = if (account_keys.get(program_id_index)) |pk| + try arena.dupe(u8, pk.base58String().constSlice()) + else + try arena.dupe(u8, "unknown"); + + var accounts = try arena.alloc([]const u8, instruction.accounts.len); + for (instruction.accounts, 0..) |acct_idx, i| { + accounts[i] = if (account_keys.get(@intCast(acct_idx))) |pk| + try arena.dupe(u8, pk.base58String().constSlice()) + else + try arena.dupe(u8, "unknown"); + } + + return .{ + .programId = program_id_str, + .accounts = accounts, + .data = blk: { + const buf = try arena.alloc(u8, base58.encodedMaxSize(instruction.data.len)); + const len = base58.Table.BITCOIN.encode(buf, instruction.data); + break :blk try arena.dupe(u8, buf[0..len]); + }, + .stackHeight = stack_height, + }; +} + +/// Parse an SPL Memo instruction. The data is simply UTF-8 text. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_instruction.rs#L131 +fn parseMemoInstruction(arena: Allocator, data: []const u8) !JsonValue { + // Validate UTF-8 + if (!std.unicode.utf8ValidateSlice(data)) return error.InvalidUtf8; + + // Return as a JSON string value + return .{ .string = try arena.dupe(u8, data) }; +} + +/// Parse a vote instruction into a JSON Value. 
+/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_vote.rs#L11 +fn parseVoteInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, +) !JsonValue { + const ix = sig.bincode.readFromSlice( + arena, + sig.runtime.program.vote.Instruction, + instruction.data, + .{}, + ) catch { + return error.DeserializationFailed; + }; + for (instruction.accounts) |acc_idx| { + // Runtime should prevent this from ever happening + if (acc_idx >= account_keys.len()) return error.InstructionKeyMismatch; + } + + var result = ObjectMap.init(arena); + + switch (ix) { + .initialize_account => |init_acct| { + try checkNumVoteAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("node", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("authorizedVoter", try pubkeyToValue( + arena, + init_acct.authorized_voter, + )); + try info.put("authorizedWithdrawer", try pubkeyToValue( + arena, + init_acct.authorized_withdrawer, + )); + try info.put("commission", .{ .integer = @intCast(init_acct.commission) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initialize" }); + }, + .authorize => |auth| { + try checkNumVoteAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("clockSysvar", try pubkeyToValue( + arena, + 
account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("authority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("newAuthority", try pubkeyToValue(arena, auth.new_authority)); + try info.put("authorityType", voteAuthorizeToValue(auth.vote_authorize)); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorize" }); + }, + .authorize_with_seed => |aws| { + try checkNumVoteAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("authorityBaseKey", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("authorityOwner", try pubkeyToValue( + arena, + aws.current_authority_derived_key_owner, + )); + try info.put("authoritySeed", .{ .string = aws.current_authority_derived_key_seed }); + try info.put("authorityType", voteAuthorizeToValue(aws.authorization_type)); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("newAuthority", try pubkeyToValue(arena, aws.new_authority)); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeWithSeed" }); + }, + .authorize_checked_with_seed => |acws| { + try checkNumVoteAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("authorityBaseKey", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("authorityOwner", try pubkeyToValue( + arena, + acws.current_authority_derived_key_owner, + )); + try info.put("authoritySeed", .{ .string = acws.current_authority_derived_key_seed }); + try info.put("authorityType", voteAuthorizeToValue(acws.authorization_type)); + try info.put("clockSysvar", try pubkeyToValue( + arena, + 
account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("newAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeCheckedWithSeed" }); + }, + .vote => |v| { + try checkNumVoteAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("slotHashesSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("vote", try voteToValue(arena, v.vote)); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "vote" }); + }, + .update_vote_state => |vsu| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("voteStateUpdate", try voteStateUpdateToValue( + arena, + vsu.vote_state_update, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "updatevotestate" }); + }, + .update_vote_state_switch => |vsus| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("hash", try hashToValue(arena, vsus.hash)); + try info.put("voteAccount", try pubkeyToValue( + 
arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("voteStateUpdate", try voteStateUpdateToValue( + arena, + vsus.vote_state_update, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "updatevotestateswitch" }); + }, + .compact_update_vote_state => |cvsu| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("voteStateUpdate", try voteStateUpdateToValue( + arena, + cvsu.vote_state_update, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "compactupdatevotestate" }); + }, + .compact_update_vote_state_switch => |cvsus| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("hash", try hashToValue(arena, cvsus.hash)); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("voteStateUpdate", try voteStateUpdateToValue( + arena, + cvsus.vote_state_update, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "compactupdatevotestateswitch" }); + }, + .tower_sync => |ts| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("towerSync", try towerSyncToValue(arena, ts.tower_sync)); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try 
info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "towersync" }); + }, + .tower_sync_switch => |tss| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("hash", try hashToValue(arena, tss.hash)); + try info.put("towerSync", try towerSyncToValue(arena, tss.tower_sync)); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "towersyncswitch" }); + }, + .withdraw => |lamports| { + try checkNumVoteAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lamports", .{ .integer = @intCast(lamports) }); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("withdrawAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "withdraw" }); + }, + .update_validator_identity => { + try checkNumVoteAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("newValidatorIdentity", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("withdrawAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try 
result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "updateValidatorIdentity" }); + }, + .update_commission => |commission| { + try checkNumVoteAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("commission", .{ .integer = @intCast(commission) }); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("withdrawAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "updateCommission" }); + }, + .vote_switch => |vs| { + try checkNumVoteAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("hash", try hashToValue(arena, vs.hash)); + try info.put("slotHashesSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("vote", try voteToValue(arena, vs.vote)); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "voteSwitch" }); + }, + .authorize_checked => |auth_type| { + try checkNumVoteAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("authority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("authorityType", voteAuthorizeToValue(auth_type)); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("newAuthority", try pubkeyToValue( + arena, + 
account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeChecked" }); + }, + // TODO: .initializeAccount2 + // TODO: .updateCommissionCollector + // TODO: .updateComissionBps + } + + return .{ .object = result }; +} + +fn checkNumVoteAccounts(accounts: []const u8, num: usize) !void { + return checkNumAccounts(accounts, num, ParsableProgram.vote); +} + +/// Convert a Pubkey to a JSON string value +fn pubkeyToValue(arena: Allocator, pubkey: Pubkey) !JsonValue { + return .{ .string = try arena.dupe(u8, pubkey.base58String().constSlice()) }; +} + +/// Convert a Hash to a JSON string value +fn hashToValue(arena: Allocator, hash: Hash) !JsonValue { + return .{ .string = try arena.dupe(u8, hash.base58String().constSlice()) }; +} + +/// Convert VoteAuthorize to a JSON string value +fn voteAuthorizeToValue(auth: sig.runtime.program.vote.vote_instruction.VoteAuthorize) JsonValue { + return .{ .string = switch (auth) { + .voter => "Voter", + .withdrawer => "Withdrawer", + } }; +} + +/// Convert a Vote to a JSON Value object +fn voteToValue(arena: Allocator, vote: sig.runtime.program.vote.state.Vote) !JsonValue { + var obj = ObjectMap.init(arena); + + try obj.put("hash", try hashToValue(arena, vote.hash)); + + var slots_array = try std.array_list.AlignedManaged(JsonValue, null).initCapacity( + arena, + vote.slots.len, + ); + for (vote.slots) |slot| { + try slots_array.append(.{ .integer = @intCast(slot) }); + } + try obj.put("slots", .{ .array = slots_array }); + + try obj.put("timestamp", if (vote.timestamp) |ts| .{ .integer = ts } else .null); + + return .{ .object = obj }; +} + +/// Convert a VoteStateUpdate to a JSON Value object +fn voteStateUpdateToValue( + arena: Allocator, + vsu: sig.runtime.program.vote.state.VoteStateUpdate, +) !JsonValue { + var obj 
= ObjectMap.init(arena); + + try obj.put("hash", try hashToValue(arena, vsu.hash)); + try obj.put("lockouts", try lockoutsToValue(arena, vsu.lockouts.items)); + try obj.put("root", if (vsu.root) |root| .{ .integer = @intCast(root) } else .null); + try obj.put("timestamp", if (vsu.timestamp) |ts| .{ .integer = ts } else .null); + + return .{ .object = obj }; +} + +/// Convert a TowerSync to a JSON Value object +fn towerSyncToValue( + arena: Allocator, + ts: sig.runtime.program.vote.state.TowerSync, +) !JsonValue { + var obj = ObjectMap.init(arena); + + try obj.put("blockId", try hashToValue(arena, ts.block_id)); + try obj.put("hash", try hashToValue(arena, ts.hash)); + try obj.put("lockouts", try lockoutsToValue(arena, ts.lockouts.items)); + try obj.put("root", if (ts.root) |root| .{ .integer = @intCast(root) } else .null); + try obj.put("timestamp", if (ts.timestamp) |timestamp| .{ .integer = timestamp } else .null); + + return .{ .object = obj }; +} + +/// Convert an array of Lockouts to a JSON array value +fn lockoutsToValue( + arena: Allocator, + lockouts: []const sig.runtime.program.vote.state.Lockout, +) !JsonValue { + var arr = try std.array_list.AlignedManaged(JsonValue, null).initCapacity( + arena, + lockouts.len, + ); + + for (lockouts) |lockout| { + var lockout_obj = ObjectMap.init(arena); + try lockout_obj.put( + "confirmation_count", + .{ .integer = @intCast(lockout.confirmation_count) }, + ); + try lockout_obj.put("slot", .{ .integer = @intCast(lockout.slot) }); + try arr.append(.{ .object = lockout_obj }); + } + + return .{ .array = arr }; +} + +/// Parse a system instruction into a JSON Value. 
+/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_system.rs#L11 +fn parseSystemInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, +) !JsonValue { + const ix = sig.bincode.readFromSlice( + arena, + SystemInstruction, + instruction.data, + .{}, + ) catch { + return error.DeserializationFailed; + }; + for (instruction.accounts) |acc_idx| { + // Runtime should prevent this from ever happening + if (acc_idx >= account_keys.len()) return error.InstructionKeyMismatch; + } + + var result = ObjectMap.init(arena); + + switch (ix) { + .create_account => |ca| { + try checkNumSystemAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("lamports", .{ .integer = @intCast(ca.lamports) }); + try info.put("newAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("owner", try pubkeyToValue(arena, ca.owner)); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("space", .{ .integer = @intCast(ca.space) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "createAccount" }); + }, + .assign => |a| { + try checkNumSystemAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("owner", try pubkeyToValue(arena, a.owner)); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "assign" }); + }, + .transfer => |t| { + try checkNumSystemAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lamports", .{ .integer 
= @intCast(t.lamports) }); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "transfer" }); + }, + .create_account_with_seed => |cas| { + try checkNumSystemAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("base", try pubkeyToValue(arena, cas.base)); + try info.put("lamports", .{ .integer = @intCast(cas.lamports) }); + try info.put("newAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("owner", try pubkeyToValue(arena, cas.owner)); + try info.put("seed", .{ .string = cas.seed }); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("space", .{ .integer = @intCast(cas.space) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "createAccountWithSeed" }); + }, + .advance_nonce_account => { + try checkNumSystemAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("nonceAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("nonceAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("recentBlockhashesSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "advanceNonce" }); + }, + .withdraw_nonce_account => |lamports| { + try checkNumSystemAccounts(instruction.accounts, 5); + var info = ObjectMap.init(arena); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lamports", .{ .integer = @intCast(lamports) }); + try info.put("nonceAccount", try 
pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("nonceAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + try info.put("recentBlockhashesSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "withdrawFromNonce" }); + }, + .initialize_nonce_account => |authority| { + try checkNumSystemAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("nonceAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("nonceAuthority", try pubkeyToValue(arena, authority)); + try info.put("recentBlockhashesSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeNonce" }); + }, + .authorize_nonce_account => |new_authority| { + try checkNumSystemAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("newAuthorized", try pubkeyToValue(arena, new_authority)); + try info.put("nonceAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("nonceAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeNonce" }); + }, + .allocate => |a| { + try checkNumSystemAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( 
+ arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("space", .{ .integer = @intCast(a.space) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "allocate" }); + }, + .allocate_with_seed => |aws| { + try checkNumSystemAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("base", try pubkeyToValue(arena, aws.base)); + try info.put("owner", try pubkeyToValue(arena, aws.owner)); + try info.put("seed", .{ .string = aws.seed }); + try info.put("space", .{ .integer = @intCast(aws.space) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "allocateWithSeed" }); + }, + .assign_with_seed => |aws| { + try checkNumSystemAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("base", try pubkeyToValue(arena, aws.base)); + try info.put("owner", try pubkeyToValue(arena, aws.owner)); + try info.put("seed", .{ .string = aws.seed }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "assignWithSeed" }); + }, + .transfer_with_seed => |tws| { + try checkNumSystemAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("lamports", .{ .integer = @intCast(tws.lamports) }); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("sourceBase", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("sourceOwner", try pubkeyToValue(arena, tws.from_owner)); + try info.put("sourceSeed", .{ 
.string = tws.from_seed }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "transferWithSeed" }); + }, + .upgrade_nonce_account => { + try checkNumSystemAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("nonceAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "upgradeNonce" }); + }, + } + + return .{ .object = result }; +} + +fn checkNumSystemAccounts(accounts: []const u8, num: usize) !void { + return checkNumAccounts(accounts, num, ParsableProgram.system); +} + +/// Parse an address lookup table instruction into a JSON Value. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_address_lookup_table.rs#L11 +fn parseAddressLookupTableInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, +) !JsonValue { + const ix = sig.bincode.readFromSlice( + arena, + AddressLookupTableInstruction, + instruction.data, + .{}, + ) catch { + return error.DeserializationFailed; + }; + + for (instruction.accounts) |acc_idx| { + // Runtime should prevent this from ever happening + if (acc_idx >= account_keys.len()) return error.InstructionKeyMismatch; + } + + var result = ObjectMap.init(arena); + + switch (ix) { + .CreateLookupTable => |create| { + try checkNumAddressLookupTableAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("bumpSeed", .{ .integer = @intCast(create.bump_seed) }); + try info.put("lookupTableAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("lookupTableAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("payerAccount", try pubkeyToValue( + arena, + 
account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("recentSlot", .{ .integer = @intCast(create.recent_slot) }); + try info.put("systemProgram", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "createLookupTable" }); + }, + .FreezeLookupTable => { + try checkNumAddressLookupTableAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("lookupTableAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("lookupTableAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "freezeLookupTable" }); + }, + .ExtendLookupTable => |extend| { + try checkNumAddressLookupTableAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("lookupTableAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("lookupTableAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + // Build newAddresses array + var new_addresses_array = try std.array_list.AlignedManaged( + JsonValue, + null, + ).initCapacity( + arena, + extend.new_addresses.len, + ); + for (extend.new_addresses) |addr| { + try new_addresses_array.append(try pubkeyToValue(arena, addr)); + } + try info.put("newAddresses", .{ .array = new_addresses_array }); + // Optional payer and system program (only if >= 4 accounts) + if (instruction.accounts.len >= 4) { + try info.put("payerAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("systemProgram", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + } + try result.put("info", .{ 
.object = info }); + try result.put("type", .{ .string = "extendLookupTable" }); + }, + .DeactivateLookupTable => { + try checkNumAddressLookupTableAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("lookupTableAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("lookupTableAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "deactivateLookupTable" }); + }, + .CloseLookupTable => { + try checkNumAddressLookupTableAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("lookupTableAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("lookupTableAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("recipient", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "closeLookupTable" }); + }, + } + + return .{ .object = result }; +} + +/// Parse a stake instruction into a JSON Value. 
+/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_stake.rs#L11 +fn parseStakeInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, +) !JsonValue { + const ix = sig.bincode.readFromSlice(arena, StakeInstruction, instruction.data, .{}) catch { + return error.DeserializationFailed; + }; + + var result = ObjectMap.init(arena); + + switch (ix) { + .initialize => |init| { + try checkNumStakeAccounts(instruction.accounts, 2); + const authorized, const lockup = init; + var info = ObjectMap.init(arena); + // authorized object + var authorized_obj = ObjectMap.init(arena); + try authorized_obj.put("staker", try pubkeyToValue(arena, authorized.staker)); + try authorized_obj.put("withdrawer", try pubkeyToValue( + arena, + authorized.withdrawer, + )); + try info.put("authorized", .{ .object = authorized_obj }); + // lockup object + var lockup_obj = ObjectMap.init(arena); + try lockup_obj.put("custodian", try pubkeyToValue(arena, lockup.custodian)); + try lockup_obj.put("epoch", .{ .integer = @intCast(lockup.epoch) }); + try lockup_obj.put("unixTimestamp", .{ .integer = lockup.unix_timestamp }); + try info.put("lockup", .{ .object = lockup_obj }); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initialize" }); + }, + .authorize => |auth| { + try checkNumStakeAccounts(instruction.accounts, 3); + const new_authorized, const authority_type = auth; + var info = ObjectMap.init(arena); + try info.put("authority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("authorityType", 
stakeAuthorizeToValue(authority_type)); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + // Optional custodian + if (instruction.accounts.len >= 4) { + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + } + try info.put("newAuthority", try pubkeyToValue(arena, new_authorized)); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorize" }); + }, + .delegate_stake => { + try checkNumStakeAccounts(instruction.accounts, 6); + var info = ObjectMap.init(arena); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[5])).?, + )); + try info.put("stakeConfigAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + try info.put("stakeHistorySysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "delegate" }); + }, + .split => |lamports| { + try checkNumStakeAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("lamports", .{ .integer = @intCast(lamports) }); + try info.put("newSplitAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + 
account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "split" }); + }, + .withdraw => |lamports| { + try checkNumStakeAccounts(instruction.accounts, 5); + var info = ObjectMap.init(arena); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + // Optional custodian + if (instruction.accounts.len >= 6) { + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[5])).?, + )); + } + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lamports", .{ .integer = @intCast(lamports) }); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeHistorySysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("withdrawAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "withdraw" }); + }, + .deactivate => { + try checkNumStakeAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "deactivate" }); + }, + .set_lockup => |lockup_args| { + 
try checkNumStakeAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lockup", try lockupArgsToValue(arena, lockup_args)); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "setLockup" }); + }, + .merge => { + try checkNumStakeAccounts(instruction.accounts, 5); + var info = ObjectMap.init(arena); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + try info.put("stakeHistorySysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "merge" }); + }, + .authorize_with_seed => |aws| { + try checkNumStakeAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("authorityBase", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("authorityOwner", try pubkeyToValue(arena, aws.authority_owner)); + try info.put("authoritySeed", .{ .string = aws.authority_seed }); + try info.put("authorityType", stakeAuthorizeToValue(aws.stake_authorize)); + // Optional clockSysvar + if (instruction.accounts.len >= 3) { + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + } + // Optional 
custodian + if (instruction.accounts.len >= 4) { + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + } + try info.put("newAuthorized", try pubkeyToValue(arena, aws.new_authorized_pubkey)); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeWithSeed" }); + }, + .initialize_checked => { + try checkNumStakeAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("staker", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("withdrawer", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeChecked" }); + }, + .authorize_checked => |authority_type| { + try checkNumStakeAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("authority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("authorityType", stakeAuthorizeToValue(authority_type)); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + // Optional custodian + if (instruction.accounts.len >= 5) { + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + } + try info.put("newAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("stakeAccount", try 
pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeChecked" }); + }, + .authorize_checked_with_seed => |acws| { + try checkNumStakeAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("authorityBase", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("authorityOwner", try pubkeyToValue(arena, acws.authority_owner)); + try info.put("authoritySeed", .{ .string = acws.authority_seed }); + try info.put("authorityType", stakeAuthorizeToValue(acws.stake_authorize)); + try info.put("clockSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + // Optional custodian + if (instruction.accounts.len >= 5) { + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + } + try info.put("newAuthorized", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "authorizeCheckedWithSeed" }); + }, + .set_lockup_checked => |lockup_args| { + try checkNumStakeAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("custodian", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + var lockup_obj = ObjectMap.init(arena); + if (lockup_args.epoch) |epoch| { + try lockup_obj.put("epoch", .{ .integer = @intCast(epoch) }); + } + if (lockup_args.unix_timestamp) |ts| { + try lockup_obj.put("unixTimestamp", .{ .integer = ts }); + } + // Optional new custodian from account + if (instruction.accounts.len >= 3) { + try lockup_obj.put("custodian", try pubkeyToValue( + arena, 
+ account_keys.get(@intCast(instruction.accounts[2])).?, + )); + } + try info.put("lockup", .{ .object = lockup_obj }); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "setLockupChecked" }); + }, + .get_minimum_delegation => { + const info = ObjectMap.init(arena); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "getMinimumDelegation" }); + }, + .deactivate_delinquent => { + try checkNumStakeAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("referenceVoteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "deactivateDelinquent" }); + }, + ._redelegate => { + try checkNumStakeAccounts(instruction.accounts, 5); + var info = ObjectMap.init(arena); + try info.put("newStakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("stakeAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[4])).?, + )); + try info.put("stakeConfigAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try info.put("voteAccount", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "redelegate" }); + }, + 
.move_stake => |lamports| { + try checkNumStakeAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lamports", .{ .integer = @intCast(lamports) }); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "moveStake" }); + }, + .move_lamports => |lamports| { + try checkNumStakeAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("lamports", .{ .integer = @intCast(lamports) }); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("stakeAuthority", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "moveLamports" }); + }, + } + + return .{ .object = result }; +} + +fn checkNumStakeAccounts(accounts: []const u8, num: usize) !void { + return checkNumAccounts(accounts, num, ParsableProgram.stake); +} + +/// Convert StakeAuthorize to a JSON string value +fn stakeAuthorizeToValue(auth: StakeAuthorize) JsonValue { + return .{ .string = switch (auth) { + .staker => "Staker", + .withdrawer => "Withdrawer", + } }; +} + +/// Convert LockupArgs to a JSON Value object +fn lockupArgsToValue(arena: Allocator, lockup_args: StakeLockupArgs) !JsonValue { + var obj = ObjectMap.init(arena); + + if (lockup_args.custodian) |custodian| { + try obj.put("custodian", try pubkeyToValue(arena, custodian)); + } + if 
(lockup_args.epoch) |epoch| {
        try obj.put("epoch", .{ .integer = @intCast(epoch) });
    }
    if (lockup_args.unix_timestamp) |ts| {
        try obj.put("unixTimestamp", .{ .integer = ts });
    }

    return .{ .object = obj };
}

/// Parse a BPF upgradeable loader instruction into a JSON Value.
///
/// Errors:
/// - `error.DeserializationFailed` if `instruction.data` is not a valid
///   bincode-encoded `BpfUpgradeableLoaderInstruction`.
/// - `error.InstructionKeyMismatch` if any account index is out of range of
///   `account_keys`.
/// - `error.NotEnoughBpfUpgradeableLoaderAccounts` if an instruction variant
///   has fewer accounts than it requires.
///
/// All returned memory is owned by `arena`.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_bpf_loader.rs#L48
fn parseBpfUpgradeableLoaderInstruction(
    arena: Allocator,
    instruction: sig.ledger.transaction_status.CompiledInstruction,
    account_keys: *const AccountKeys,
) !JsonValue {
    const ix = sig.bincode.readFromSlice(
        arena,
        BpfUpgradeableLoaderInstruction,
        instruction.data,
        .{},
    ) catch {
        return error.DeserializationFailed;
    };

    // Validate account indices up front, consistent with the sibling parsers
    // in this file (parseAssociatedTokenInstruction, parseTokenInstruction).
    // Without this, the `.?` unwraps below panic on malformed input whose
    // account indices exceed the account-keys table.
    for (instruction.accounts) |acc_idx| {
        if (acc_idx >= account_keys.len()) {
            return error.InstructionKeyMismatch;
        }
    }

    var result = ObjectMap.init(arena);

    switch (ix) {
        .initialize_buffer => {
            // Uses the upgradeable-loader-specific account check so failures
            // surface as NotEnoughBpfUpgradeableLoaderAccounts (the dedicated
            // helper exists below and was previously unused).
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            // Optional authority
            if (instruction.accounts.len > 1) {
                try info.put("authority", try pubkeyToValue(
                    arena,
                    account_keys.get(@intCast(instruction.accounts[1])).?,
                ));
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeBuffer" });
        },
        .write => |w| {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            // Base64 encode the bytes
            const base64_encoder = std.base64.standard;
            const encoded_len = base64_encoder.Encoder.calcSize(w.bytes.len);
            const encoded = try arena.alloc(u8, encoded_len);
            _ = base64_encoder.Encoder.encode(encoded, w.bytes);
            try info.put("bytes", .{ .string = encoded });
            try info.put("offset", .{ .integer = @intCast(w.offset) });
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "write" });
        },
        .deploy_with_max_data_len => |deploy| {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 8);
            var info = ObjectMap.init(arena);
            try info.put("maxDataLen", .{ .integer = @intCast(deploy.max_data_len) });
            try info.put("payerAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("programDataAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("programAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try info.put("bufferAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[3])).?,
            ));
            try info.put("rentSysvar", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[4])).?,
            ));
            try info.put("clockSysvar", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[5])).?,
            ));
            try info.put("systemProgram", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[6])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[7])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "deployWithMaxDataLen" });
        },
        .upgrade => {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 7);
            var info = ObjectMap.init(arena);
            try info.put("programDataAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("programAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("bufferAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try info.put("spillAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[3])).?,
            ));
            try info.put("rentSysvar", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[4])).?,
            ));
            try info.put("clockSysvar", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[5])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[6])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "upgrade" });
        },
        .set_authority => {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            // Optional new authority; explicit JSON null when absent.
            if (instruction.accounts.len > 2) {
                if (account_keys.get(@intCast(instruction.accounts[2]))) |new_auth| {
                    try info.put("newAuthority", try pubkeyToValue(arena, new_auth));
                } else {
                    try info.put("newAuthority", .null);
                }
            } else {
                try info.put("newAuthority", .null);
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "setAuthority" });
        },
        .set_authority_checked => {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 3);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("newAuthority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "setAuthorityChecked" });
        },
        .close => {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 3);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("recipient", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            // Optional program account; explicit JSON null when absent.
            if (instruction.accounts.len > 3) {
                if (account_keys.get(@intCast(instruction.accounts[3]))) |prog| {
                    try info.put("programAccount", try pubkeyToValue(arena, prog));
                } else {
                    try info.put("programAccount", .null);
                }
            } else {
                try info.put("programAccount", .null);
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "close" });
        },
        .extend_program => |ext| {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("additionalBytes", .{ .integer = @intCast(ext.additional_bytes) });
            try info.put("programDataAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("programAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            // Optional system program; explicit JSON null when absent.
            if (instruction.accounts.len > 2) {
                if (account_keys.get(@intCast(instruction.accounts[2]))) |sys| {
                    try info.put("systemProgram", try pubkeyToValue(arena, sys));
                } else {
                    try info.put("systemProgram", .null);
                }
            } else {
                try info.put("systemProgram", .null);
            }
            // Optional payer; explicit JSON null when absent.
            if (instruction.accounts.len > 3) {
                if (account_keys.get(@intCast(instruction.accounts[3]))) |payer| {
                    try info.put("payerAccount", try pubkeyToValue(arena, payer));
                } else {
                    try info.put("payerAccount", .null);
                }
            } else {
                try info.put("payerAccount", .null);
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "extendProgram" });
        },
        .migrate => {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 3);
            var info = ObjectMap.init(arena);
            try info.put("programDataAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("programAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "migrate" });
        },
        .extend_program_checked => |ext| {
            try checkNumBpfUpgradeableLoaderAccounts(instruction.accounts, 3);
            var info = ObjectMap.init(arena);
            try info.put("additionalBytes", .{ .integer = @intCast(ext.additional_bytes) });
            try info.put("programDataAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("programAccount", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("authority", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            // Optional system program; explicit JSON null when absent.
            if (instruction.accounts.len > 3) {
                if (account_keys.get(@intCast(instruction.accounts[3]))) |sys| {
                    try info.put("systemProgram", try pubkeyToValue(arena, sys));
                } else {
                    try info.put("systemProgram", .null);
                }
            } else {
                try info.put("systemProgram", .null);
            }
            // Optional payer; explicit JSON null when absent.
            if (instruction.accounts.len > 4) {
                if (account_keys.get(@intCast(instruction.accounts[4]))) |payer| {
                    try info.put("payerAccount", try pubkeyToValue(arena, payer));
                } else {
                    try info.put("payerAccount", .null);
                }
            } else {
                try info.put("payerAccount", .null);
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "extendProgramChecked" });
        },
    }

    return .{ .object = result };
}

fn
checkNumBpfLoaderAccounts(accounts: []const u8, num: usize) !void {
    return checkNumAccounts(accounts, num, .bpfLoader);
}

fn checkNumBpfUpgradeableLoaderAccounts(accounts: []const u8, num: usize) !void {
    return checkNumAccounts(accounts, num, .bpfUpgradeableLoader);
}

fn checkNumAddressLookupTableAccounts(accounts: []const u8, num: usize) !void {
    return checkNumAccounts(accounts, num, .addressLookupTable);
}

/// Ensure an instruction references at least `num` accounts, returning a
/// program-specific "not enough accounts" error otherwise.
fn checkNumAccounts(
    accounts: []const u8,
    num: usize,
    program: ParsableProgram,
) !void {
    if (accounts.len >= num) return;
    return switch (program) {
        .addressLookupTable => error.NotEnoughAddressLookupTableAccounts,
        .splAssociatedTokenAccount => error.NotEnoughSplAssociatedTokenAccountAccounts,
        .splMemo => error.NotEnoughSplMemoAccounts,
        .splToken => error.NotEnoughSplTokenAccounts,
        .bpfLoader => error.NotEnoughBpfLoaderAccounts,
        .bpfUpgradeableLoader => error.NotEnoughBpfUpgradeableLoaderAccounts,
        .stake => error.NotEnoughStakeAccounts,
        .system => error.NotEnoughSystemAccounts,
        .vote => error.NotEnoughVoteAccounts,
    };
}

/// Parse a BPF Loader v2 instruction into a JSON Value.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_bpf_loader.rs#L13
fn parseBpfLoaderInstruction(
    arena: Allocator,
    instruction: sig.ledger.transaction_status.CompiledInstruction,
    account_keys: *const AccountKeys,
) !JsonValue {
    const parsed = sig.bincode.readFromSlice(
        arena,
        BpfLoaderInstruction,
        instruction.data,
        .{},
    ) catch return error.DeserializationFailed;

    // The first account (the account being written / finalized) must exist
    // and index into the account-keys table.
    if (instruction.accounts.len == 0 or instruction.accounts[0] >= account_keys.len()) {
        return error.InstructionKeyMismatch;
    }

    var result = ObjectMap.init(arena);

    switch (parsed) {
        .write => |write_args| {
            try checkNumBpfLoaderAccounts(instruction.accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("offset", .{ .integer = @intCast(write_args.offset) });
            // Program bytes are rendered as base64.
            const encoder = std.base64.standard.Encoder;
            const encoded = try arena.alloc(u8, encoder.calcSize(write_args.bytes.len));
            _ = encoder.encode(encoded, write_args.bytes);
            try info.put("bytes", .{ .string = encoded });
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "write" });
        },
        .finalize => {
            try checkNumBpfLoaderAccounts(instruction.accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "finalize" });
        },
    }

    return .{ .object = result };
}

/// Parse an Associated Token Account instruction into a JSON Value.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_associated_token.rs#L11
fn parseAssociatedTokenInstruction(
    arena: Allocator,
    instruction: sig.ledger.transaction_status.CompiledInstruction,
    account_keys: *const AccountKeys,
) !JsonValue {
    // Validate account indices don't exceed account_keys length
    for (instruction.accounts) |acc_idx| {
        if (acc_idx >= account_keys.len()) {
            return error.InstructionKeyMismatch;
        }
    }

    // Empty data means Create; otherwise the first byte is the instruction
    // tag. (A second `data.len < 1` check here would be dead code: the else
    // branch is only reached when data.len != 0.)
    const ata_instruction: AssociatedTokenAccountInstruction = if (instruction.data.len == 0)
        .create
    else
        std.meta.intToEnum(AssociatedTokenAccountInstruction, instruction.data[0]) catch {
            return error.DeserializationFailed;
        };

    var result = ObjectMap.init(arena);

    switch (ata_instruction) {
        .create => {
            try checkNumAssociatedTokenAccounts(instruction.accounts, 6);
            var info = ObjectMap.init(arena);
            try info.put("source", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("wallet", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[3])).?,
            ));
            try info.put("systemProgram", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[4])).?,
            ));
            try info.put("tokenProgram", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[5])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "create" });
        },
        .create_idempotent => {
            try checkNumAssociatedTokenAccounts(instruction.accounts, 6);
            var info = ObjectMap.init(arena);
            try info.put("source", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("wallet", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[3])).?,
            ));
            try info.put("systemProgram", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[4])).?,
            ));
            try info.put("tokenProgram", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[5])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "createIdempotent" });
        },
        .recover_nested => {
            try checkNumAssociatedTokenAccounts(instruction.accounts, 7);
            var info = ObjectMap.init(arena);
            try info.put("nestedSource", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[0])).?,
            ));
            try info.put("nestedMint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[1])).?,
            ));
            try info.put("destination", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[2])).?,
            ));
            try info.put("nestedOwner", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[3])).?,
            ));
            try info.put("ownerMint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[4])).?,
            ));
            try info.put("wallet", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[5])).?,
            ));
            try info.put("tokenProgram", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(instruction.accounts[6])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "recoverNested" });
        },
    }

    return .{ .object = result };
}

fn checkNumAssociatedTokenAccounts(accounts: []const u8, num: usize) !void {
    return checkNumAccounts(accounts, num, .splAssociatedTokenAccount);
}

/// SPL Token instruction tag (first byte)
/// [agave] https://github.com/solana-program/token/blob/f403c97ed4522469c2e320b8b4a2941f24c40a5e/interface/src/instruction.rs#L478
const TokenInstructionTag = enum(u8) {
    initializeMint = 0,
    initializeAccount = 1,
    initializeMultisig = 2,
    transfer = 3,
    approve = 4,
    revoke = 5,
    setAuthority = 6,
    mintTo = 7,
    burn = 8,
    closeAccount = 9,
    freezeAccount = 10,
    thawAccount = 11,
    transferChecked = 12,
    approveChecked = 13,
    mintToChecked = 14,
    burnChecked = 15,
    initializeAccount2 = 16,
    syncNative = 17,
    initializeAccount3 = 18,
    initializeMultisig2 = 19,
    initializeMint2 = 20,
    getAccountDataSize = 21,
    initializeImmutableOwner = 22,
    amountToUiAmount = 23,
    uiAmountToAmount = 24,
    initializeMintCloseAuthority = 25,
    // Extensions start at higher values
    transferFeeExtension = 26,
    confidentialTransferExtension = 27,
    defaultAccountStateExtension = 28,
    reallocate = 29,
    memoTransferExtension = 30,
    createNativeMint = 31,
    initializeNonTransferableMint = 32,
    interestBearingMintExtension = 33,
    cpiGuardExtension = 34,
    initializePermanentDelegate = 35,
    transferHookExtension = 36,
    confidentialTransferFeeExtension = 37,
    withdrawExcessLamports = 38,
    metadataPointerExtension = 39,
    groupPointerExtension = 40,
    groupMemberPointerExtension = 41,
    confidentialMintBurnExtension = 42,
    scaledUiAmountExtension = 43,
    pausableExtension = 44,
};

/// Authority type for SetAuthority instruction
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token.rs#L730
const TokenAuthorityType = enum(u8) {
    mintTokens = 0,
    freezeAccount = 1,
    accountOwner = 2,
    closeAccount = 3,
    transferFeeConfig = 4,
    withheldWithdraw =
5, + closeMint = 6, + interestRate = 7, + permanentDelegate = 8, + confidentialTransferMint = 9, + transferHookProgramId = 10, + confidentialTransferFeeConfig = 11, + metadataPointer = 12, + groupPointer = 13, + groupMemberPointer = 14, + scaledUiAmount = 15, + pause = 16, +}; + +/// Parse an SPL Token instruction into a JSON Value. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token.rs#L30 +fn parseTokenInstruction( + arena: Allocator, + instruction: sig.ledger.transaction_status.CompiledInstruction, + account_keys: *const AccountKeys, +) !JsonValue { + // Validate account indices don't exceed account_keys length + for (instruction.accounts) |acc_idx| { + if (acc_idx >= account_keys.len()) { + return error.InstructionKeyMismatch; + } + } + + if (instruction.data.len == 0) { + return error.DeserializationFailed; + } + + const tag = std.meta.intToEnum(TokenInstructionTag, instruction.data[0]) catch { + return error.DeserializationFailed; + }; + + var result = ObjectMap.init(arena); + + switch (tag) { + .initializeMint => { + try checkNumTokenAccounts(instruction.accounts, 2); + if (instruction.data.len < 35) return error.DeserializationFailed; + const decimals = instruction.data[1]; + const mint_authority = Pubkey{ .data = instruction.data[2..34].* }; + // freeze_authority is optional: 1 byte tag + 32 bytes pubkey + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("decimals", .{ .integer = @intCast(decimals) }); + try info.put("mintAuthority", try pubkeyToValue(arena, mint_authority)); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + if (instruction.data.len >= 67 and instruction.data[34] == 1) { + const freeze_authority = Pubkey{ .data = instruction.data[35..67].* }; + try info.put("freezeAuthority", try 
pubkeyToValue(arena, freeze_authority)); + } + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeMint" }); + }, + .initializeMint2 => { + try checkNumTokenAccounts(instruction.accounts, 1); + if (instruction.data.len < 35) return error.DeserializationFailed; + const decimals = instruction.data[1]; + const mint_authority = Pubkey{ .data = instruction.data[2..34].* }; + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("decimals", .{ .integer = @intCast(decimals) }); + try info.put("mintAuthority", try pubkeyToValue(arena, mint_authority)); + if (instruction.data.len >= 67 and instruction.data[34] == 1) { + const freeze_authority = Pubkey{ .data = instruction.data[35..67].* }; + try info.put("freezeAuthority", try pubkeyToValue(arena, freeze_authority)); + } + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeMint2" }); + }, + .initializeAccount => { + try checkNumTokenAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("owner", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[3])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeAccount" }); + }, + .initializeAccount2 => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 33) return error.DeserializationFailed; + const owner = Pubkey{ .data = instruction.data[1..33].* }; + var info = ObjectMap.init(arena); + try 
info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("owner", try pubkeyToValue(arena, owner)); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeAccount2" }); + }, + .initializeAccount3 => { + try checkNumTokenAccounts(instruction.accounts, 2); + if (instruction.data.len < 33) return error.DeserializationFailed; + const owner = Pubkey{ .data = instruction.data[1..33].* }; + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("owner", try pubkeyToValue(arena, owner)); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeAccount3" }); + }, + .initializeMultisig => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 2) return error.DeserializationFailed; + const m = instruction.data[1]; + var info = ObjectMap.init(arena); + try info.put("multisig", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("rentSysvar", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + var signers = try std.array_list.AlignedManaged(JsonValue, null).initCapacity( + arena, + instruction.accounts[2..].len, + ); + for (instruction.accounts[2..]) |signer_idx| { + try signers.append(try pubkeyToValue( + arena, + account_keys.get(@intCast(signer_idx)).?, + )); + } + try info.put("signers", .{ .array = signers }); + try info.put("m", .{ .integer = 
@intCast(m) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeMultisig" }); + }, + .initializeMultisig2 => { + try checkNumTokenAccounts(instruction.accounts, 2); + if (instruction.data.len < 2) return error.DeserializationFailed; + const m = instruction.data[1]; + var info = ObjectMap.init(arena); + try info.put("multisig", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + var signers = try std.array_list.AlignedManaged(JsonValue, null).initCapacity( + arena, + instruction.accounts[1..].len, + ); + for (instruction.accounts[1..]) |signer_idx| { + try signers.append(try pubkeyToValue( + arena, + account_keys.get(@intCast(signer_idx)).?, + )); + } + try info.put("signers", .{ .array = signers }); + try info.put("m", .{ .integer = @intCast(m) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeMultisig2" }); + }, + .transfer => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 9) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + var info = ObjectMap.init(arena); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("amount", .{ .string = try std.fmt.allocPrint( + arena, + "{d}", + .{amount}, + ) }); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "authority", + "multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "transfer" }); + }, + .approve => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 9) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + var 
info = ObjectMap.init(arena); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("delegate", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("amount", .{ .string = try std.fmt.allocPrint( + arena, + "{d}", + .{amount}, + ) }); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "owner", + "multisigOwner", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "approve" }); + }, + .revoke => { + try checkNumTokenAccounts(instruction.accounts, 2); + var info = ObjectMap.init(arena); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try parseSigners( + arena, + &info, + 1, + account_keys, + instruction.accounts, + "owner", + "multisigOwner", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "revoke" }); + }, + .setAuthority => { + try checkNumTokenAccounts(instruction.accounts, 2); + if (instruction.data.len < 3) return error.DeserializationFailed; + const authority_type = std.meta.intToEnum( + TokenAuthorityType, + instruction.data[1], + ) catch TokenAuthorityType.mintTokens; + const owned_field = switch (authority_type) { + .mintTokens, + .freezeAccount, + .transferFeeConfig, + .withheldWithdraw, + .closeMint, + .interestRate, + .permanentDelegate, + .confidentialTransferMint, + .transferHookProgramId, + .confidentialTransferFeeConfig, + .metadataPointer, + .groupPointer, + .groupMemberPointer, + .scaledUiAmount, + .pause, + => "mint", + .accountOwner, .closeAccount => "account", + }; + var info = ObjectMap.init(arena); + try info.put(owned_field, try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("authorityType", .{ .string = @tagName(authority_type) }); + // new_authority: COption - 1 byte tag + 32 
bytes pubkey + if (instruction.data.len >= 35 and instruction.data[2] == 1) { + const new_authority = Pubkey{ .data = instruction.data[3..35].* }; + try info.put("newAuthority", try pubkeyToValue(arena, new_authority)); + } else { + try info.put("newAuthority", .null); + } + try parseSigners( + arena, + &info, + 1, + account_keys, + instruction.accounts, + "authority", + "multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "setAuthority" }); + }, + .mintTo => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 9) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("amount", .{ .string = try std.fmt.allocPrint( + arena, + "{d}", + .{amount}, + ) }); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "mintAuthority", + "multisigMintAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "mintTo" }); + }, + .burn => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 9) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("amount", .{ .string = try std.fmt.allocPrint( + arena, + "{d}", + .{amount}, + ) }); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "authority", + 
"multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "burn" }); + }, + .closeAccount => { + try checkNumTokenAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "owner", + "multisigOwner", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "closeAccount" }); + }, + .freezeAccount => { + try checkNumTokenAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "freezeAuthority", + "multisigFreezeAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "freezeAccount" }); + }, + .thawAccount => { + try checkNumTokenAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "freezeAuthority", + "multisigFreezeAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "thawAccount" }); + }, + .transferChecked => { + try checkNumTokenAccounts(instruction.accounts, 4); + if (instruction.data.len < 10) return 
error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + const decimals = instruction.data[9]; + var info = ObjectMap.init(arena); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("tokenAmount", try tokenAmountToUiAmount(arena, amount, decimals)); + try parseSigners( + arena, + &info, + 3, + account_keys, + instruction.accounts, + "authority", + "multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "transferChecked" }); + }, + .approveChecked => { + try checkNumTokenAccounts(instruction.accounts, 4); + if (instruction.data.len < 10) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + const decimals = instruction.data[9]; + var info = ObjectMap.init(arena); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("delegate", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try info.put("tokenAmount", try tokenAmountToUiAmount(arena, amount, decimals)); + try parseSigners( + arena, + &info, + 3, + account_keys, + instruction.accounts, + "owner", + "multisigOwner", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "approveChecked" }); + }, + .mintToChecked => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 10) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, 
instruction.data[1..9], .little); + const decimals = instruction.data[9]; + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("tokenAmount", try tokenAmountToUiAmount(arena, amount, decimals)); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "mintAuthority", + "multisigMintAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "mintToChecked" }); + }, + .burnChecked => { + try checkNumTokenAccounts(instruction.accounts, 3); + if (instruction.data.len < 10) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + const decimals = instruction.data[9]; + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("tokenAmount", try tokenAmountToUiAmount(arena, amount, decimals)); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "authority", + "multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "burnChecked" }); + }, + .syncNative => { + try checkNumTokenAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "syncNative" }); + }, + .getAccountDataSize => { + try checkNumTokenAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( 
+ arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + // Extension types are in remaining data, but we'll skip detailed parsing for now + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "getAccountDataSize" }); + }, + .initializeImmutableOwner => { + try checkNumTokenAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeImmutableOwner" }); + }, + .amountToUiAmount => { + try checkNumTokenAccounts(instruction.accounts, 1); + if (instruction.data.len < 9) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, instruction.data[1..9], .little); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("amount", .{ .string = try std.fmt.allocPrint( + arena, + "{d}", + .{amount}, + ) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "amountToUiAmount" }); + }, + .uiAmountToAmount => { + try checkNumTokenAccounts(instruction.accounts, 1); + // ui_amount is a string in remaining bytes + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + if (instruction.data.len > 1) { + try info.put("uiAmount", .{ .string = instruction.data[1..] 
}); + } + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "uiAmountToAmount" }); + }, + .initializeMintCloseAuthority => { + try checkNumTokenAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + // close_authority: COption + if (instruction.data.len >= 34 and instruction.data[1] == 1) { + const close_authority = Pubkey{ .data = instruction.data[2..34].* }; + try info.put("closeAuthority", try pubkeyToValue(arena, close_authority)); + } else { + try info.put("closeAuthority", .null); + } + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeMintCloseAuthority" }); + }, + .createNativeMint => { + try checkNumTokenAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("payer", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("nativeMint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("systemProgram", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "createNativeMint" }); + }, + .initializeNonTransferableMint => { + try checkNumTokenAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeNonTransferableMint" }); + }, + .initializePermanentDelegate => { + try checkNumTokenAccounts(instruction.accounts, 1); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + if 
(instruction.data.len >= 33) { + const delegate = Pubkey{ .data = instruction.data[1..33].* }; + try info.put("delegate", try pubkeyToValue(arena, delegate)); + } + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializePermanentDelegate" }); + }, + .withdrawExcessLamports => { + try checkNumTokenAccounts(instruction.accounts, 3); + var info = ObjectMap.init(arena); + try info.put("source", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("destination", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try parseSigners( + arena, + &info, + 2, + account_keys, + instruction.accounts, + "authority", + "multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "withdrawExcessLamports" }); + }, + .reallocate => { + try checkNumTokenAccounts(instruction.accounts, 4); + var info = ObjectMap.init(arena); + try info.put("account", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[0])).?, + )); + try info.put("payer", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[1])).?, + )); + try info.put("systemProgram", try pubkeyToValue( + arena, + account_keys.get(@intCast(instruction.accounts[2])).?, + )); + try parseSigners( + arena, + &info, + 3, + account_keys, + instruction.accounts, + "owner", + "multisigOwner", + ); + // extension_types in remaining data - skip for now + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "reallocate" }); + }, + .transferFeeExtension => { + const ext_data = instruction.data[1..]; + const sub_result = try parseTransferFeeExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .confidentialTransferExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + 
const sub_result = try parseConfidentialTransferExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .defaultAccountStateExtension => { + if (instruction.data.len <= 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseDefaultAccountStateExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .memoTransferExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseMemoTransferExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .interestBearingMintExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseInterestBearingMintExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .cpiGuardExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseCpiGuardExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .transferHookExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseTransferHookExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .confidentialTransferFeeExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseConfidentialTransferFeeExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .metadataPointerExtension => { + if (instruction.data.len < 2) return 
error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseMetadataPointerExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .groupPointerExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseGroupPointerExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .groupMemberPointerExtension => { + if (instruction.data.len < 2) return error.DeserializationFailed; + const ext_data = instruction.data[1..]; + const sub_result = try parseGroupMemberPointerExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .confidentialMintBurnExtension => { + const ext_data = instruction.data[1..]; + const sub_result = try parseConfidentialMintBurnExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .scaledUiAmountExtension => { + const ext_data = instruction.data[1..]; + const sub_result = try parseScaledUiAmountExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + .pausableExtension => { + const ext_data = instruction.data[1..]; + const sub_result = try parsePausableExtension( + arena, + ext_data, + instruction.accounts, + account_keys, + ); + return sub_result; + }, + } + + return .{ .object = result }; +} + +fn checkNumTokenAccounts(accounts: []const u8, num: usize) !void { + return checkNumAccounts(accounts, num, .splToken); +} + +/// Helper to read an OptionalNonZeroPubkey (32 bytes, all zeros = None) +fn readOptionalNonZeroPubkey(data: []const u8, offset: usize) ?Pubkey { + if (data.len < offset + 32) return null; + const bytes = data[offset..][0..32]; + if (std.mem.eql(u8, bytes, &([_]u8{0} ** 32))) return null; + return Pubkey{ .data = bytes.* }; +} + +/// Helper to read a COption: 4 
bytes tag (LE) + 32 bytes pubkey if tag == 1 +/// Returns the pubkey if present, null if tag == 0, and the number of bytes consumed. +fn readCOptionPubkey(data: []const u8, offset: usize) !struct { pubkey: ?Pubkey, len: usize } { + if (data.len < offset + 4) return error.DeserializationFailed; + const tag = std.mem.readInt(u32, data[offset..][0..4], .little); + if (tag == 0) { + return .{ .pubkey = null, .len = 4 }; + } else if (tag == 1) { + if (data.len < offset + 4 + 32) return error.DeserializationFailed; + return .{ .pubkey = Pubkey{ .data = data[offset + 4 ..][0..32].* }, .len = 36 }; + } else { + return error.DeserializationFailed; + } +} + +/// Parse a TransferFee extension sub-instruction. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/transfer_fee.rs +fn parseTransferFeeExtension( + arena: Allocator, + ext_data: []const u8, + accounts: []const u8, + account_keys: *const AccountKeys, +) !JsonValue { + if (ext_data.len < 1) return error.DeserializationFailed; + const sub_tag = ext_data[0]; + const data = ext_data[1..]; + + var result = ObjectMap.init(arena); + + switch (sub_tag) { + // InitializeTransferFeeConfig + 0 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + // COption transfer_fee_config_authority + const auth1 = try readCOptionPubkey(data, 0); + if (auth1.pubkey) |pk| { + try info.put("transferFeeConfigAuthority", try pubkeyToValue(arena, pk)); + } + // COption withdraw_withheld_authority + const auth2 = try readCOptionPubkey(data, auth1.len); + if (auth2.pubkey) |pk| { + try info.put("withdrawWithheldAuthority", try pubkeyToValue(arena, pk)); + } + const fee_offset = auth1.len + auth2.len; + if (data.len < fee_offset + 10) return error.DeserializationFailed; + const basis_points = std.mem.readInt(u16, data[fee_offset..][0..2], .little); + const maximum_fee = std.mem.readInt(u64, data[fee_offset + 2 ..][0..8], .little); + 
try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put("transferFeeBasisPoints", .{ .integer = @intCast(basis_points) }); + try info.put("maximumFee", .{ .integer = @intCast(maximum_fee) }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "initializeTransferFeeConfig" }); + }, + // TransferCheckedWithFee + 1 => { + try checkNumTokenAccounts(accounts, 4); + if (data.len < 17) return error.DeserializationFailed; + const amount = std.mem.readInt(u64, data[0..8], .little); + const decimals = data[8]; + const fee = std.mem.readInt(u64, data[9..17], .little); + var info = ObjectMap.init(arena); + try info.put( + "source", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "destination", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + try info.put("tokenAmount", try tokenAmountToUiAmount(arena, amount, decimals)); + try info.put("feeAmount", try tokenAmountToUiAmount(arena, fee, decimals)); + try parseSigners( + arena, + &info, + 3, + account_keys, + accounts, + "authority", + "multisigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "transferCheckedWithFee" }); + }, + // WithdrawWithheldTokensFromMint + 2 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "feeRecipient", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try parseSigners( + arena, + &info, + 2, + account_keys, + accounts, + "withdrawWithheldAuthority", + "multisigWithdrawWithheldAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = 
"withdrawWithheldTokensFromMint" }); + }, + // WithdrawWithheldTokensFromAccounts + 3 => { + if (data.len < 1) return error.DeserializationFailed; + const num_token_accounts = data[0]; + try checkNumTokenAccounts(accounts, 3 + @as(usize, num_token_accounts)); + var info = ObjectMap.init(arena); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "feeRecipient", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + // Source accounts are the last num_token_accounts + const first_source = accounts.len - @as(usize, num_token_accounts); + var source_accounts = try std.array_list.AlignedManaged(JsonValue, null).initCapacity( + arena, + num_token_accounts, + ); + for (accounts[first_source..]) |acc_idx| { + try source_accounts.append(try pubkeyToValue( + arena, + account_keys.get(@intCast(acc_idx)).?, + )); + } + try info.put("sourceAccounts", .{ .array = source_accounts }); + try parseSigners( + arena, + &info, + 2, + account_keys, + accounts[0..first_source], + "withdrawWithheldAuthority", + "multisigWithdrawWithheldAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "withdrawWithheldTokensFromAccounts" }); + }, + // HarvestWithheldTokensToMint + 4 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(accounts[0])).?, + )); + var source_accounts = try std.array_list.AlignedManaged(JsonValue, null).initCapacity( + arena, + if (accounts.len > 1) accounts.len - 1 else 0, + ); + for (accounts[1..]) |acc_idx| { + try source_accounts.append(try pubkeyToValue( + arena, + account_keys.get(@intCast(acc_idx)).?, + )); + } + try info.put("sourceAccounts", .{ .array = source_accounts }); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "harvestWithheldTokensToMint" }); + }, + // SetTransferFee + 5 => 
{ + try checkNumTokenAccounts(accounts, 2); + if (data.len < 10) return error.DeserializationFailed; + const basis_points = std.mem.readInt(u16, data[0..2], .little); + const maximum_fee = std.mem.readInt(u64, data[2..10], .little); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(accounts[0])).?, + )); + try info.put("transferFeeBasisPoints", .{ .integer = @intCast(basis_points) }); + try info.put("maximumFee", .{ .integer = @intCast(maximum_fee) }); + try parseSigners( + arena, + &info, + 1, + account_keys, + accounts, + "transferFeeConfigAuthority", + "multisigtransferFeeConfigAuthority", + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "setTransferFee" }); + }, + else => return error.DeserializationFailed, + } + + return .{ .object = result }; +} + +/// Parse a ConfidentialTransfer extension sub-instruction. +/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/confidential_transfer.rs +fn parseConfidentialTransferExtension( + arena: Allocator, + ext_data: []const u8, + accounts: []const u8, + account_keys: *const AccountKeys, +) !JsonValue { + if (ext_data.len < 1) return error.DeserializationFailed; + const sub_tag = ext_data[0]; + + var result = ObjectMap.init(arena); + + switch (sub_tag) { + // InitializeMint + 0 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + try info.put("mint", try pubkeyToValue( + arena, + account_keys.get(@intCast(accounts[0])).?, + )); + // Authority is an OptionalNonZeroPubkey (32 bytes) + if (ext_data.len >= 33) { + if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| { + try info.put("authority", try pubkeyToValue(arena, pk)); + } + } + // TODO: parse autoApproveNewAccounts and auditorElGamalPubkey from data + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = 
"initializeConfidentialTransferMint" }); + }, + // UpdateMint + 1 => { + try checkNumTokenAccounts(accounts, 2); + var info = ObjectMap.init(arena); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "confidentialTransferMintAuthority", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "updateConfidentialTransferMint" }); + }, + // ConfigureAccount + 2 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "configureConfidentialTransferAccount" }); + }, + // ApproveAccount + 3 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "confidentialTransferAuditorAuthority", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "approveConfidentialTransferAccount" }); + }, + // EmptyAccount + 4 => { + try checkNumTokenAccounts(accounts, 2); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "emptyConfidentialTransferAccount" }); + }, + // Deposit + 5 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + 
"source", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "destination", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + // Parse amount and decimals from data if available + if (ext_data.len >= 10) { + const amount = std.mem.readInt(u64, ext_data[1..9], .little); + const decimals = ext_data[9]; + try info.put("amount", .{ .integer = @intCast(amount) }); + try info.put("decimals", .{ .integer = @intCast(decimals) }); + } + try parseSigners(arena, &info, 3, account_keys, accounts, "owner", "multisigOwner"); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "depositConfidentialTransfer" }); + }, + // Withdraw + 6 => { + try checkNumTokenAccounts(accounts, 4); + var info = ObjectMap.init(arena); + try info.put( + "source", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "destination", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "withdrawConfidentialTransfer" }); + }, + // Transfer + 7 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + "source", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "destination", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "confidentialTransfer" }); + }, + // ApplyPendingBalance + 8 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); 
+ try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try parseSigners(arena, &info, 0, account_keys, accounts, "owner", "multisigOwner"); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "applyPendingConfidentialTransferBalance" }); + }, + // EnableConfidentialCredits + 9 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try parseSigners(arena, &info, 0, account_keys, accounts, "owner", "multisigOwner"); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "enableConfidentialTransferConfidentialCredits" }); + }, + // DisableConfidentialCredits + 10 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try parseSigners(arena, &info, 0, account_keys, accounts, "owner", "multisigOwner"); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "disableConfidentialTransferConfidentialCredits" }); + }, + // EnableNonConfidentialCredits + 11 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try parseSigners(arena, &info, 0, account_keys, accounts, "owner", "multisigOwner"); + try result.put("info", .{ .object = info }); + try result.put( + "type", + .{ .string = "enableConfidentialTransferNonConfidentialCredits" }, + ); + }, + // DisableNonConfidentialCredits + 12 => { + try checkNumTokenAccounts(accounts, 1); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try parseSigners(arena, &info, 0, account_keys, accounts, "owner", 
"multisigOwner"); + try result.put("info", .{ .object = info }); + try result.put( + "type", + .{ .string = "disableConfidentialTransferNonConfidentialCredits" }, + ); + }, + // TransferWithFee + 13 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + "source", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "destination", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "confidentialTransferWithFee" }); + }, + // ConfigureAccountWithRegistry + 14 => { + try checkNumTokenAccounts(accounts, 3); + var info = ObjectMap.init(arena); + try info.put( + "account", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?), + ); + try info.put( + "mint", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[1])).?), + ); + try info.put( + "registry", + try pubkeyToValue(arena, account_keys.get(@intCast(accounts[2])).?), + ); + try result.put("info", .{ .object = info }); + try result.put("type", .{ .string = "configureConfidentialAccountWithRegistry" }); + }, + else => return error.DeserializationFailed, + } + + return .{ .object = result }; +} + +/// Parse a DefaultAccountState extension sub-instruction. 
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/default_account_state.rs
/// Builds a parsed-JSON object `{ "info": {...}, "type": "..." }`. All returned
/// values are allocated from `arena`; caller frees by resetting the arena.
/// Returns error.DeserializationFailed on an unknown sub-tag or state byte.
fn parseDefaultAccountStateExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    // Need at least the sub-tag byte and the account-state byte.
    if (ext_data.len < 2) return error.DeserializationFailed;
    const sub_tag = ext_data[0];
    // Account state is the byte after the sub-tag
    const account_state_byte = ext_data[1];
    const account_state: []const u8 = switch (account_state_byte) {
        0 => "uninitialized",
        1 => "initialized",
        2 => "frozen",
        else => return error.DeserializationFailed,
    };

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put(
                "mint",
                try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?),
            );
            try info.put("accountState", .{ .string = account_state });
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeDefaultAccountState" });
        },
        // Update
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put(
                "mint",
                try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?),
            );
            try info.put("accountState", .{ .string = account_state });
            // Accounts after index 0 are the freeze authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "freezeAuthority",
                "multisigFreezeAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateDefaultAccountState" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a MemoTransfer extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/memo_transfer.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; the caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parseMemoTransferExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;

    // Enable (0) and Disable (1) share an identical account layout; only the
    // reported instruction type differs, so resolve it up front.
    const type_name: []const u8 = switch (ext_data[0]) {
        0 => "enableRequiredMemoTransfers",
        1 => "disableRequiredMemoTransfers",
        else => return error.DeserializationFailed,
    };

    try checkNumTokenAccounts(accounts, 2);

    var info = ObjectMap.init(arena);
    try info.put(
        "account",
        try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?),
    );
    // Accounts after index 0 are the owner (single signer or multisig).
    try parseSigners(arena, &info, 1, account_keys, accounts, "owner", "multisigOwner");

    var result = ObjectMap.init(arena);
    try result.put("info", .{ .object = info });
    try result.put("type", .{ .string = type_name });

    return .{ .object = result };
}

/// Parse an InterestBearingMint extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/interest_bearing_mint.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parseInterestBearingMintExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { rate_authority: COption, rate: i16 }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put(
                "mint",
                try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?),
            );
            // COption rate_authority followed by i16 rate
            // NOTE(review): the `1 + 4` minimum assumes a 4-byte COption tag at
            // offset 1 — confirm against readCOptionPubkey's encoding.
            if (ext_data.len >= 1 + 4) {
                const auth = try readCOptionPubkey(ext_data, 1);
                if (auth.pubkey) |pk| {
                    try info.put("rateAuthority", try pubkeyToValue(arena, pk));
                } else {
                    try info.put("rateAuthority", .null);
                }
                // The rate immediately follows however many bytes the COption used.
                const rate_offset = 1 + auth.len;
                if (ext_data.len >= rate_offset + 2) {
                    const rate = std.mem.readInt(i16, ext_data[rate_offset..][0..2], .little);
                    try info.put("rate", .{ .integer = @intCast(rate) });
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeInterestBearingConfig" });
        },
        // UpdateRate { rate: i16 }
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put(
                "mint",
                try pubkeyToValue(arena, account_keys.get(@intCast(accounts[0])).?),
            );
            if (ext_data.len >= 3) {
                const rate = std.mem.readInt(i16, ext_data[1..3], .little);
                try info.put("newRate", .{ .integer = @intCast(rate) });
            }
            // Accounts after index 0 are the rate authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "rateAuthority",
                "multisigRateAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateInterestBearingConfigRate" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a CpiGuard extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/cpi_guard.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parseCpiGuardExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Enable
        0 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(arena, &info, 1, account_keys, accounts, "owner", "multisigOwner");
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "enableCpiGuard" });
        },
        // Disable
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(arena, &info, 1, account_keys, accounts, "owner", "multisigOwner");
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "disableCpiGuard" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a TransferHook extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/transfer_hook.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. OptionalNonZeroPubkey fields that decode to null are
/// simply omitted from the info object. Returns error.DeserializationFailed on
/// an unknown sub-tag.
fn parseTransferHookExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { authority: OptionalNonZeroPubkey, program_id: OptionalNonZeroPubkey }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // authority: 32 bytes at offset 1
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                }
            }
            // program_id: 32 bytes at offset 33
            if (ext_data.len >= 65) {
                if (readOptionalNonZeroPubkey(ext_data, 33)) |pk| {
                    try info.put("programId", try pubkeyToValue(arena, pk));
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeTransferHook" });
        },
        // Update { program_id: OptionalNonZeroPubkey }
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("programId", try pubkeyToValue(arena, pk));
                }
            }
            // Accounts after index 0 are the authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateTransferHook" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a ConfidentialTransferFee extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/confidential_transfer_fee.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parseConfidentialTransferFeeExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // InitializeConfidentialTransferFeeConfig
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // OptionalNonZeroPubkey authority (32 bytes) + PodElGamalPubkey (32 bytes)
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeConfidentialTransferFeeConfig" });
        },
        // WithdrawWithheldTokensFromMint
        1 => {
            try checkNumTokenAccounts(accounts, 3);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try info.put("feeRecipient", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[1])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put(
                "type",
                .{ .string = "withdrawWithheldConfidentialTransferTokensFromMint" },
            );
        },
        // WithdrawWithheldTokensFromAccounts
        2 => {
            try checkNumTokenAccounts(accounts, 3);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try info.put("feeRecipient", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[1])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put(
                "type",
                .{ .string = "withdrawWithheldConfidentialTransferTokensFromAccounts" },
            );
        },
        // HarvestWithheldTokensToMint
        3 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // All accounts after the mint are the harvested source accounts.
            var source_accounts = try std.array_list.AlignedManaged(JsonValue, null).initCapacity(
                arena,
                if (accounts.len > 1) accounts.len - 1 else 0,
            );
            for (accounts[1..]) |acc_idx| {
                try source_accounts.append(try pubkeyToValue(
                    arena,
                    account_keys.get(@intCast(acc_idx)).?,
                ));
            }
            try info.put("sourceAccounts", .{ .array = source_accounts });
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "harvestWithheldConfidentialTransferTokensToMint" });
        },
        // EnableHarvestToMint
        4 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(arena, &info, 1, account_keys, accounts, "owner", "multisigOwner");
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "enableConfidentialTransferFeeHarvestToMint" });
        },
        // DisableHarvestToMint
        5 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("account", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(arena, &info, 1, account_keys, accounts, "owner", "multisigOwner");
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "disableConfidentialTransferFeeHarvestToMint" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a MetadataPointer extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/metadata_pointer.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Null OptionalNonZeroPubkey fields are omitted.
/// Returns error.DeserializationFailed on an unknown sub-tag.
fn parseMetadataPointerExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { authority: OptionalNonZeroPubkey, metadata_address: OptionalNonZeroPubkey }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // authority at offset 1, metadata_address at offset 33
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                }
            }
            if (ext_data.len >= 65) {
                if (readOptionalNonZeroPubkey(ext_data, 33)) |pk| {
                    try info.put("metadataAddress", try pubkeyToValue(arena, pk));
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeMetadataPointer" });
        },
        // Update { metadata_address: OptionalNonZeroPubkey }
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("metadataAddress", try pubkeyToValue(arena, pk));
                }
            }
            // Accounts after index 0 are the authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateMetadataPointer" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a GroupPointer extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/group_pointer.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Null OptionalNonZeroPubkey fields are omitted.
/// Returns error.DeserializationFailed on an unknown sub-tag.
fn parseGroupPointerExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { authority: OptionalNonZeroPubkey, group_address: OptionalNonZeroPubkey }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // authority at offset 1, group_address at offset 33
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                }
            }
            if (ext_data.len >= 65) {
                if (readOptionalNonZeroPubkey(ext_data, 33)) |pk| {
                    try info.put("groupAddress", try pubkeyToValue(arena, pk));
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeGroupPointer" });
        },
        // Update { group_address: OptionalNonZeroPubkey }
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("groupAddress", try pubkeyToValue(arena, pk));
                }
            }
            // Accounts after index 0 are the authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateGroupPointer" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a GroupMemberPointer extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/group_member_pointer.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Null OptionalNonZeroPubkey fields are omitted.
/// Returns error.DeserializationFailed on an unknown sub-tag.
fn parseGroupMemberPointerExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { authority: OptionalNonZeroPubkey, member_address: OptionalNonZeroPubkey }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // authority at offset 1, member_address at offset 33
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                }
            }
            if (ext_data.len >= 65) {
                if (readOptionalNonZeroPubkey(ext_data, 33)) |pk| {
                    try info.put("memberAddress", try pubkeyToValue(arena, pk));
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeGroupMemberPointer" });
        },
        // Update { member_address: OptionalNonZeroPubkey }
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("memberAddress", try pubkeyToValue(arena, pk));
                }
            }
            // Accounts after index 0 are the authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateGroupMemberPointer" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a ConfidentialMintBurn extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/confidential_mint_burn.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parseConfidentialMintBurnExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // InitializeMint
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeConfidentialMintBurnMint" });
        },
        // RotateSupplyElGamalPubkey
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "rotateConfidentialMintBurnSupplyElGamalPubkey" });
        },
        // UpdateDecryptableSupply
        2 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(arena, &info, 0, account_keys, accounts, "owner", "multisigOwner");
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateConfidentialMintBurnDecryptableSupply" });
        },
        // Mint
        3 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("destination", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[1])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "confidentialMint" });
        },
        // Burn
        4 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            // NOTE(review): both Mint and Burn label accounts[0] "destination";
            // confirm the Burn field name against agave's parser.
            try info.put("destination", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[1])).?,
            ));
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "confidentialBurn" });
        },
        // ApplyPendingBurn
        5 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(arena, &info, 0, account_keys, accounts, "owner", "multisigOwner");
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "applyPendingBurn" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a ScaledUiAmount extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/scaled_ui_amount.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parseScaledUiAmountExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { authority: OptionalNonZeroPubkey, multiplier: f64 }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            // authority: 32 bytes at offset 1; explicitly emitted as null when absent
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                } else {
                    try info.put("authority", .null);
                }
            }
            // multiplier: little-endian f64 bits at offset 33
            if (ext_data.len >= 41) {
                const multiplier_bytes = ext_data[33..41];
                const multiplier: f64 = @bitCast(std.mem.readInt(
                    u64,
                    multiplier_bytes[0..8],
                    .little,
                ));
                try info.put("multiplier", .{ .string = try std.fmt.allocPrint(
                    arena,
                    "{d}",
                    .{multiplier},
                ) });
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializeScaledUiAmountConfig" });
        },
        // UpdateMultiplier { multiplier: f64, effective_timestamp: i64 }
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            if (ext_data.len >= 9) {
                const multiplier: f64 = @bitCast(std.mem.readInt(u64, ext_data[1..9], .little));
                try info.put("newMultiplier", .{ .string = try std.fmt.allocPrint(
                    arena,
                    "{d}",
                    .{multiplier},
                ) });
            }
            if (ext_data.len >= 17) {
                const timestamp = std.mem.readInt(i64, ext_data[9..17], .little);
                try info.put("newMultiplierTimestamp", .{ .integer = timestamp });
            }
            // Accounts after index 0 are the authority (single or multisig).
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "updateMultiplier" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse a Pausable extension sub-instruction.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token/extension/pausable.rs
/// Builds `{ "info": {...}, "type": "..." }` from `arena`; caller frees by
/// resetting the arena. Returns error.DeserializationFailed on an unknown sub-tag.
fn parsePausableExtension(
    arena: Allocator,
    ext_data: []const u8,
    accounts: []const u8,
    account_keys: *const AccountKeys,
) !JsonValue {
    if (ext_data.len < 1) return error.DeserializationFailed;
    const sub_tag = ext_data[0];

    var result = ObjectMap.init(arena);

    switch (sub_tag) {
        // Initialize { authority: OptionalNonZeroPubkey }
        0 => {
            try checkNumTokenAccounts(accounts, 1);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            if (ext_data.len >= 33) {
                if (readOptionalNonZeroPubkey(ext_data, 1)) |pk| {
                    try info.put("authority", try pubkeyToValue(arena, pk));
                } else {
                    try info.put("authority", .null);
                }
            }
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "initializePausableConfig" });
        },
        // Pause
        1 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "pause" });
        },
        // Resume
        2 => {
            try checkNumTokenAccounts(accounts, 2);
            var info = ObjectMap.init(arena);
            try info.put("mint", try pubkeyToValue(
                arena,
                account_keys.get(@intCast(accounts[0])).?,
            ));
            try parseSigners(
                arena,
                &info,
                1,
                account_keys,
                accounts,
                "authority",
                "multisigAuthority",
            );
            try result.put("info", .{ .object = info });
            try result.put("type", .{ .string = "resume" });
        },
        else => return error.DeserializationFailed,
    }

    return .{ .object = result };
}

/// Parse signers for SPL Token instructions.
/// [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/transaction-status/src/parse_token.rs#L850
/// Adds either `owner_field_name` (single signer) or `multisig_field_name` plus
/// a "signers" array (multisig) to `info`. `last_nonsigner_index` is the index
/// within `accounts` of the owner/authority account; any accounts past it are
/// treated as multisig signers.
fn parseSigners(
    arena: Allocator,
    info: *ObjectMap,
    last_nonsigner_index: usize,
    account_keys: *const AccountKeys,
    accounts: []const u8,
    owner_field_name: []const u8,
    multisig_field_name: []const u8,
) !void {
    if (accounts.len > last_nonsigner_index + 1) {
        // Multisig case
        var signers = try std.array_list.AlignedManaged(JsonValue, null).initCapacity(
            arena,
            accounts[last_nonsigner_index + 1 ..].len,
        );
        for (accounts[last_nonsigner_index + 1 ..]) |signer_idx| {
            try signers.append(try pubkeyToValue(
                arena,
                account_keys.get(@intCast(signer_idx)).?,
            ));
        }
        try info.put(multisig_field_name, try pubkeyToValue(
            arena,
            account_keys.get(@intCast(accounts[last_nonsigner_index])).?,
        ));
        try info.put("signers", .{ .array = signers });
    } else {
        // Single signer case
        try info.put(owner_field_name, try pubkeyToValue(
            arena,
            account_keys.get(@intCast(accounts[last_nonsigner_index])).?,
        ));
    }
}

/// Convert token amount to UI amount format matching Agave's token_amount_to_ui_amount_v3.
/// Returns `{ amount, decimals, uiAmount, uiAmountString }` as a JSON object.
/// All strings are arena-allocated; caller frees by resetting the arena.
fn tokenAmountToUiAmount(arena: Allocator, amount: u64, decimals: u8) !JsonValue {
    var obj = ObjectMap.init(arena);

    const amount_str = try std.fmt.allocPrint(arena, "{d}", .{amount});
    try obj.put("amount", .{ .string = amount_str });
    try obj.put("decimals", .{ .integer = @intCast(decimals) });

    // Calculate UI amount
    if (decimals == 0) {
        // No scaling: the UI amount string is just the integer amount.
        const ui_amount_str = try std.fmt.allocPrint(arena, "{d}", .{amount});
        try obj.put("uiAmount", .{ .number_string = try exactFloat(
            arena,
            @floatFromInt(amount),
        ) });
        try obj.put("uiAmountString", .{ .string = ui_amount_str });
    } else {
        // uiAmount is the (possibly lossy) f64 quotient; uiAmountString is the
        // exact decimal rendering of amount / 10^decimals.
        const divisor: f64 = std.math.pow(f64, 10.0, @floatFromInt(decimals));
        const ui_amount: f64 = @as(f64, @floatFromInt(amount)) / divisor;
        try obj.put("uiAmount", .{ .number_string = try exactFloat(arena, ui_amount) });
        const ui_amount_str = try sig.runtime.spl_token.realNumberStringTrimmed(
            arena,
            amount,
            decimals,
        );
        try obj.put("uiAmountString", .{ .string = ui_amount_str });
    }

    return .{ .object = obj };
}

/// Format an f64 as a JSON number string matching Rust's serde_json output.
/// Zig's std.json serializes 3.0 as "3e0", but serde serializes it as "3.0".
/// Returned slice is arena-allocated; caller frees by resetting the arena.
fn exactFloat(arena: Allocator, value: f64) ![]const u8 {
    // Print via the arena instead of a fixed 64-byte stack buffer: "{d}" renders
    // full decimal notation, which for large f64 magnitudes (up to ~1.8e308)
    // needs 300+ characters — the old bufPrint + `catch unreachable` would hit
    // error.NoSpaceLeft and panic on such values.
    const result = try std.fmt.allocPrint(arena, "{d}", .{value});
    // {d} format omits the decimal point for whole numbers (e.g. "3" instead of
    // "3.0"). Append ".0" to match serde's behavior of always including a
    // decimal for floats — but only for finite values, so "inf"/"nan" are not
    // mangled into "inf.0"/"nan.0".
    if (std.math.isFinite(value) and std.mem.indexOfScalar(u8, result, '.') == null) {
        return std.fmt.allocPrint(arena, "{s}.0", .{result});
    }
    return result;
}

/// Format a UI amount with the specified number of decimal places.
+fn formatUiAmount(arena: Allocator, value: f64, decimals: u8) ![]const u8 { + // Format the float value manually with the right precision + var buf: [64]u8 = undefined; + const result = std.fmt.bufPrint(&buf, "{d}", .{value}) catch return error.FormatError; + + // Find decimal point + const dot_idx = std.mem.indexOf(u8, result, ".") orelse { + // No decimal point, add trailing zeros + var output = try std.ArrayList(u8).initCapacity(arena, result.len + 1 + decimals); + try output.appendSlice(arena, result); + try output.append(arena, '.'); + for (0..decimals) |_| { + try output.append(arena, '0'); + } + return try output.toOwnedSlice(arena); + }; + + // Has decimal point - pad or truncate to desired precision + const after_dot = result.len - dot_idx - 1; + if (after_dot >= decimals) { + const slice = result[0 .. dot_idx + 1 + decimals]; + var output = try std.ArrayList(u8).initCapacity( + arena, + slice.len, + ); + // Truncate + try output.appendSlice(arena, slice); + return try output.toOwnedSlice(arena); + } else { + var output = try std.ArrayList(u8).initCapacity( + arena, + result.len + (decimals - after_dot), + ); + // Pad with zeros + try output.appendSlice(arena, result); + for (0..(decimals - after_dot)) |_| { + try output.append(arena, '0'); + } + return try output.toOwnedSlice(arena); + } +} + +test "parse_instruction.ParsableProgram.fromID: known programs" { + try std.testing.expectEqual( + ParsableProgram.system, + ParsableProgram.fromID(sig.runtime.program.system.ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.vote, + ParsableProgram.fromID(sig.runtime.program.vote.ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.stake, + ParsableProgram.fromID(sig.runtime.program.stake.ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.bpfUpgradeableLoader, + ParsableProgram.fromID(sig.runtime.program.bpf_loader.v3.ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.bpfLoader, + 
ParsableProgram.fromID(sig.runtime.program.bpf_loader.v2.ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.splToken, + ParsableProgram.fromID(sig.runtime.ids.TOKEN_PROGRAM_ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.splToken, + ParsableProgram.fromID(sig.runtime.ids.TOKEN_2022_PROGRAM_ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.addressLookupTable, + ParsableProgram.fromID(sig.runtime.program.address_lookup_table.ID).?, + ); +} + +test "parse_instruction.ParsableProgram.fromID: unknown program returns null" { + // Note: Pubkey.ZEROES matches the system program, so use different values + try std.testing.expectEqual( + @as(?ParsableProgram, null), + ParsableProgram.fromID(Pubkey{ .data = [_]u8{0xAB} ** 32 }), + ); + try std.testing.expectEqual( + @as(?ParsableProgram, null), + ParsableProgram.fromID(Pubkey{ .data = [_]u8{0xFF} ** 32 }), + ); +} + +test "parse_instruction.ParsableProgram.fromID: spl-memo programs" { + try std.testing.expectEqual( + ParsableProgram.splMemo, + ParsableProgram.fromID(SPL_MEMO_V1_ID).?, + ); + try std.testing.expectEqual( + ParsableProgram.splMemo, + ParsableProgram.fromID(SPL_MEMO_V3_ID).?, + ); +} + +test "parse_instruction.ParsableProgram.fromID: spl-associated-token-account" { + try std.testing.expectEqual( + ParsableProgram.splAssociatedTokenAccount, + ParsableProgram.fromID(SPL_ASSOCIATED_TOKEN_ACC_ID).?, + ); +} + +test "parse_instruction.parseMemoInstruction: valid UTF-8" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + const result = try parseMemoInstruction(allocator, "hello world"); + try std.testing.expectEqualStrings("hello world", result.string); +} + +test "parse_instruction.parseMemoInstruction: empty data" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + const result = try 
parseMemoInstruction(allocator, ""); + try std.testing.expectEqualStrings("", result.string); +} + +test makeUiPartiallyDecodedInstruction { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + const key0 = Pubkey{ .data = [_]u8{1} ** 32 }; + const key1 = Pubkey{ .data = [_]u8{2} ** 32 }; + const key2 = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ key0, key1, key2 }; + const account_keys = AccountKeys.init(&static_keys, null); + + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 2, + .accounts = &.{ 0, 1 }, + .data = &.{ 1, 2, 3 }, + }; + + const result = try makeUiPartiallyDecodedInstruction( + allocator, + instruction, + &account_keys, + 3, + ); + + // Verify program ID is base58 of key2 + try std.testing.expectEqualStrings( + key2.base58String().constSlice(), + result.programId, + ); + // Verify accounts are resolved to base58 strings + try std.testing.expectEqual(@as(usize, 2), result.accounts.len); + try std.testing.expectEqualStrings( + key0.base58String().constSlice(), + result.accounts[0], + ); + try std.testing.expectEqualStrings( + key1.base58String().constSlice(), + result.accounts[1], + ); + // stackHeight preserved + try std.testing.expectEqual(@as(?u32, 3), result.stackHeight); +} + +test "parse_instruction.parseUiInstruction: unknown program falls back to partially decoded" { + // Use arena allocator since parse functions allocate many small objects + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + // Use a random pubkey that's not a known program + const unknown_program = Pubkey{ .data = [_]u8{0xFF} ** 32 }; + const key0 = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{ key0, unknown_program }; + const account_keys = AccountKeys.init(&static_keys, null); + + const instruction = 
sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 1, // unknown_program + .accounts = &.{0}, + .data = &.{42}, + }; + + const result = try parseUiInstruction( + allocator, + instruction, + &account_keys, + null, + ); + + // Should be a parsed variant (partially decoded) + switch (result) { + .parsed => |p| { + switch (p.*) { + .partially_decoded => |pd| { + try std.testing.expectEqualStrings( + unknown_program.base58String().constSlice(), + pd.programId, + ); + try std.testing.expectEqual(@as(usize, 1), pd.accounts.len); + }, + .parsed => return error.UnexpectedResult, + } + }, + .compiled => return error.UnexpectedResult, + } +} + +test "parse_instruction.parseInstruction: system transfer" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const system_id = sig.runtime.program.system.ID; + const sender = Pubkey{ .data = [_]u8{1} ** 32 }; + const receiver = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ sender, receiver, system_id }; + const account_keys = AccountKeys.init(&static_keys, null); + + // Build a system transfer instruction (bincode encoded) + // SystemInstruction::Transfer { lamports: u64 } is tag 2 (u32) + lamports (u64) + var data: [12]u8 = undefined; + std.mem.writeInt(u32, data[0..4], 2, .little); // transfer variant + std.mem.writeInt(u64, data[4..12], 1_000_000, .little); // 1M lamports + + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 2, + .accounts = &.{ 0, 1 }, + .data = &data, + }; + + const result = try parseInstruction( + allocator, + system_id, + instruction, + &account_keys, + null, + ); + + // Verify it's a parsed instruction + switch (result) { + .parsed => |p| { + switch (p.*) { + .parsed => |pi| { + try std.testing.expectEqualStrings("system", pi.program); + // Verify the parsed JSON contains "transfer" type + const type_val = 
pi.parsed.object.get("type").?; + try std.testing.expectEqualStrings("transfer", type_val.string); + // Verify the info contains lamports + const info_val = pi.parsed.object.get("info").?; + const lamports = info_val.object.get("lamports").?; + try std.testing.expectEqual(@as(i64, 1_000_000), lamports.integer); + }, + .partially_decoded => return error.UnexpectedResult, + } + }, + .compiled => return error.UnexpectedResult, + } +} + +test "parse_instruction.parseInstruction: spl-memo" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const memo_id = SPL_MEMO_V3_ID; + const signer = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{ signer, memo_id }; + const account_keys = AccountKeys.init(&static_keys, null); + + const memo_text = "Hello, Solana!"; + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 1, + .accounts = &.{0}, + .data = memo_text, + }; + + const result = try parseInstruction( + allocator, + memo_id, + instruction, + &account_keys, + null, + ); + + switch (result) { + .parsed => |p| { + switch (p.*) { + .parsed => |pi| { + try std.testing.expectEqualStrings("spl-memo", pi.program); + // Memo parsed value is a JSON string + try std.testing.expectEqualStrings("Hello, Solana!", pi.parsed.string); + }, + .partially_decoded => return error.UnexpectedResult, + } + }, + .compiled => return error.UnexpectedResult, + } +} + +/// Helper to build token extension instruction data: +/// [outer_tag, sub_tag, ...payload] +fn buildExtensionData(comptime outer_tag: u8, sub_tag: u8, payload: []const u8) []const u8 { + var data: [512]u8 = undefined; + data[0] = outer_tag; + data[1] = sub_tag; + if (payload.len > 0) { + @memcpy(data[2..][0..payload.len], payload); + } + return data[0 .. 
2 + payload.len]; +} + +/// Helper to set up test account keys for extension tests +fn setupExtensionTestKeys(comptime n: usize) struct { keys: [n]Pubkey, account_keys: AccountKeys } { + var keys: [n]Pubkey = undefined; + for (0..n) |i| { + keys[i] = Pubkey{ .data = [_]u8{@intCast(i + 1)} ** 32 }; + } + return .{ .keys = keys, .account_keys = undefined }; +} + +test "parseTransferFeeExtension: initializeTransferFeeConfig" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth1 = Pubkey{ .data = [_]u8{2} ** 32 }; + const auth2 = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth1, auth2 }; + const account_keys = AccountKeys.init(&static_keys, null); + + // Build data: sub_tag=0, COption(1, auth1), COption(1, auth2), u16 basis_points, u64 max_fee + var payload: [82]u8 = undefined; + // COption tag=1 (Some) for auth1 + std.mem.writeInt(u32, payload[0..4], 1, .little); + @memcpy(payload[4..36], &auth1.data); + // COption tag=1 (Some) for auth2 + std.mem.writeInt(u32, payload[36..40], 1, .little); + @memcpy(payload[40..72], &auth2.data); + // transfer_fee_basis_points=100 + std.mem.writeInt(u16, payload[72..74], 100, .little); + // maximum_fee=1000000 + std.mem.writeInt(u64, payload[74..82], 1000000, .little); + + const result = try parseTransferFeeExtension( + allocator, + &([_]u8{0} ++ payload), + &.{0}, + &account_keys, + ); + const info = result.object.get("info").?.object; + try std.testing.expectEqualStrings( + "initializeTransferFeeConfig", + result.object.get("type").?.string, + ); + try std.testing.expectEqual(@as(i64, 100), info.get("transferFeeBasisPoints").?.integer); + try std.testing.expectEqual(@as(i64, 1000000), info.get("maximumFee").?.integer); + try std.testing.expect(info.get("transferFeeConfigAuthority") != null); + try 
std.testing.expect(info.get("withdrawWithheldAuthority") != null); +} + +test "parseTransferFeeExtension: setTransferFee" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=5, u16 basis_points, u64 max_fee + var payload: [10]u8 = undefined; + std.mem.writeInt(u16, payload[0..2], 50, .little); + std.mem.writeInt(u64, payload[2..10], 500000, .little); + + const ext_data = [_]u8{5} ++ payload; + const result = try parseTransferFeeExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + const info = result.object.get("info").?.object; + try std.testing.expectEqualStrings("setTransferFee", result.object.get("type").?.string); + try std.testing.expectEqual(@as(i64, 50), info.get("transferFeeBasisPoints").?.integer); + try std.testing.expectEqual(@as(i64, 500000), info.get("maximumFee").?.integer); +} + +test "parseTransferFeeExtension: transferCheckedWithFee" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const source = Pubkey{ .data = [_]u8{1} ** 32 }; + const mint = Pubkey{ .data = [_]u8{2} ** 32 }; + const dest = Pubkey{ .data = [_]u8{3} ** 32 }; + const auth = Pubkey{ .data = [_]u8{4} ** 32 }; + const static_keys = [_]Pubkey{ source, mint, dest, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=1, u64 amount, u8 decimals, u64 fee + var payload: [17]u8 = undefined; + std.mem.writeInt(u64, payload[0..8], 1000, .little); + payload[8] = 6; // decimals + std.mem.writeInt(u64, payload[9..17], 10, .little); + + const ext_data = [_]u8{1} ++ payload; + const result = try parseTransferFeeExtension( + allocator, + &ext_data, + &.{ 
0, 1, 2, 3 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "transferCheckedWithFee", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("source") != null); + try std.testing.expect(info.get("mint") != null); + try std.testing.expect(info.get("destination") != null); + try std.testing.expect(info.get("tokenAmount") != null); + try std.testing.expect(info.get("feeAmount") != null); +} + +test "parseTransferFeeExtension: withdrawWithheldTokensFromMint" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const recipient = Pubkey{ .data = [_]u8{2} ** 32 }; + const auth = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, recipient, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{2}; // sub_tag=2, no data + const result = try parseTransferFeeExtension( + allocator, + &ext_data, + &.{ 0, 1, 2 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "withdrawWithheldTokensFromMint", + result.object.get("type").?.string, + ); +} + +test "parseTransferFeeExtension: harvestWithheldTokensToMint" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const source1 = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, source1 }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{4}; // sub_tag=4 + const result = try parseTransferFeeExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings( + "harvestWithheldTokensToMint", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try 
std.testing.expectEqual(@as(usize, 1), info.get("sourceAccounts").?.array.items.len); +} + +test "parseTransferFeeExtension: invalid sub-tag returns error" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{99}; // invalid sub_tag + try std.testing.expectError(error.DeserializationFailed, parseTransferFeeExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + )); +} + +test "parseTransferFeeExtension: empty data returns error" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + try std.testing.expectError(error.DeserializationFailed, parseTransferFeeExtension( + allocator, + &.{}, + &.{0}, + &account_keys, + )); +} + +test "parseDefaultAccountStateExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0 (Initialize), account_state=2 (Frozen) + const ext_data = [_]u8{ 0, 2 }; + const result = try parseDefaultAccountStateExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializeDefaultAccountState", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expectEqualStrings("frozen", info.get("accountState").?.string); +} + +test 
"parseDefaultAccountStateExtension: update" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const freeze_auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, freeze_auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=1 (Update), account_state=1 (Initialized) + const ext_data = [_]u8{ 1, 1 }; + const result = try parseDefaultAccountStateExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "updateDefaultAccountState", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expectEqualStrings("initialized", info.get("accountState").?.string); + // Should have freezeAuthority (single signer) + try std.testing.expect(info.get("freezeAuthority") != null); +} + +test "parseDefaultAccountStateExtension: invalid account state" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0, invalid account_state=5 + const ext_data = [_]u8{ 0, 5 }; + try std.testing.expectError(error.DeserializationFailed, parseDefaultAccountStateExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + )); +} + +test "parseDefaultAccountStateExtension: too few accounts" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + // update needs 2 accounts + const ext_data = [_]u8{ 1, 1 }; 
+ try std.testing.expectError(error.NotEnoughSplTokenAccounts, parseDefaultAccountStateExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + )); +} + +test "parseMemoTransferExtension: enable" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{0}; // Enable + const result = try parseMemoTransferExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings( + "enableRequiredMemoTransfers", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("account") != null); + try std.testing.expect(info.get("owner") != null); +} + +test "parseMemoTransferExtension: disable" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{1}; // Disable + const result = try parseMemoTransferExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings( + "disableRequiredMemoTransfers", + result.object.get("type").?.string, + ); +} + +test "parseMemoTransferExtension: multisig signers" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const multisig = Pubkey{ .data = [_]u8{2} ** 32 }; + const signer1 = Pubkey{ .data = [_]u8{3} ** 32 
}; + const signer2 = Pubkey{ .data = [_]u8{4} ** 32 }; + const static_keys = [_]Pubkey{ account, multisig, signer1, signer2 }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{0}; // Enable + const result = try parseMemoTransferExtension( + allocator, + &ext_data, + &.{ 0, 1, 2, 3 }, + &account_keys, + ); + const info = result.object.get("info").?.object; + // Multisig case: should have multisigOwner and signers + try std.testing.expect(info.get("multisigOwner") != null); + try std.testing.expect(info.get("signers") != null); + try std.testing.expectEqual(@as(usize, 2), info.get("signers").?.array.items.len); +} + +test "parseInterestBearingMintExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const rate_auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, rate_auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0, COption(tag=1, pubkey), i16 rate=500 + var payload: [38]u8 = undefined; + std.mem.writeInt(u32, payload[0..4], 1, .little); // COption tag = Some + @memcpy(payload[4..36], &rate_auth.data); + std.mem.writeInt(i16, payload[36..38], 500, .little); + const ext_data = [_]u8{0} ++ payload; + + const result = try parseInterestBearingMintExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializeInterestBearingConfig", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("rateAuthority") != null); + try std.testing.expectEqual(@as(i64, 500), info.get("rate").?.integer); +} + +test "parseInterestBearingMintExtension: updateRate" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = 
arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=1, i16 rate=750 + var payload: [2]u8 = undefined; + std.mem.writeInt(i16, payload[0..2], 750, .little); + const ext_data = [_]u8{1} ++ payload; + + const result = try parseInterestBearingMintExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "updateInterestBearingConfigRate", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expectEqual(@as(i64, 750), info.get("newRate").?.integer); +} + +test "parseCpiGuardExtension: enable" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{0}; // Enable + const result = try parseCpiGuardExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings("enableCpiGuard", result.object.get("type").?.string); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("account") != null); + try std.testing.expect(info.get("owner") != null); +} + +test "parseCpiGuardExtension: disable" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{1}; // Disable + const 
result = try parseCpiGuardExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings("disableCpiGuard", result.object.get("type").?.string); +} + +test "parseCpiGuardExtension: invalid sub-tag" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{42}; // Invalid + try std.testing.expectError(error.DeserializationFailed, parseCpiGuardExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + )); +} + +test "parseTransferHookExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const program = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth, program }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0, OptionalNonZeroPubkey authority (32), OptionalNonZeroPubkey program_id (32) + var payload: [64]u8 = undefined; + @memcpy(payload[0..32], &auth.data); // authority + @memcpy(payload[32..64], &program.data); // program_id + const ext_data = [_]u8{0} ++ payload; + + const result = try parseTransferHookExtension(allocator, &ext_data, &.{0}, &account_keys); + try std.testing.expectEqualStrings( + "initializeTransferHook", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("authority") != null); + try std.testing.expect(info.get("programId") != null); +} + +test "parseTransferHookExtension: initialize with no authority (zeros)" { + var arena = 
std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + // Both authority and program_id are zeros (None) + const payload: [64]u8 = [_]u8{0} ** 64; + const ext_data = [_]u8{0} ++ payload; + + const result = try parseTransferHookExtension(allocator, &ext_data, &.{0}, &account_keys); + try std.testing.expectEqualStrings( + "initializeTransferHook", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + // Zero pubkeys should not appear + try std.testing.expect(info.get("authority") == null); + try std.testing.expect(info.get("programId") == null); +} + +test "parseTransferHookExtension: update" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const new_program = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth, new_program }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=1, OptionalNonZeroPubkey program_id (32) + var payload: [32]u8 = undefined; + @memcpy(payload[0..32], &new_program.data); + const ext_data = [_]u8{1} ++ payload; + + const result = try parseTransferHookExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings("updateTransferHook", result.object.get("type").?.string); +} + +test "parseMetadataPointerExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const metadata = Pubkey{ .data = 
[_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth, metadata }; + const account_keys = AccountKeys.init(&static_keys, null); + + var payload: [64]u8 = undefined; + @memcpy(payload[0..32], &auth.data); + @memcpy(payload[32..64], &metadata.data); + const ext_data = [_]u8{0} ++ payload; + + const result = try parseMetadataPointerExtension(allocator, &ext_data, &.{0}, &account_keys); + try std.testing.expectEqualStrings( + "initializeMetadataPointer", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("authority") != null); + try std.testing.expect(info.get("metadataAddress") != null); +} + +test "parseMetadataPointerExtension: update" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const new_metadata = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth, new_metadata }; + const account_keys = AccountKeys.init(&static_keys, null); + + var payload: [32]u8 = undefined; + @memcpy(payload[0..32], &new_metadata.data); + const ext_data = [_]u8{1} ++ payload; + + const result = try parseMetadataPointerExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "updateMetadataPointer", + result.object.get("type").?.string, + ); +} + +test "parseGroupPointerExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const group = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth, group }; + const account_keys = AccountKeys.init(&static_keys, null); + + var payload: [64]u8 = undefined; + 
@memcpy(payload[0..32], &auth.data); + @memcpy(payload[32..64], &group.data); + const ext_data = [_]u8{0} ++ payload; + + const result = try parseGroupPointerExtension(allocator, &ext_data, &.{0}, &account_keys); + try std.testing.expectEqualStrings( + "initializeGroupPointer", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("authority") != null); + try std.testing.expect(info.get("groupAddress") != null); +} + +test "parseGroupPointerExtension: update" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + const payload: [32]u8 = [_]u8{0} ** 32; // zeros = no group address + const ext_data = [_]u8{1} ++ payload; + + const result = try parseGroupPointerExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings("updateGroupPointer", result.object.get("type").?.string); + const info = result.object.get("info").?.object; + // Zero pubkey is None, should not be in output + try std.testing.expect(info.get("groupAddress") == null); +} + +test "parseGroupMemberPointerExtension: initialize and update" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const member = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth, member }; + const account_keys = AccountKeys.init(&static_keys, null); + + // Initialize + var payload_init: [64]u8 = undefined; + @memcpy(payload_init[0..32], &auth.data); + @memcpy(payload_init[32..64], &member.data); + const 
ext_data_init = [_]u8{0} ++ payload_init; + const result_init = try parseGroupMemberPointerExtension( + allocator, + &ext_data_init, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializeGroupMemberPointer", + result_init.object.get("type").?.string, + ); + const info_init = result_init.object.get("info").?.object; + try std.testing.expect(info_init.get("memberAddress") != null); + + // Update + var payload_update: [32]u8 = undefined; + @memcpy(payload_update[0..32], &member.data); + const ext_data_update = [_]u8{1} ++ payload_update; + const result_update = try parseGroupMemberPointerExtension( + allocator, + &ext_data_update, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "updateGroupMemberPointer", + result_update.object.get("type").?.string, + ); +} + +test "parsePausableExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0, OptionalNonZeroPubkey authority + var payload: [32]u8 = undefined; + @memcpy(payload[0..32], &auth.data); + const ext_data = [_]u8{0} ++ payload; + + const result = try parsePausableExtension(allocator, &ext_data, &.{0}, &account_keys); + try std.testing.expectEqualStrings( + "initializePausableConfig", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("authority") != null); +} + +test "parsePausableExtension: initialize with no authority" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const 
account_keys = AccountKeys.init(&static_keys, null); + + // All zeros = None authority + const payload = [_]u8{0} ** 32; + const ext_data = [_]u8{0} ++ payload; + + const result = try parsePausableExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializePausableConfig", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + // Null authority + try std.testing.expect(info.get("authority").?.null == {}); +} + +test "parsePausableExtension: pause" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{1}; // Pause + const result = try parsePausableExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings("pause", result.object.get("type").?.string); +} + +test "parsePausableExtension: resume" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{2}; // Resume + const result = try parsePausableExtension(allocator, &ext_data, &.{ 0, 1 }, &account_keys); + try std.testing.expectEqualStrings("resume", result.object.get("type").?.string); +} + +test "parsePausableExtension: invalid sub-tag" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const 
static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{3}; // Invalid + try std.testing.expectError(error.DeserializationFailed, parsePausableExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + )); +} + +test "parseScaledUiAmountExtension: initialize" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0, OptionalNonZeroPubkey authority (32 bytes), f64 multiplier (8 bytes) + var payload: [40]u8 = undefined; + @memcpy(payload[0..32], &auth.data); // authority + const multiplier: f64 = 1.5; + std.mem.writeInt(u64, payload[32..40], @bitCast(multiplier), .little); + const ext_data = [_]u8{0} ++ payload; + + const result = try parseScaledUiAmountExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializeScaledUiAmountConfig", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("authority") != null); + try std.testing.expect(info.get("multiplier") != null); +} + +test "parseScaledUiAmountExtension: updateMultiplier" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=1, f64 multiplier (8 bytes), i64 timestamp (8 bytes) + var payload: [16]u8 = undefined; + const multiplier: f64 = 2.0; + std.mem.writeInt(u64, payload[0..8], @bitCast(multiplier), 
.little); + std.mem.writeInt(i64, payload[8..16], 1700000000, .little); + const ext_data = [_]u8{1} ++ payload; + + const result = try parseScaledUiAmountExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "updateMultiplier", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("newMultiplier") != null); + try std.testing.expectEqual( + @as(i64, 1700000000), + info.get("newMultiplierTimestamp").?.integer, + ); +} + +test "parseConfidentialTransferExtension: approveAccount" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const mint = Pubkey{ .data = [_]u8{2} ** 32 }; + const authority = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ account, mint, authority }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{3}; // ApproveAccount + const result = try parseConfidentialTransferExtension( + allocator, + &ext_data, + &.{ 0, 1, 2 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "approveConfidentialTransferAccount", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("account") != null); + try std.testing.expect(info.get("mint") != null); + try std.testing.expect(info.get("confidentialTransferAuditorAuthority") != null); +} + +test "parseConfidentialTransferExtension: configureAccountWithRegistry" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const mint = Pubkey{ .data = [_]u8{2} ** 32 }; + const registry = Pubkey{ .data = [_]u8{3} ** 32 }; + const static_keys = [_]Pubkey{ account, mint, 
registry }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{14}; // ConfigureAccountWithRegistry + const result = try parseConfidentialTransferExtension( + allocator, + &ext_data, + &.{ 0, 1, 2 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "configureConfidentialAccountWithRegistry", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("registry") != null); +} + +test "parseConfidentialTransferExtension: enableDisableCredits" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + // Enable confidential credits (tag=9) + const ext_data_enable = [_]u8{9}; + const result_enable = try parseConfidentialTransferExtension( + allocator, + &ext_data_enable, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "enableConfidentialTransferConfidentialCredits", + result_enable.object.get("type").?.string, + ); + + // Disable confidential credits (tag=10) + const ext_data_disable = [_]u8{10}; + const result_disable = try parseConfidentialTransferExtension( + allocator, + &ext_data_disable, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "disableConfidentialTransferConfidentialCredits", + result_disable.object.get("type").?.string, + ); + + // Enable non-confidential credits (tag=11) + const ext_data_enable_nc = [_]u8{11}; + const result_enable_nc = try parseConfidentialTransferExtension( + allocator, + &ext_data_enable_nc, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "enableConfidentialTransferNonConfidentialCredits", + result_enable_nc.object.get("type").?.string, 
+ ); + + // Disable non-confidential credits (tag=12) + const ext_data_disable_nc = [_]u8{12}; + const result_disable_nc = try parseConfidentialTransferExtension( + allocator, + &ext_data_disable_nc, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "disableConfidentialTransferNonConfidentialCredits", + result_disable_nc.object.get("type").?.string, + ); +} + +test "parseConfidentialTransferExtension: invalid sub-tag" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{99}; + try std.testing.expectError(error.DeserializationFailed, parseConfidentialTransferExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + )); +} + +test "parseConfidentialTransferFeeExtension: initializeConfig" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // sub_tag=0, OptionalNonZeroPubkey authority (32 bytes) + var payload: [32]u8 = undefined; + @memcpy(payload[0..32], &auth.data); + const ext_data = [_]u8{0} ++ payload; + + const result = try parseConfidentialTransferFeeExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializeConfidentialTransferFeeConfig", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expect(info.get("authority") != null); +} + +test "parseConfidentialTransferFeeExtension: harvestWithheldTokensToMint" { + var arena = 
std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const source = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, source }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{3}; // HarvestWithheldTokensToMint + const result = try parseConfidentialTransferFeeExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "harvestWithheldConfidentialTransferTokensToMint", + result.object.get("type").?.string, + ); + const info = result.object.get("info").?.object; + try std.testing.expectEqual(@as(usize, 1), info.get("sourceAccounts").?.array.items.len); +} + +test "parseConfidentialTransferFeeExtension: enableDisableHarvestToMint" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + // Enable (tag=4) + const ext_enable = [_]u8{4}; + const result_enable = try parseConfidentialTransferFeeExtension( + allocator, + &ext_enable, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "enableConfidentialTransferFeeHarvestToMint", + result_enable.object.get("type").?.string, + ); + + // Disable (tag=5) + const ext_disable = [_]u8{5}; + const result_disable = try parseConfidentialTransferFeeExtension( + allocator, + &ext_disable, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings( + "disableConfidentialTransferFeeHarvestToMint", + result_disable.object.get("type").?.string, + ); +} + +test "parseConfidentialMintBurnExtension: initializeMint" { + var arena = 
std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{0}; + const result = try parseConfidentialMintBurnExtension( + allocator, + &ext_data, + &.{0}, + &account_keys, + ); + try std.testing.expectEqualStrings( + "initializeConfidentialMintBurnMint", + result.object.get("type").?.string, + ); +} + +test "parseConfidentialMintBurnExtension: applyPendingBurn" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + const ext_data = [_]u8{5}; // ApplyPendingBurn + const result = try parseConfidentialMintBurnExtension( + allocator, + &ext_data, + &.{ 0, 1 }, + &account_keys, + ); + try std.testing.expectEqualStrings("applyPendingBurn", result.object.get("type").?.string); +} + +test "parseTokenInstruction: defaultAccountState extension via outer dispatch" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + // outer tag=28 (defaultAccountStateExtension), sub_tag=0 (Initialize), account_state=2 (Frozen) + const data = [_]u8{ 28, 0, 2 }; + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 0, + .accounts = &.{0}, + .data = &data, + }; + + const result = try parseTokenInstruction(allocator, instruction, &account_keys); + try 
std.testing.expectEqualStrings( + "initializeDefaultAccountState", + result.object.get("type").?.string, + ); +} + +test "parseTokenInstruction: memoTransfer extension via outer dispatch" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + // outer tag=30 (memoTransferExtension), sub_tag=0 (Enable) + const data = [_]u8{ 30, 0 }; + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 0, + .accounts = &.{ 0, 1 }, + .data = &data, + }; + + const result = try parseTokenInstruction(allocator, instruction, &account_keys); + try std.testing.expectEqualStrings( + "enableRequiredMemoTransfers", + result.object.get("type").?.string, + ); +} + +test "parseTokenInstruction: cpiGuard extension via outer dispatch" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const account = Pubkey{ .data = [_]u8{1} ** 32 }; + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ account, owner }; + const account_keys = AccountKeys.init(&static_keys, null); + + // outer tag=34 (cpiGuardExtension), sub_tag=1 (Disable) + const data = [_]u8{ 34, 1 }; + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 0, + .accounts = &.{ 0, 1 }, + .data = &data, + }; + + const result = try parseTokenInstruction(allocator, instruction, &account_keys); + try std.testing.expectEqualStrings("disableCpiGuard", result.object.get("type").?.string); +} + +test "parseTokenInstruction: pausable extension via outer dispatch" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = 
arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const auth = Pubkey{ .data = [_]u8{2} ** 32 }; + const static_keys = [_]Pubkey{ mint, auth }; + const account_keys = AccountKeys.init(&static_keys, null); + + // outer tag=44 (pausableExtension), sub_tag=1 (Pause) + const data = [_]u8{ 44, 1 }; + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 0, + .accounts = &.{ 0, 1 }, + .data = &data, + }; + + const result = try parseTokenInstruction(allocator, instruction, &account_keys); + try std.testing.expectEqualStrings("pause", result.object.get("type").?.string); +} + +test "parseTokenInstruction: extension with insufficient data returns error" { + var arena = std.heap.ArenaAllocator.init(std.testing.allocator); + defer _ = arena.reset(.free_all); + const allocator = arena.allocator(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + const static_keys = [_]Pubkey{mint}; + const account_keys = AccountKeys.init(&static_keys, null); + + // outer tag=28 (defaultAccountStateExtension) with no sub-data + const data = [_]u8{28}; + const instruction = sig.ledger.transaction_status.CompiledInstruction{ + .program_id_index = 0, + .accounts = &.{0}, + .data = &data, + }; + + try std.testing.expectError(error.DeserializationFailed, parseTokenInstruction( + allocator, + instruction, + &account_keys, + )); +} + +test "readOptionalNonZeroPubkey: non-zero returns pubkey" { + const data = [_]u8{0xAA} ** 64; + const result = readOptionalNonZeroPubkey(&data, 0); + try std.testing.expect(result != null); + try std.testing.expectEqual([_]u8{0xAA} ** 32, result.?.data); +} + +test "readOptionalNonZeroPubkey: zeros returns null" { + const data = [_]u8{0} ** 64; + const result = readOptionalNonZeroPubkey(&data, 0); + try std.testing.expect(result == null); +} + +test "readOptionalNonZeroPubkey: offset" { + var data: [48]u8 = undefined; + @memset(data[0..16], 0); + 
@memset(data[16..48], 0xBB); + const result = readOptionalNonZeroPubkey(&data, 16); + try std.testing.expect(result != null); + try std.testing.expectEqual([_]u8{0xBB} ** 32, result.?.data); +} + +test "readOptionalNonZeroPubkey: insufficient data returns null" { + const data = [_]u8{0xAA} ** 16; // Only 16 bytes, need 32 + const result = readOptionalNonZeroPubkey(&data, 0); + try std.testing.expect(result == null); +} + +test "readCOptionPubkey: Some variant" { + var data: [36]u8 = undefined; + std.mem.writeInt(u32, data[0..4], 1, .little); // tag = Some + @memset(data[4..36], 0xCC); + const result = try readCOptionPubkey(&data, 0); + try std.testing.expect(result.pubkey != null); + try std.testing.expectEqual(@as(usize, 36), result.len); + try std.testing.expectEqual([_]u8{0xCC} ** 32, result.pubkey.?.data); +} + +test "readCOptionPubkey: None variant" { + var data: [4]u8 = undefined; + std.mem.writeInt(u32, data[0..4], 0, .little); // tag = None + const result = try readCOptionPubkey(&data, 0); + try std.testing.expect(result.pubkey == null); + try std.testing.expectEqual(@as(usize, 4), result.len); +} + +test "readCOptionPubkey: invalid tag" { + var data: [36]u8 = undefined; + std.mem.writeInt(u32, data[0..4], 2, .little); // Invalid tag + try std.testing.expectError(error.DeserializationFailed, readCOptionPubkey(&data, 0)); +} + +test "readCOptionPubkey: insufficient data for tag" { + const data = [_]u8{ 0, 0 }; // Only 2 bytes, need 4 for tag + try std.testing.expectError(error.DeserializationFailed, readCOptionPubkey(&data, 0)); +} + +test "readCOptionPubkey: Some but insufficient data for pubkey" { + var data: [8]u8 = undefined; + std.mem.writeInt(u32, data[0..4], 1, .little); // tag = Some + // Only 4 more bytes, need 32 + try std.testing.expectError(error.DeserializationFailed, readCOptionPubkey(&data, 0)); +} diff --git a/src/rpc/test_serialize.zig b/src/rpc/test_serialize.zig index 00c9d67a19..e3e164a5f8 100644 --- a/src/rpc/test_serialize.zig +++ 
b/src/rpc/test_serialize.zig @@ -1,14 +1,17 @@ const std = @import("std"); const sig = @import("../sig.zig"); const rpc = @import("lib.zig"); +const parse_instruction = @import("parse_instruction/lib.zig"); const methods = rpc.methods; +const Hash = sig.core.Hash; const Pubkey = sig.core.Pubkey; const Signature = sig.core.Signature; const GetAccountInfo = methods.GetAccountInfo; const GetBalance = methods.GetBalance; +const GetBlock = methods.GetBlock; const GetBlockCommitment = methods.GetBlockCommitment; const GetBlockHeight = methods.GetBlockHeight; const GetEpochInfo = methods.GetEpochInfo; @@ -334,3 +337,758 @@ test GetVoteAccounts { , ); } + +/// Helper to stringify a value and compare against expected JSON. +fn expectJsonStringify(expected: []const u8, value: anytype) !void { + const actual = try std.json.Stringify.valueAlloc(std.testing.allocator, value, .{}); + defer std.testing.allocator.free(actual); + try std.testing.expectEqualStrings(expected, actual); +} + +test "GetBlock.Response serialization - minimal block (no transactions, no rewards)" { + const response = GetBlock.Response{ + .previousBlockhash = Hash.ZEROES, + .blockhash = Hash.ZEROES, + .parentSlot = 99, + }; + try expectJsonStringify( + \\{"blockhash":"11111111111111111111111111111111","parentSlot":99,"previousBlockhash":"11111111111111111111111111111111"} + , response); +} + +test "GetBlock.Response serialization - full block with blockTime and blockHeight" { + const response = GetBlock.Response{ + .previousBlockhash = Hash.ZEROES, + .blockhash = Hash.ZEROES, + .parentSlot = 99, + .blockTime = 1_700_000_000, + .blockHeight = 42, + }; + try expectJsonStringify( + \\{"blockHeight":42,"blockTime":1700000000,"blockhash":"11111111111111111111111111111111","parentSlot":99,"previousBlockhash":"11111111111111111111111111111111"} + , response); +} + +test "GetBlock.Response serialization - block with rewards" { + const rewards = [_]GetBlock.Response.UiReward{.{ + .pubkey = Pubkey.ZEROES, + .lamports 
= 5000, + .postBalance = 1_000_000_000, + .rewardType = .Fee, + .commission = null, + }}; + + const response = GetBlock.Response{ + .previousBlockhash = Hash.ZEROES, + .blockhash = Hash.ZEROES, + .parentSlot = 99, + .rewards = &rewards, + }; + try expectJsonStringify( + \\{"blockhash":"11111111111111111111111111111111","parentSlot":99,"previousBlockhash":"11111111111111111111111111111111","rewards":[{"pubkey":"11111111111111111111111111111111","lamports":5000,"postBalance":1000000000,"rewardType":"Fee","commission":null}]} + , response); +} + +test "GetBlock.Response serialization - block with signatures" { + const sigs = [_]Signature{Signature.ZEROES}; + + const response = GetBlock.Response{ + .previousBlockhash = Hash.ZEROES, + .blockhash = Hash.ZEROES, + .parentSlot = 99, + .signatures = &sigs, + }; + try expectJsonStringify( + \\{"blockhash":"11111111111111111111111111111111","parentSlot":99,"previousBlockhash":"11111111111111111111111111111111","signatures":["1111111111111111111111111111111111111111111111111111111111111111"]} + , response); +} + +test "UiReward serialization - Fee reward type" { + const reward = GetBlock.Response.UiReward{ + .pubkey = Pubkey.ZEROES, + .lamports = 5000, + .postBalance = 1_000_000_000, + .rewardType = .Fee, + .commission = null, + }; + try expectJsonStringify( + \\{"pubkey":"11111111111111111111111111111111","lamports":5000,"postBalance":1000000000,"rewardType":"Fee","commission":null} + , reward); +} + +test "UiReward serialization - Staking reward with commission" { + const reward = GetBlock.Response.UiReward{ + .pubkey = Pubkey.ZEROES, + .lamports = 100_000, + .postBalance = 5_000_000_000, + .rewardType = .Staking, + .commission = 10, + }; + try expectJsonStringify( + \\{"pubkey":"11111111111111111111111111111111","lamports":100000,"postBalance":5000000000,"rewardType":"Staking","commission":10} + , reward); +} + +test "UiReward serialization - all reward types" { + // Test all four reward types serialize with correct 
capitalization + inline for (.{ + .{ GetBlock.Response.UiReward.RewardType.Fee, "Fee" }, + .{ GetBlock.Response.UiReward.RewardType.Rent, "Rent" }, + .{ GetBlock.Response.UiReward.RewardType.Staking, "Staking" }, + .{ GetBlock.Response.UiReward.RewardType.Voting, "Voting" }, + }) |pair| { + const actual = try std.json.Stringify.valueAlloc(std.testing.allocator, pair[0], .{}); + defer std.testing.allocator.free(actual); + const expected = "\"" ++ pair[1] ++ "\""; + try std.testing.expectEqualStrings(expected, actual); + } +} + +test "UiReward.fromLedgerReward" { + const ledger_reward = sig.ledger.transaction_status.Reward{ + .pubkey = Pubkey.ZEROES, + .lamports = 5000, + .post_balance = 1_000_000_000, + .reward_type = .fee, + .commission = null, + }; + const ui_reward = try GetBlock.Response.UiReward.fromLedgerReward(ledger_reward); + try std.testing.expectEqual(Pubkey.ZEROES, ui_reward.pubkey); + try std.testing.expectEqual(@as(i64, 5000), ui_reward.lamports); + try std.testing.expectEqual(@as(u64, 1_000_000_000), ui_reward.postBalance); + try std.testing.expectEqual(GetBlock.Response.UiReward.RewardType.Fee, ui_reward.rewardType.?); + try std.testing.expectEqual(@as(?u8, null), ui_reward.commission); +} + +test "UiReward.fromLedgerReward - all reward type mappings" { + const mappings = .{ + .{ @as(?sig.replay.rewards.RewardType, .fee), GetBlock.Response.UiReward.RewardType.Fee }, + .{ @as(?sig.replay.rewards.RewardType, .rent), GetBlock.Response.UiReward.RewardType.Rent }, + .{ @as(?sig.replay.rewards.RewardType, .staking), GetBlock.Response.UiReward.RewardType.Staking }, + .{ @as(?sig.replay.rewards.RewardType, .voting), GetBlock.Response.UiReward.RewardType.Voting }, + }; + inline for (mappings) |pair| { + const ledger_reward = sig.ledger.transaction_status.Reward{ + .pubkey = Pubkey.ZEROES, + .lamports = 0, + .post_balance = 0, + .reward_type = pair[0], + .commission = null, + }; + const ui_reward = try 
GetBlock.Response.UiReward.fromLedgerReward(ledger_reward); + try std.testing.expectEqual(pair[1], ui_reward.rewardType.?); + } +} + +test "UiReward.fromLedgerReward - null reward type" { + const ledger_reward = sig.ledger.transaction_status.Reward{ + .pubkey = Pubkey.ZEROES, + .lamports = 0, + .post_balance = 0, + .reward_type = null, + .commission = null, + }; + const ui_reward = try GetBlock.Response.UiReward.fromLedgerReward(ledger_reward); + try std.testing.expectEqual(@as(?GetBlock.Response.UiReward.RewardType, null), ui_reward.rewardType); +} + +test "UiTransactionResultStatus serialization - success" { + const status = GetBlock.Response.UiTransactionResultStatus{ + .Ok = .{}, + .Err = null, + }; + try expectJsonStringify( + \\{"Ok":null} + , status); +} + +test "UiTransactionResultStatus serialization - error" { + const status = GetBlock.Response.UiTransactionResultStatus{ + .Ok = null, + .Err = .InsufficientFundsForFee, + }; + try expectJsonStringify( + \\{"Err":"InsufficientFundsForFee"} + , status); +} + +test "TransactionVersion serialization - legacy" { + const version = GetBlock.Response.EncodedTransactionWithStatusMeta.TransactionVersion{ .legacy = {} }; + try expectJsonStringify( + \\"legacy" + , version); +} + +test "TransactionVersion serialization - number" { + const version = GetBlock.Response.EncodedTransactionWithStatusMeta.TransactionVersion{ .number = 0 }; + try expectJsonStringify("0", version); +} + +test "EncodedTransaction serialization - binary base64" { + const tx = GetBlock.Response.EncodedTransaction{ + .binary = .{ "AQID", .base64 }, + }; + try expectJsonStringify( + \\["AQID","base64"] + , tx); +} + +test "EncodedTransaction serialization - binary base58" { + const tx = GetBlock.Response.EncodedTransaction{ + .binary = .{ "2j", .base58 }, + }; + try expectJsonStringify( + \\["2j","base58"] + , tx); +} + +test "EncodedTransaction serialization - legacy binary" { + const tx = GetBlock.Response.EncodedTransaction{ + .legacy_binary = 
"some_base58_data", + }; + try expectJsonStringify( + \\"some_base58_data" + , tx); +} + +test "EncodedTransactionWithStatusMeta serialization - minimal" { + const tx_with_meta = GetBlock.Response.EncodedTransactionWithStatusMeta{ + .transaction = .{ .binary = .{ "AQID", .base64 } }, + .meta = null, + .version = null, + }; + try expectJsonStringify( + \\{"transaction":["AQID","base64"]} + , tx_with_meta); +} + +test "EncodedTransactionWithStatusMeta serialization - with version" { + const tx_with_meta = GetBlock.Response.EncodedTransactionWithStatusMeta{ + .transaction = .{ .binary = .{ "AQID", .base64 } }, + .meta = null, + .version = .legacy, + }; + try expectJsonStringify( + \\{"transaction":["AQID","base64"],"version":"legacy"} + , tx_with_meta); +} + +test "UiTransactionStatusMeta serialization - success with balances" { + const pre_balances = [_]u64{ 1_000_000_000, 500_000_000 }; + const post_balances = [_]u64{ 999_995_000, 500_005_000 }; + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 5000, + .preBalances = &pre_balances, + .postBalances = &post_balances, + }; + try expectJsonStringify( + \\{"err":null,"fee":5000,"innerInstructions":[],"logMessages":[],"postBalances":[999995000,500005000],"postTokenBalances":[],"preBalances":[1000000000,500000000],"preTokenBalances":[],"rewards":[],"status":{"Ok":null}} + , meta); +} + +test "UiTransactionStatusMeta serialization - with computeUnitsConsumed" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 5000, + .preBalances = &.{}, + .postBalances = &.{}, + .computeUnitsConsumed = .{ .value = 150_000 }, + }; + try expectJsonStringify( + \\{"computeUnitsConsumed":150000,"err":null,"fee":5000,"innerInstructions":[],"logMessages":[],"postBalances":[],"postTokenBalances":[],"preBalances":[],"preTokenBalances":[],"rewards":[],"status":{"Ok":null}} + , meta); +} + +test 
"UiTransactionStatusMeta serialization - with loadedAddresses" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 5000, + .preBalances = &.{}, + .postBalances = &.{}, + .loadedAddresses = .{ .value = .{ + .readonly = &.{Pubkey.ZEROES}, + .writable = &.{}, + } }, + }; + try expectJsonStringify( + \\{"err":null,"fee":5000,"innerInstructions":[],"loadedAddresses":{"readonly":["11111111111111111111111111111111"],"writable":[]},"logMessages":[],"postBalances":[],"postTokenBalances":[],"preBalances":[],"preTokenBalances":[],"rewards":[],"status":{"Ok":null}} + , meta); +} + +test "UiTransactionReturnData serialization" { + const return_data = GetBlock.Response.UiTransactionReturnData{ + .programId = Pubkey.ZEROES, + .data = .{ "AQID", .base64 }, + }; + try expectJsonStringify( + \\{"programId":"11111111111111111111111111111111","data":["AQID","base64"]} + , return_data); +} + +test "UiTransactionTokenBalance serialization" { + const token_balance = GetBlock.Response.UiTransactionTokenBalance{ + .accountIndex = 2, + .mint = Pubkey.ZEROES, + .owner = Pubkey.ZEROES, + .programId = Pubkey.ZEROES, + .uiTokenAmount = .{ + .amount = "1000000", + .decimals = 6, + .uiAmount = 1.0, + .uiAmountString = "1", + }, + }; + try expectJsonStringify( + \\{"accountIndex":2,"mint":"11111111111111111111111111111111","owner":"11111111111111111111111111111111","programId":"11111111111111111111111111111111","uiTokenAmount":{"amount":"1000000","decimals":6,"uiAmount":1.0,"uiAmountString":"1"}} + , token_balance); +} + +test "UiTokenAmount serialization - without uiAmount" { + const token_amount = GetBlock.Response.UiTokenAmount{ + .amount = "42", + .decimals = 0, + .uiAmount = null, + .uiAmountString = "42", + }; + try expectJsonStringify( + \\{"amount":"42","decimals":0,"uiAmountString":"42"} + , token_amount); +} + +test "EncodedInstruction serialization" { + const accounts = [_]u8{ 0, 1 }; + const ix = 
GetBlock.Response.EncodedInstruction{ + .programIdIndex = 2, + .accounts = &accounts, + .data = "3Bxs3zzLZLuLQEYX", + }; + // Note: the accounts field is []const u8 of raw byte values; the expected JSON + // below shows it serialized as an array of integers, not as a JSON string. + try expectJsonStringify( + \\{"programIdIndex":2,"accounts":[0,1],"data":"3Bxs3zzLZLuLQEYX"} + , ix); +} + +test "EncodedInstruction serialization - with stackHeight" { + const ix = GetBlock.Response.EncodedInstruction{ + .programIdIndex = 2, + .accounts = &.{}, + .data = "3Bxs3zzLZLuLQEYX", + .stackHeight = 1, + }; + try expectJsonStringify( + \\{"programIdIndex":2,"accounts":[],"data":"3Bxs3zzLZLuLQEYX","stackHeight":1} + , ix); +} + +test "EncodedMessage serialization" { + const msg = GetBlock.Response.EncodedMessage{ + .accountKeys = &.{Pubkey.ZEROES}, + .header = .{ + .numRequiredSignatures = 1, + .numReadonlySignedAccounts = 0, + .numReadonlyUnsignedAccounts = 1, + }, + .recentBlockhash = Hash.ZEROES, + .instructions = &.{}, + }; + try expectJsonStringify( + \\{"accountKeys":["11111111111111111111111111111111"],"header":{"numRequiredSignatures":1,"numReadonlySignedAccounts":0,"numReadonlyUnsignedAccounts":1},"recentBlockhash":"11111111111111111111111111111111","instructions":[]} + , msg); +} + +test "EncodedMessage serialization - with addressTableLookups" { + const msg = GetBlock.Response.EncodedMessage{ + .accountKeys = &.{}, + .header = .{ + .numRequiredSignatures = 1, + .numReadonlySignedAccounts = 0, + .numReadonlyUnsignedAccounts = 0, + }, + .recentBlockhash = Hash.ZEROES, + .instructions = &.{}, + .addressTableLookups = &.{.{ + .accountKey = Pubkey.ZEROES, + .writableIndexes = &.{0}, + .readonlyIndexes = &.{1}, + }}, + }; + // Note: writableIndexes/readonlyIndexes are []const u8, serialized as integer arrays + try expectJsonStringify( + 
\\{"accountKeys":[],"header":{"numRequiredSignatures":1,"numReadonlySignedAccounts":0,"numReadonlyUnsignedAccounts":0},"recentBlockhash":"11111111111111111111111111111111","instructions":[],"addressTableLookups":[{"accountKey":"11111111111111111111111111111111","readonlyIndexes":[1],"writableIndexes":[0]}]} + , msg); +} + +test "UiCompiledInstruction serialization" { + const ix = parse_instruction.UiCompiledInstruction{ + .programIdIndex = 3, + .accounts = &.{ 0, 1, 2 }, + .data = "3Bxs3zzLZLuLQEYX", + .stackHeight = 2, + }; + // UiCompiledInstruction serializes accounts as array of integers + try expectJsonStringify( + \\{"accounts":[0,1,2],"data":"3Bxs3zzLZLuLQEYX","programIdIndex":3,"stackHeight":2} + , ix); +} + +test "UiCompiledInstruction serialization - no stackHeight" { + const ix = parse_instruction.UiCompiledInstruction{ + .programIdIndex = 3, + .accounts = &.{}, + .data = "3Bxs3zzLZLuLQEYX", + }; + try expectJsonStringify( + \\{"accounts":[],"data":"3Bxs3zzLZLuLQEYX","programIdIndex":3} + , ix); +} + +test "UiPartiallyDecodedInstruction serialization" { + const ix = parse_instruction.UiPartiallyDecodedInstruction{ + .programId = "11111111111111111111111111111111", + .accounts = &.{"Vote111111111111111111111111111111111111111"}, + .data = "3Bxs3zzLZLuLQEYX", + }; + try expectJsonStringify( + \\{"accounts":["Vote111111111111111111111111111111111111111"],"data":"3Bxs3zzLZLuLQEYX","programId":"11111111111111111111111111111111"} + , ix); +} + +test "ParsedInstruction serialization" { + var info = std.json.ObjectMap.init(std.testing.allocator); + defer info.deinit(); + try info.put("lamports", .{ .integer = 5000 }); + try info.put("source", .{ .string = "11111111111111111111111111111111" }); + + var parsed = std.json.ObjectMap.init(std.testing.allocator); + defer parsed.deinit(); + try parsed.put("type", .{ .string = "transfer" }); + try parsed.put("info", .{ .object = info }); + + // We need to test serialization through jsonStringify, not the struct 
directly, + // since ObjectMap doesn't have standard serialization. + // Instead, test that a fully constructed ParsedInstruction serializes correctly. + const pi = parse_instruction.ParsedInstruction{ + .program = "system", + .program_id = "11111111111111111111111111111111", + .parsed = .{ .object = parsed }, + .stack_height = null, + }; + + // Verify it contains the expected fields + const output = try std.json.Stringify.valueAlloc(std.testing.allocator, pi, .{}); + defer std.testing.allocator.free(output); + try std.testing.expect(std.mem.indexOf(u8, output, "\"parsed\"") != null); + try std.testing.expect(std.mem.indexOf(u8, output, "\"program\":\"system\"") != null); + try std.testing.expect(std.mem.indexOf(u8, output, "\"programId\":\"11111111111111111111111111111111\"") != null); +} + +test "UiInnerInstructions serialization" { + const inner = parse_instruction.UiInnerInstructions{ + .index = 0, + .instructions = &.{.{ .compiled = .{ + .programIdIndex = 2, + .accounts = &.{0}, + .data = "3Bxs3zzLZLuLQEYX", + .stackHeight = 2, + } }}, + }; + try expectJsonStringify( + \\{"index":0,"instructions":[{"accounts":[0],"data":"3Bxs3zzLZLuLQEYX","programIdIndex":2,"stackHeight":2}]} + , inner); +} + +test "UiInstruction serialization - compiled variant" { + const ix = parse_instruction.UiInstruction{ + .compiled = .{ + .programIdIndex = 1, + .accounts = &.{ 0, 2 }, + .data = "abcd", + }, + }; + try expectJsonStringify( + \\{"accounts":[0,2],"data":"abcd","programIdIndex":1} + , ix); +} + +test "GetBlock request serialization" { + try testRequest(.getBlock, .{ .slot = 430 }, + \\{"jsonrpc":"2.0","id":1,"method":"getBlock","params":[430]} + ); +} + +test "GetBlock request serialization - with encoding only (deprecated)" { + try testRequest(.getBlock, .{ + .slot = 430, + .encoding_or_config = .{ .encoding = .base64 }, + }, + \\{"jsonrpc":"2.0","id":1,"method":"getBlock","params":[430,"base64"]} + ); +} + +test "GetBlock request serialization - with config" { + try 
testRequest(.getBlock, .{ + .slot = 430, + .encoding_or_config = .{ .config = .{ + .encoding = .json, + .transactionDetails = .full, + .rewards = false, + } }, + }, + \\{"jsonrpc":"2.0","id":1,"method":"getBlock","params":[430,{"commitment":null,"encoding":"json","transactionDetails":"full","maxSupportedTransactionVersion":null,"rewards":false}]} + ); +} + +test "JsonSkippable - value state serializes the inner value" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 0, + .preBalances = &.{}, + .postBalances = &.{}, + .computeUnitsConsumed = .{ .value = 42 }, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, meta, .{}); + defer std.testing.allocator.free(json); + // computeUnitsConsumed should appear with value 42 + try std.testing.expect(std.mem.indexOf(u8, json, "\"computeUnitsConsumed\":42") != null); +} + +test "JsonSkippable - skip state omits the field entirely" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 0, + .preBalances = &.{}, + .postBalances = &.{}, + .computeUnitsConsumed = .skip, + .loadedAddresses = .skip, + .returnData = .skip, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, meta, .{}); + defer std.testing.allocator.free(json); + // These fields should NOT appear in the output + try std.testing.expect(std.mem.indexOf(u8, json, "computeUnitsConsumed") == null); + try std.testing.expect(std.mem.indexOf(u8, json, "loadedAddresses") == null); + try std.testing.expect(std.mem.indexOf(u8, json, "returnData") == null); +} + +test "JsonSkippable - none state serializes as null" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 0, + .preBalances = &.{}, + .postBalances = &.{}, + .rewards = .none, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, meta, 
.{}); + defer std.testing.allocator.free(json); + // rewards should appear as null + try std.testing.expect(std.mem.indexOf(u8, json, "\"rewards\":null") != null); +} + +test "ParsedAccount serialization - transaction source" { + const account = GetBlock.Response.ParsedAccount{ + .pubkey = Pubkey.ZEROES, + .writable = true, + .signer = true, + .source = .transaction, + }; + try expectJsonStringify( + \\{"pubkey":"11111111111111111111111111111111","signer":true,"source":"transaction","writable":true} + , account); +} + +test "ParsedAccount serialization - lookupTable source" { + const account = GetBlock.Response.ParsedAccount{ + .pubkey = Pubkey.ZEROES, + .writable = false, + .signer = false, + .source = .lookupTable, + }; + try expectJsonStringify( + \\{"pubkey":"11111111111111111111111111111111","signer":false,"source":"lookupTable","writable":false} + , account); +} + +test "AddressTableLookup serialization - indexes as integer arrays" { + const atl = GetBlock.Response.AddressTableLookup{ + .accountKey = Pubkey.ZEROES, + .writableIndexes = &[_]u8{ 0, 1, 4 }, + .readonlyIndexes = &[_]u8{ 2, 3 }, + }; + try expectJsonStringify( + \\{"accountKey":"11111111111111111111111111111111","readonlyIndexes":[2,3],"writableIndexes":[0,1,4]} + , atl); +} + +test "AddressTableLookup serialization - empty indexes" { + const atl = GetBlock.Response.AddressTableLookup{ + .accountKey = Pubkey.ZEROES, + .writableIndexes = &.{}, + .readonlyIndexes = &.{}, + }; + try expectJsonStringify( + \\{"accountKey":"11111111111111111111111111111111","readonlyIndexes":[],"writableIndexes":[]} + , atl); +} + +test "EncodedInstruction serialization - accounts as integer array" { + // Verifies that accounts field is serialized as [0,1,2] not as a string + const ix = GetBlock.Response.EncodedInstruction{ + .programIdIndex = 3, + .accounts = &[_]u8{ 0, 1, 2 }, + .data = "base58data", + }; + try expectJsonStringify( + \\{"programIdIndex":3,"accounts":[0,1,2],"data":"base58data"} + , ix); +} + +test 
"UiRawMessage serialization - without address table lookups" { + const msg = GetBlock.Response.UiRawMessage{ + .header = .{ + .numRequiredSignatures = 1, + .numReadonlySignedAccounts = 0, + .numReadonlyUnsignedAccounts = 1, + }, + .account_keys = &.{Pubkey.ZEROES}, + .recent_blockhash = Hash.ZEROES, + .instructions = &.{}, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, msg, .{}); + defer std.testing.allocator.free(json); + // Should have accountKeys, header, recentBlockhash, instructions but NOT addressTableLookups + try std.testing.expect(std.mem.indexOf(u8, json, "\"accountKeys\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"header\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"numRequiredSignatures\":1") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "addressTableLookups") == null); +} + +test "UiRawMessage serialization - with address table lookups" { + const atl = GetBlock.Response.AddressTableLookup{ + .accountKey = Pubkey.ZEROES, + .writableIndexes = &[_]u8{0}, + .readonlyIndexes = &.{}, + }; + const msg = GetBlock.Response.UiRawMessage{ + .header = .{ + .numRequiredSignatures = 1, + .numReadonlySignedAccounts = 0, + .numReadonlyUnsignedAccounts = 0, + }, + .account_keys = &.{}, + .recent_blockhash = Hash.ZEROES, + .instructions = &.{}, + .address_table_lookups = &.{atl}, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, msg, .{}); + defer std.testing.allocator.free(json); + try std.testing.expect(std.mem.indexOf(u8, json, "\"addressTableLookups\"") != null); +} + +test "UiParsedMessage serialization - without address table lookups" { + const msg = GetBlock.Response.UiParsedMessage{ + .account_keys = &.{}, + .recent_blockhash = Hash.ZEROES, + .instructions = &.{}, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, msg, .{}); + defer std.testing.allocator.free(json); + try std.testing.expect(std.mem.indexOf(u8, json, 
"\"accountKeys\":[]") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"recentBlockhash\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "addressTableLookups") == null); +} + +test "UiMessage serialization - raw variant" { + const msg = GetBlock.Response.UiMessage{ .raw = .{ + .header = .{ + .numRequiredSignatures = 2, + .numReadonlySignedAccounts = 0, + .numReadonlyUnsignedAccounts = 1, + }, + .account_keys = &.{}, + .recent_blockhash = Hash.ZEROES, + .instructions = &.{}, + } }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, msg, .{}); + defer std.testing.allocator.free(json); + try std.testing.expect(std.mem.indexOf(u8, json, "\"numRequiredSignatures\":2") != null); +} + +test "EncodedTransaction serialization - accounts variant" { + const account = GetBlock.Response.ParsedAccount{ + .pubkey = Pubkey.ZEROES, + .writable = true, + .signer = true, + .source = .transaction, + }; + const tx = GetBlock.Response.EncodedTransaction{ .accounts = .{ + .signatures = &.{}, + .accountKeys = &.{account}, + } }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, tx, .{}); + defer std.testing.allocator.free(json); + try std.testing.expect(std.mem.indexOf(u8, json, "\"accountKeys\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"source\":\"transaction\"") != null); +} + +test "UiTransactionStatusMeta serialization - innerInstructions and logMessages skipped" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 0, + .preBalances = &.{}, + .postBalances = &.{}, + .innerInstructions = .skip, + .logMessages = .skip, + .rewards = .skip, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, meta, .{}); + defer std.testing.allocator.free(json); + // innerInstructions, logMessages, and rewards should all be omitted + try std.testing.expect(std.mem.indexOf(u8, json, "innerInstructions") == null); + 
try std.testing.expect(std.mem.indexOf(u8, json, "logMessages") == null); + try std.testing.expect(std.mem.indexOf(u8, json, "rewards") == null); + // But err, fee, balances, status should still be present + try std.testing.expect(std.mem.indexOf(u8, json, "\"err\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"fee\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"status\"") != null); +} + +test "UiTransactionStatusMeta serialization - costUnits present" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 0, + .preBalances = &.{}, + .postBalances = &.{}, + .costUnits = .{ .value = 3428 }, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, meta, .{}); + defer std.testing.allocator.free(json); + try std.testing.expect(std.mem.indexOf(u8, json, "\"costUnits\":3428") != null); +} + +test "UiTransactionStatusMeta serialization - returnData present" { + const meta = GetBlock.Response.UiTransactionStatusMeta{ + .err = null, + .status = .{ .Ok = .{}, .Err = null }, + .fee = 0, + .preBalances = &.{}, + .postBalances = &.{}, + .returnData = .{ .value = .{ + .programId = Pubkey.ZEROES, + .data = .{ "AQID", .base64 }, + } }, + }; + const json = try std.json.Stringify.valueAlloc(std.testing.allocator, meta, .{}); + defer std.testing.allocator.free(json); + try std.testing.expect(std.mem.indexOf(u8, json, "\"returnData\"") != null); + try std.testing.expect(std.mem.indexOf(u8, json, "\"programId\"") != null); +} diff --git a/src/runtime/check_transactions.zig b/src/runtime/check_transactions.zig index 81014c2f77..190422fd95 100644 --- a/src/runtime/check_transactions.zig +++ b/src/runtime/check_transactions.zig @@ -273,7 +273,7 @@ pub const FeeDetails = struct { return sig_count *| lamports_per_signature; } - fn total(self: FeeDetails) u64 { + pub fn total(self: FeeDetails) u64 { return self.prioritization_fee +| self.transaction_fee; } }; diff 
--git a/src/runtime/cost_model.zig b/src/runtime/cost_model.zig new file mode 100644 index 0000000000..4050a2aea1 --- /dev/null +++ b/src/runtime/cost_model.zig @@ -0,0 +1,331 @@ +/// Cost model for calculating transaction costs for block scheduling and packing. +/// This is different from compute_units_consumed which measures actual CUs used during execution. +/// cost_units is used for block capacity planning and fee calculations. +/// +/// See Agave's cost model: +/// - https://github.com/anza-xyz/agave/blob/v3.1.8/cost-model/src/block_cost_limits.rs +/// - https://github.com/anza-xyz/agave/blob/v3.1.8/cost-model/src/cost_model.rs +const std = @import("std"); +const sig = @import("../sig.zig"); + +const FeatureSet = sig.core.FeatureSet; +const Slot = sig.core.Slot; +const RuntimeTransaction = sig.runtime.transaction_execution.RuntimeTransaction; +const ComputeBudgetLimits = sig.runtime.program.compute_budget.ComputeBudgetLimits; + +// Block cost limit constants from Agave's block_cost_limits.rs +// https://github.com/anza-xyz/agave/blob/v3.1.8/cost-model/src/block_cost_limits.rs + +/// Number of compute units for one signature verification. +pub const SIGNATURE_COST: u64 = 720; + +/// Number of compute units for one write lock. +pub const WRITE_LOCK_UNITS: u64 = 300; + +/// Cluster averaged compute unit to micro-sec conversion rate. +pub const COMPUTE_UNIT_TO_US_RATIO: u64 = 30; + +/// Number of data bytes per compute unit. +/// From Agave: INSTRUCTION_DATA_BYTES_COST = 140 bytes/us / 30 CU/us = 4 bytes/CU +/// This means 1 CU per 4 bytes of instruction data. +pub const INSTRUCTION_DATA_BYTES_PER_UNIT: u64 = 140 / COMPUTE_UNIT_TO_US_RATIO; + +/// Default instruction compute unit limit when not specified via SetComputeUnitLimit. +pub const DEFAULT_INSTRUCTION_COMPUTE_UNIT_LIMIT: u32 = 200_000; + +/// Cost per 32KB of loaded account data. 
+/// Based on Agave's ACCOUNT_DATA_COST_PAGE_SIZE = 32KB +pub const LOADED_ACCOUNTS_DATA_SIZE_COST_PER_32K: u64 = 8; + +/// Page size for loaded accounts data cost calculation (32KB). +pub const ACCOUNT_DATA_COST_PAGE_SIZE: u64 = 32 * 1024; + +/// Static cost for simple vote transactions (when feature is inactive). +/// Breakdown: 2100 (vote CUs) + 720 (1 sig) + 600 (2 write locks) + 8 (loaded data) +pub const SIMPLE_VOTE_USAGE_COST: u64 = sig.runtime.program.vote.COMPUTE_UNITS + + SIGNATURE_COST + + 2 * WRITE_LOCK_UNITS + + LOADED_ACCOUNTS_DATA_SIZE_COST_PER_32K; +comptime { + if (SIMPLE_VOTE_USAGE_COST != 3428) @compileError( + "SIMPLE_VOTE_USAGE_COST must be 3428 to match Agave's cost model", + ); +} + +/// Represents the calculated cost units for a transaction. +/// Can be either a static simple vote cost or dynamically calculated. +pub const TransactionCost = union(enum) { + /// Static cost for simple vote transactions (feature inactive) + simple_vote: void, + /// Dynamic cost calculation + transaction: UsageCostDetails, + + /// Returns the total cost units for this transaction. + pub fn total(self: TransactionCost) u64 { + return switch (self) { + .simple_vote => SIMPLE_VOTE_USAGE_COST, + .transaction => |details| details.total(), + }; + } + + pub fn programsExecutionCost(self: TransactionCost) u64 { + return switch (self) { + .simple_vote => sig.runtime.program.vote.COMPUTE_UNITS, + .transaction => |details| details.programs_execution_cost, + }; + } +}; + +/// Detailed cost breakdown for dynamically calculated transactions. +pub const UsageCostDetails = struct { + /// Cost for verifying signatures. + signature_cost: u64, + /// Cost for acquiring write locks. + write_lock_cost: u64, + /// Cost for instruction data bytes. + data_bytes_cost: u64, + /// Cost for program execution (compute units). + programs_execution_cost: u64, + /// Cost for loaded account data size. 
+ loaded_accounts_data_size_cost: u64, + + /// Returns the total cost units for this transaction. + pub fn total(self: UsageCostDetails) u64 { + return self.signature_cost + + self.write_lock_cost + + self.data_bytes_cost + + self.programs_execution_cost + + self.loaded_accounts_data_size_cost; + } +}; + +/// Calculate the cost units for a transaction before execution (estimation). +/// +/// This follows Agave's cost model which calculates costs based on: +/// 1. Number of signatures (720 CU per signature) +/// 2. Number of write locks (300 CU per write lock) +/// 3. Instruction data bytes (1 CU per 4 bytes) +/// 4. Compute unit limit (from compute budget or default) +/// 5. Loaded accounts data size (8 CU per 32KB page) +/// +/// When the `stop_use_static_simple_vote_tx_cost` feature is inactive, +/// simple vote transactions use a static cost of 3428 CU. +/// +/// See: https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/cost-model/src/cost_model.rs#L37 +pub fn calculateTransactionCost( + transaction: *const RuntimeTransaction, + compute_budget_limits: *const ComputeBudgetLimits, + loaded_accounts_data_size: u32, + feature_set: *const FeatureSet, + slot: Slot, +) TransactionCost { + return calculateTransactionCostInternal( + transaction, + compute_budget_limits.compute_unit_limit, + loaded_accounts_data_size, + feature_set, + slot, + ); +} + +/// Calculate the cost units for an executed transaction using actual consumed CUs. +/// +/// This should be used for calculating costs after execution, where we know +/// the actual compute units consumed rather than using the budget limit. +/// This matches Agave's `calculate_cost_for_executed_transaction`. 
+/// +/// See: https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/cost-model/src/cost_model.rs#L61 +pub fn calculateCostForExecutedTransaction( + transaction: *const RuntimeTransaction, + actual_programs_execution_cost: u64, + loaded_accounts_data_size: u32, + feature_set: *const FeatureSet, + slot: Slot, +) TransactionCost { + return calculateTransactionCostInternal( + transaction, + actual_programs_execution_cost, + loaded_accounts_data_size, + feature_set, + slot, + ); +} + +/// Calculate the total signature verification cost for a transaction. +/// Includes transaction signatures AND precompile instruction signatures. +/// Mirrors Agave's `CostModel::get_signature_cost()`. +/// See: https://github.com/anza-xyz/agave/blob/eb30856ca804831f30d96f034a1cabd65c96184a/cost-model/src/cost_model.rs#L148 +fn getSignatureCost( + transaction: *const RuntimeTransaction, + feature_set: *const FeatureSet, + slot: Slot, +) u64 { + const precompiles = sig.runtime.program.precompiles; + + const ed25519_verify_cost = if (feature_set.active(.ed25519_precompile_verify_strict, slot)) + precompiles.ED25519_VERIFY_STRICT_COST + else + precompiles.ED25519_VERIFY_COST; + + const secp256r1_verify_cost = if (feature_set.active(.enable_secp256r1_precompile, slot)) + precompiles.SECP256R1_VERIFY_COST + else + 0; + + var n_secp256k1_instruction_signatures: u64 = 0; + var n_ed25519_instruction_signatures: u64 = 0; + var n_secp256r1_instruction_signatures: u64 = 0; + + for (transaction.instructions) |instruction| { + if (instruction.instruction_data.len == 0) continue; + + const program_id = instruction.program_meta.pubkey; + if (program_id.equals(&precompiles.secp256k1.ID)) { + n_secp256k1_instruction_signatures +|= instruction.instruction_data[0]; + } + if (program_id.equals(&precompiles.ed25519.ID)) { + n_ed25519_instruction_signatures +|= instruction.instruction_data[0]; + } + if (program_id.equals(&precompiles.secp256r1.ID)) { + 
n_secp256r1_instruction_signatures +|= instruction.instruction_data[0]; + } + } + + return transaction.signature_count *| precompiles.SIGNATURE_COST +| + n_secp256k1_instruction_signatures *| precompiles.SECP256K1_VERIFY_COST +| + n_ed25519_instruction_signatures *| ed25519_verify_cost +| + n_secp256r1_instruction_signatures *| secp256r1_verify_cost; +} + +/// Internal calculation function used by both pre-execution and post-execution cost calculation. +fn calculateTransactionCostInternal( + transaction: *const RuntimeTransaction, + programs_execution_cost: u64, + loaded_accounts_data_size: u32, + feature_set: *const FeatureSet, + slot: Slot, +) TransactionCost { + // Check if we should remove simple vote cost + const remove_simple_vote_cost = feature_set.active(.remove_simple_vote_from_cost_model, slot); + + if (transaction.isSimpleVoteTransaction() and !remove_simple_vote_cost) { + return .{ .simple_vote = {} }; + } + + // Dynamic calculation + // 1. Signature cost: includes transaction sigs + precompile sigs (ed25519, secp256k1, secp256r1) + const signature_cost = getSignatureCost(transaction, feature_set, slot); + + // 2. Write lock cost: 300 CU per writable account + var write_lock_count: u64 = 0; + for (transaction.accounts.items(.is_writable)) |is_writable| { + if (is_writable) write_lock_count += 1; + } + const write_lock_cost = write_lock_count * WRITE_LOCK_UNITS; + + // 3. Instruction data bytes cost: 1 CU per INSTRUCTION_DATA_BYTES_PER_UNIT bytes (4 bytes) + var total_instruction_data_len: u64 = 0; + for (transaction.instructions) |instruction| { + total_instruction_data_len += instruction.instruction_data.len; + } + // Truncating division (matches Agave) + const data_bytes_cost = total_instruction_data_len / INSTRUCTION_DATA_BYTES_PER_UNIT; + + // 4. Programs execution cost: passed in (either limit for estimation, or actual consumed) + + // 5. 
Loaded accounts data size cost: 8 CU per 32KB page + // This is calculated based on the actual loaded account data size + const loaded_accounts_data_size_cost = calculateLoadedAccountsDataSizeCost( + loaded_accounts_data_size, + ); + + return .{ + .transaction = .{ + .signature_cost = signature_cost, + .write_lock_cost = write_lock_cost, + .data_bytes_cost = data_bytes_cost, + .programs_execution_cost = programs_execution_cost, + .loaded_accounts_data_size_cost = loaded_accounts_data_size_cost, + }, + }; +} + +/// Calculate the cost for loaded accounts data size. +/// Returns 8 CU per 32KB page (rounded up). +fn calculateLoadedAccountsDataSizeCost(loaded_accounts_data_size: u32) u64 { + if (loaded_accounts_data_size == 0) return 0; + + // Round up to the next 32KB page + const size: u64 = loaded_accounts_data_size; + const pages = (size + ACCOUNT_DATA_COST_PAGE_SIZE - 1) / ACCOUNT_DATA_COST_PAGE_SIZE; + return pages * LOADED_ACCOUNTS_DATA_SIZE_COST_PER_32K; +} + +test "runtime.cost_model.calculateLoadedAccountsDataSizeCost" { + // 0 bytes = 0 cost + try std.testing.expectEqual(@as(u64, 0), calculateLoadedAccountsDataSizeCost(0)); + + // 1 byte = 1 page = 8 CU + try std.testing.expectEqual(@as(u64, 8), calculateLoadedAccountsDataSizeCost(1)); + + // 32KB exactly = 1 page = 8 CU + try std.testing.expectEqual(@as(u64, 8), calculateLoadedAccountsDataSizeCost(32 * 1024)); + + // 32KB + 1 = 2 pages = 16 CU + try std.testing.expectEqual(@as(u64, 16), calculateLoadedAccountsDataSizeCost(32 * 1024 + 1)); + + // 64KB = 2 pages = 16 CU + try std.testing.expectEqual(@as(u64, 16), calculateLoadedAccountsDataSizeCost(64 * 1024)); +} + +test "runtime.cost_model.UsageCostDetails.total" { + const cost = UsageCostDetails{ + .signature_cost = SIGNATURE_COST, + .write_lock_cost = 2 * WRITE_LOCK_UNITS, + .data_bytes_cost = 10, + .programs_execution_cost = 200_000, + .loaded_accounts_data_size_cost = LOADED_ACCOUNTS_DATA_SIZE_COST_PER_32K, + }; + try std.testing.expectEqual(@as(u64, 
201_338), cost.total()); +} + +test "runtime.cost_model.TransactionCost.total for simple_vote" { + const cost = TransactionCost{ .simple_vote = {} }; + try std.testing.expectEqual(@as(u64, SIMPLE_VOTE_USAGE_COST), cost.total()); +} + +test "runtime.cost_model.TransactionCost.total for transaction" { + const cost = TransactionCost{ + .transaction = .{ + .signature_cost = SIGNATURE_COST, + .write_lock_cost = 2 * WRITE_LOCK_UNITS, + .data_bytes_cost = 10, + .programs_execution_cost = 200_000, + .loaded_accounts_data_size_cost = LOADED_ACCOUNTS_DATA_SIZE_COST_PER_32K, + }, + }; + try std.testing.expectEqual(@as(u64, 201_338), cost.total()); +} + +test "runtime.cost_model.TransactionCost.programsExecutionCost for simple_vote" { + const cost = TransactionCost{ .simple_vote = {} }; + // Simple vote transactions use a static execution cost of 2100 CU (vote program default) + try std.testing.expectEqual( + @as(u64, sig.runtime.program.vote.COMPUTE_UNITS), + cost.programsExecutionCost(), + ); +} + +test "runtime.cost_model.TransactionCost.programsExecutionCost for transaction" { + const cost = TransactionCost{ + .transaction = .{ + .signature_cost = SIGNATURE_COST, + .write_lock_cost = 2 * WRITE_LOCK_UNITS, + .data_bytes_cost = 10, + .programs_execution_cost = 150_000, + .loaded_accounts_data_size_cost = LOADED_ACCOUNTS_DATA_SIZE_COST_PER_32K, + }, + }; + // Should return the actual programs_execution_cost from the details + try std.testing.expectEqual(@as(u64, 150_000), cost.programsExecutionCost()); +} diff --git a/src/runtime/executor.zig b/src/runtime/executor.zig index 18db23b4e5..e06eeefc45 100644 --- a/src/runtime/executor.zig +++ b/src/runtime/executor.zig @@ -46,7 +46,10 @@ pub fn executeNativeCpiInstruction( signers: []const Pubkey, ) (error{OutOfMemory} || InstructionError)!void { const instruction_info = try prepareCpiInstructionInfo(tc, instruction, signers); - defer instruction_info.deinit(tc.allocator); + // NOTE: We don't call instruction_info.deinit() here 
because the InstructionInfo is stored + // in the instruction_trace (by value copy in pushInstruction). The trace needs the account_metas + // memory to remain valid until the transaction completes. Cleanup happens in + // TransactionContext.deinit() which iterates over the trace and deinits each CPI entry. try executeInstruction(allocator, tc, instruction_info); } @@ -156,12 +159,14 @@ fn processNextInstruction( return InstructionError.UnsupportedProgramId; }; - const builtin = program.PRECOMPILE.get(&builtin_id) orelse blk: { + const builtin, const is_precompile = if (program.PRECOMPILE.get(&builtin_id)) |builtin| + .{ builtin, true } + else blk: { // Only clear the return data if it is a native program. const builtin = program.NATIVE.get(&builtin_id) orelse return InstructionError.UnsupportedProgramId; tc.return_data.data.len = 0; - break :blk builtin; + break :blk .{ builtin, false }; }; // Emulate Agave's program_map by checking the feature gates here. @@ -170,14 +175,19 @@ fn processNextInstruction( return InstructionError.UnsupportedProgramId; }; - // Invoke the program and log the result - // [agave] https://github.com/anza-xyz/agave/blob/v3.1.4/program-runtime/src/invoke_context.rs#L549 - // [fd] https://github.com/firedancer-io/firedancer/blob/913e47274b135963fe8433a1e94abb9b42ce6253/src/flamenco/runtime/fd_executor.c#L1347-L1359 - try stable_log.programInvoke( - ic.tc, - program_id, - ic.tc.instruction_stack.len, - ); + // NOTE: Precompiles do not log invocations because they are not considered "programs" in the same sense as BPF or native programs, and they may be called by other programs which would already log the invocation. + // Additionally, some precompiles are used for utility functions that may be called frequently, and logging every invocation could lead to excessive log spam. 
+ // For example, the Keccak256 precompile is often used for hashing in other programs, and logging every call to it would generate a large number of logs that may not be useful for end users. + if (!is_precompile) { + // Invoke the program and log the result + // [agave] https://github.com/anza-xyz/agave/blob/v3.1.4/program-runtime/src/invoke_context.rs#L549 + // [fd] https://github.com/firedancer-io/firedancer/blob/913e47274b135963fe8433a1e94abb9b42ce6253/src/flamenco/runtime/fd_executor.c#L1347-L1359 + try stable_log.programInvoke( + ic.tc, + program_id, + ic.tc.instruction_stack.len, + ); + } { const program_execute = tracy.Zone.init(@src(), .{ .name = "runtime: execute program" }); @@ -188,7 +198,7 @@ fn processNextInstruction( // This approach to failure logging is used to prevent requiring all native programs to return // an ExecutionError. Instead, native programs return an InstructionError, and more granular // failure logging for bpf programs is handled in the BPF executor. - if (err != InstructionError.ProgramFailedToComplete) { + if (err != InstructionError.ProgramFailedToComplete and !is_precompile) { try stable_log.programFailure( ic.tc, program_id, @@ -199,11 +209,13 @@ fn processNextInstruction( }; } - // Log the success, if the execution did not return an error. - try stable_log.programSuccess( - ic.tc, - program_id, - ); + if (!is_precompile) { + // Log the success, if the execution did not return an error. + try stable_log.programSuccess( + ic.tc, + program_id, + ); + } } /// Pop an instruction from the instruction stack\ @@ -337,6 +349,16 @@ pub fn prepareCpiInstructionInfo( break :blk program_account_meta.index_in_transaction; }; + // Clone instruction data so the trace preserves each CPI's data after + // the caller's serialized VM memory is freed. 
+ // Without this, trace entries hold dangling pointers into the caller's + // serialized input buffer (freed in executeBpfProgram via + // `defer serialized.deinit`), resulting in use-after-free when the + // trace is later read during transaction status construction. + // [agave] Uses Cow::Owned(instruction.data) for CPI instructions. + const owned_data = try tc.allocator.dupe(u8, callee.data); + errdefer tc.allocator.free(owned_data); + return .{ .program_meta = .{ .pubkey = callee.program_id, @@ -344,8 +366,8 @@ pub fn prepareCpiInstructionInfo( }, .account_metas = deduped_account_metas, .dedupe_map = dedupe_map, - .instruction_data = callee.data, - .owned_instruction_data = false, + .instruction_data = owned_data, + .owned_instruction_data = true, .initial_account_lamports = 0, }; } @@ -373,7 +395,7 @@ test pushInstruction { deinitAccountMap(cache, allocator); } - var instruction_info = try testing.createInstructionInfo( + const instruction_info = try testing.createInstructionInfo( &tc, system_program.ID, system_program.Instruction{ @@ -386,7 +408,12 @@ test pushInstruction { .{ .index_in_transaction = 1 }, }, ); - defer instruction_info.deinit(allocator); + // NOTE: instruction_info is not deinitialized here because it gets copied into + // tc.instruction_trace multiple times (sharing the same account_metas memory). + // The trace entry with depth > 1 will be cleaned up by tc.deinit(), which frees + // the shared account_metas. The depth == 1 entry is not cleaned up by tc.deinit() + // (as it's considered owned externally), but since both share the same memory, + // it's already freed when the depth > 1 entry is cleaned up. 
// Success try pushInstruction(&tc, instruction_info); diff --git a/src/runtime/ids.zig b/src/runtime/ids.zig index 8fc6461721..212346f529 100644 --- a/src/runtime/ids.zig +++ b/src/runtime/ids.zig @@ -22,3 +22,7 @@ pub const FEATURE_PROGRAM_SOURCE_ID: Pubkey = pub const ZK_TOKEN_PROOF_PROGRAM_ID: Pubkey = .parse("ZkTokenProof1111111111111111111111111111111"); pub const INCINERATOR: Pubkey = .parse("1nc1nerator11111111111111111111111111111111"); + +// SPL Token Program IDs +pub const TOKEN_PROGRAM_ID: Pubkey = .parse("TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"); +pub const TOKEN_2022_PROGRAM_ID: Pubkey = .parse("TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb"); diff --git a/src/runtime/lib.zig b/src/runtime/lib.zig index 6f399a2a76..8fb4147d22 100644 --- a/src/runtime/lib.zig +++ b/src/runtime/lib.zig @@ -2,6 +2,7 @@ pub const account_loader = @import("account_loader.zig"); pub const borrowed_account = @import("borrowed_account.zig"); pub const check_transactions = @import("check_transactions.zig"); pub const ComputeBudget = @import("ComputeBudget.zig"); +pub const cost_model = @import("cost_model.zig"); pub const executor = @import("executor.zig"); pub const ids = @import("ids.zig"); pub const instruction_context = @import("instruction_context.zig"); @@ -11,6 +12,7 @@ pub const nonce = @import("nonce.zig"); pub const program = @import("program/lib.zig"); pub const program_loader = @import("program_loader.zig"); pub const pubkey_utils = @import("pubkey_utils.zig"); +pub const spl_token = @import("spl_token.zig"); pub const stable_log = @import("stable_log.zig"); pub const sysvar = @import("sysvar/lib.zig"); pub const sysvar_cache = @import("sysvar_cache.zig"); diff --git a/src/runtime/program/address_lookup_table/execute.zig b/src/runtime/program/address_lookup_table/execute.zig index b1d315ca66..9909be3483 100644 --- a/src/runtime/program/address_lookup_table/execute.zig +++ b/src/runtime/program/address_lookup_table/execute.zig @@ -173,7 +173,7 @@ fn 
createLookupTable( table_key, required_lamports, ); - defer allocator.free(transfer_instruction.data); + defer transfer_instruction.deinit(allocator); try runtime.executor.executeNativeCpiInstruction( allocator, ic.tc, @@ -189,7 +189,7 @@ fn createLookupTable( table_key, LOOKUP_TABLE_META_SIZE, ); - defer allocator.free(allocate_instruction.data); + defer allocate_instruction.deinit(allocator); try runtime.executor.executeNativeCpiInstruction( allocator, ic.tc, @@ -201,7 +201,7 @@ fn createLookupTable( // [agave] https://github.com/anza-xyz/agave/blob/8116c10021f09c806159852f65d37ffe6d5a118e/programs/address-lookup-table/src/processor.rs#L157 { const assign_instruction = try system_program.assign(allocator, table_key, program.ID); - defer allocator.free(assign_instruction.data); + defer assign_instruction.deinit(allocator); try runtime.executor.executeNativeCpiInstruction( allocator, ic.tc, @@ -436,7 +436,7 @@ fn extendLookupTable( table_key, required_lamports, ); - defer allocator.free(transfer_instruction.data); + defer transfer_instruction.deinit(allocator); try runtime.executor.executeNativeCpiInstruction( allocator, ic.tc, diff --git a/src/runtime/program/bpf_loader/execute.zig b/src/runtime/program/bpf_loader/execute.zig index 698c3c6f3b..6febc018d8 100644 --- a/src/runtime/program/bpf_loader/execute.zig +++ b/src/runtime/program/bpf_loader/execute.zig @@ -2110,8 +2110,6 @@ pub fn deployProgram( if (tc.log_collector) |*lc| lc else null, ); - try tc.log("Deploying program {f}", .{program_id}); - // Remove from the program map since it should not be accessible on this slot anymore. 
if (try tc.program_map.fetchPut(tc.programs_allocator, program_id, .failed)) |old| { old.deinit(tc.programs_allocator); diff --git a/src/runtime/program/compute_budget/lib.zig b/src/runtime/program/compute_budget/lib.zig index b3f5de077d..c1d1e15e97 100644 --- a/src/runtime/program/compute_budget/lib.zig +++ b/src/runtime/program/compute_budget/lib.zig @@ -67,11 +67,13 @@ pub const ComputeBudgetLimits = struct { .loaded_accounts_bytes = MAX_LOADED_ACCOUNTS_DATA_SIZE_BYTES, }; - pub fn intoComputeBudget(self: ComputeBudgetLimits) sig.runtime.ComputeBudget { - // TODO: It would make sense for us to perform a similar refactor to Agave, - // and split up into seperate cost and budget structs. We hardcode SIMD-339 - // false here, since there is no other "good" alternative without a refactor. - var default = sig.runtime.ComputeBudget.init(self.compute_unit_limit, false); + pub fn intoComputeBudget( + self: ComputeBudgetLimits, + feature_set: *const sig.core.FeatureSet, + slot: sig.core.Slot, + ) sig.runtime.ComputeBudget { + const simd_0339_active = feature_set.active(.increase_cpi_account_info_limit, slot); + var default = sig.runtime.ComputeBudget.init(self.compute_unit_limit, simd_0339_active); default.heap_size = self.heap_size; return default; } diff --git a/src/runtime/program/precompiles/lib.zig b/src/runtime/program/precompiles/lib.zig index db81bdb3a3..f4b19aff1f 100644 --- a/src/runtime/program/precompiles/lib.zig +++ b/src/runtime/program/precompiles/lib.zig @@ -22,6 +22,10 @@ pub const SIGNATURE_COST: u64 = COMPUTE_UNIT_TO_US_RATIO * 24; pub const SECP256K1_VERIFY_COST: u64 = COMPUTE_UNIT_TO_US_RATIO * 223; /// Number of compute units for one ed25519 signature verification. pub const ED25519_VERIFY_COST: u64 = COMPUTE_UNIT_TO_US_RATIO * 76; +/// Number of compute units for one ed25519 strict signature verification. 
+pub const ED25519_VERIFY_STRICT_COST: u64 = COMPUTE_UNIT_TO_US_RATIO * 80; +/// Number of compute units for one secp256r1 signature verification. +pub const SECP256R1_VERIFY_COST: u64 = COMPUTE_UNIT_TO_US_RATIO * 160; pub const PRECOMPILES = [_]Precompile{ .{ @@ -66,8 +70,7 @@ pub fn verifyPrecompilesComputeCost( } } - return transaction.msg.signature_count *| SIGNATURE_COST +| - n_secp256k1_instruction_signatures *| SECP256K1_VERIFY_COST +| + return n_secp256k1_instruction_signatures *| SECP256K1_VERIFY_COST +| n_ed25519_instruction_signatures *| ED25519_VERIFY_COST; } @@ -273,9 +276,9 @@ test "verify cost" { .signatures = &.{}, }; - const expected_cost = 1 *| SIGNATURE_COST +| 1 *| ED25519_VERIFY_COST; - // cross-checked with agave (FeatureSet::default()) - try std.testing.expectEqual(3000, expected_cost); + const expected_cost = ED25519_VERIFY_COST; + // ED25519_VERIFY_COST = 2280 (30 * 76), non-strict ed25519 verification cost + try std.testing.expectEqual(2280, ED25519_VERIFY_COST); const compute_units = verifyPrecompilesComputeCost(ed25519_tx, &.ALL_DISABLED); try std.testing.expectEqual(expected_cost, compute_units); diff --git a/src/runtime/program/stake/lib.zig b/src/runtime/program/stake/lib.zig index be21ec14b4..a446dd747e 100644 --- a/src/runtime/program/stake/lib.zig +++ b/src/runtime/program/stake/lib.zig @@ -21,7 +21,8 @@ const VoteStateV3 = runtime.program.vote.state.VoteStateV3; const VoteStateV4 = runtime.program.vote.state.VoteStateV4; const ExecuteContextsParams = runtime.testing.ExecuteContextsParams; -const Instruction = instruction.Instruction; +pub const Instruction = instruction.Instruction; +pub const LockupArgs = instruction.LockupArgs; const InstructionContext = runtime.InstructionContext; const BorrowedAccount = runtime.BorrowedAccount; diff --git a/src/runtime/program/system/lib.zig b/src/runtime/program/system/lib.zig index 3828d4bddd..29a6248e43 100644 --- a/src/runtime/program/system/lib.zig +++ b/src/runtime/program/system/lib.zig 
@@ -29,14 +29,17 @@ pub fn transfer( to: Pubkey, lamports: u64, ) error{OutOfMemory}!sig.core.Instruction { + const accounts = try allocator.dupe(sig.core.instruction.InstructionAccount, &.{ + .{ .pubkey = from, .is_signer = true, .is_writable = true }, + .{ .pubkey = to, .is_signer = false, .is_writable = true }, + }); + errdefer allocator.free(accounts); + return try sig.core.Instruction.initUsingBincodeAlloc( allocator, Instruction, ID, - &.{ - .{ .pubkey = from, .is_signer = true, .is_writable = true }, - .{ .pubkey = to, .is_signer = false, .is_writable = true }, - }, + accounts, &.{ .transfer = .{ .lamports = lamports } }, ); } @@ -47,13 +50,18 @@ pub fn allocate( pubkey: Pubkey, space: u64, ) error{OutOfMemory}!sig.core.Instruction { + const accounts = try allocator.dupe(sig.core.instruction.InstructionAccount, &.{.{ + .pubkey = pubkey, + .is_signer = true, + .is_writable = true, + }}); + errdefer allocator.free(accounts); + return try sig.core.Instruction.initUsingBincodeAlloc( allocator, Instruction, ID, - &.{ - .{ .pubkey = pubkey, .is_signer = true, .is_writable = true }, - }, + accounts, &.{ .allocate = .{ .space = space } }, ); } @@ -64,13 +72,63 @@ pub fn assign( pubkey: Pubkey, owner: Pubkey, ) error{OutOfMemory}!sig.core.Instruction { + const accounts = try allocator.dupe(sig.core.instruction.InstructionAccount, &.{.{ + .pubkey = pubkey, + .is_signer = true, + .is_writable = true, + }}); + errdefer allocator.free(accounts); + return try sig.core.Instruction.initUsingBincodeAlloc( allocator, Instruction, ID, - &.{ - .{ .pubkey = pubkey, .is_signer = true, .is_writable = true }, - }, + accounts, &.{ .assign = .{ .owner = owner } }, ); } + +test "allocate creates instruction with correct program id and accounts" { + const allocator = std.testing.allocator; + const pubkey = Pubkey{ .data = [_]u8{0xAA} ** 32 }; + + const ix = try allocate(allocator, pubkey, 1024); + defer ix.deinit(allocator); + + // Program ID should be the system program + try 
std.testing.expect(ix.program_id.equals(&ID)); + + // Should have exactly 1 account + try std.testing.expectEqual(@as(usize, 1), ix.accounts.len); + try std.testing.expect(ix.accounts[0].pubkey.equals(&pubkey)); + try std.testing.expect(ix.accounts[0].is_signer); + try std.testing.expect(ix.accounts[0].is_writable); + + // Data should deserialize back to the allocate instruction + const decoded = sig.bincode.readFromSlice(allocator, Instruction, ix.data, .{}) catch + return error.TestUnexpectedResult; + try std.testing.expectEqual(@as(u64, 1024), decoded.allocate.space); +} + +test "assign creates instruction with correct program id and accounts" { + const allocator = std.testing.allocator; + const pubkey = Pubkey{ .data = [_]u8{0xBB} ** 32 }; + const owner = Pubkey{ .data = [_]u8{0xCC} ** 32 }; + + const ix = try assign(allocator, pubkey, owner); + defer ix.deinit(allocator); + + // Program ID should be the system program + try std.testing.expect(ix.program_id.equals(&ID)); + + // Should have exactly 1 account + try std.testing.expectEqual(@as(usize, 1), ix.accounts.len); + try std.testing.expect(ix.accounts[0].pubkey.equals(&pubkey)); + try std.testing.expect(ix.accounts[0].is_signer); + try std.testing.expect(ix.accounts[0].is_writable); + + // Data should deserialize back to the assign instruction + const decoded = sig.bincode.readFromSlice(allocator, Instruction, ix.data, .{}) catch + return error.TestUnexpectedResult; + try std.testing.expect(decoded.assign.owner.equals(&owner)); +} diff --git a/src/runtime/spl_token.zig b/src/runtime/spl_token.zig new file mode 100644 index 0000000000..51d3ae4c4c --- /dev/null +++ b/src/runtime/spl_token.zig @@ -0,0 +1,1423 @@ +//! SPL Token account parsing for token balance extraction. +//! +//! This module provides parsing of SPL Token and Token-2022 account data +//! to extract token balances for transaction metadata (preTokenBalances/postTokenBalances). +//! +//! References: +//! 
- SPL Token: https://github.com/solana-labs/solana-program-library/tree/master/token/program +//! - Token-2022: https://github.com/solana-labs/solana-program-library/tree/master/token/program-2022 + +const std = @import("std"); +const std14 = @import("std14"); +const sig = @import("../sig.zig"); + +const account_loader = sig.runtime.account_loader; + +const Allocator = std.mem.Allocator; +const Pubkey = sig.core.Pubkey; + +const ids = sig.runtime.ids; +const TransactionTokenBalance = sig.ledger.transaction_status.TransactionTokenBalance; +const UiTokenAmount = sig.ledger.transaction_status.UiTokenAmount; + +// SPL Token account layout constants +pub const TOKEN_ACCOUNT_SIZE: usize = 165; +pub const MINT_ACCOUNT_SIZE: usize = 82; + +// Token account layout offsets +const MINT_OFFSET: usize = 0; +const OWNER_OFFSET: usize = 32; +const AMOUNT_OFFSET: usize = 64; +const STATE_OFFSET: usize = 108; + +// Mint account layout offsets +const MINT_DECIMALS_OFFSET: usize = 44; +const MINT_IS_INITIALIZED_OFFSET: usize = 45; + +/// Token account state enum +pub const TokenAccountState = enum(u8) { + uninitialized = 0, + initialized = 1, + frozen = 2, +}; + +/// Parsed SPL Token account data +pub const ParsedTokenAccount = struct { + mint: Pubkey, + owner: Pubkey, + amount: u64, + state: TokenAccountState, + + /// Parse a token account from raw account data. + /// Returns null if the data is invalid or the account is not initialized. 
+ pub fn parse(data: []const u8) ?ParsedTokenAccount { + if (data.len < TOKEN_ACCOUNT_SIZE) return null; + + // Check state - must be initialized or frozen + const state_byte = data[STATE_OFFSET]; + const state: TokenAccountState = std.meta.intToEnum( + TokenAccountState, + state_byte, + ) catch return null; + if (state == .uninitialized) return null; + + return .{ + .mint = Pubkey{ .data = data[MINT_OFFSET..][0..32].* }, + .owner = Pubkey{ .data = data[OWNER_OFFSET..][0..32].* }, + .amount = std.mem.readInt(u64, data[AMOUNT_OFFSET..][0..8], .little), + .state = state, + }; + } +}; + +/// Parsed SPL Token mint data +pub const ParsedMint = struct { + decimals: u8, + is_initialized: bool, + + /// Parse a mint account from raw account data. + /// Returns null if the data is invalid or the mint is not initialized. + pub fn parse(data: []const u8) ?ParsedMint { + if (data.len < MINT_ACCOUNT_SIZE) return null; + + const is_initialized = data[MINT_IS_INITIALIZED_OFFSET] != 0; + if (!is_initialized) return null; + + return .{ + .decimals = data[MINT_DECIMALS_OFFSET], + .is_initialized = true, + }; + } +}; + +/// Check if the given program ID is a token program (SPL Token or Token-2022) +pub fn isTokenProgram(program_id: Pubkey) bool { + return program_id.equals(&ids.TOKEN_PROGRAM_ID) or + program_id.equals(&ids.TOKEN_2022_PROGRAM_ID); +} + +/// Raw token balance data captured during transaction execution. +/// This struct stores the essential token account information without +/// requiring mint decimals lookup, which can be deferred to later processing. +pub const RawTokenBalance = struct { + account_index: u8, + mint: Pubkey, + owner: Pubkey, + amount: u64, + program_id: Pubkey, +}; + +/// Bounded array type for storing raw token balances during execution. +/// Uses the same max size as account locks since each account can have at most one token balance. 
+pub const RawTokenBalances = std14.BoundedArray(
+    RawTokenBalance,
+    account_loader.MAX_TX_ACCOUNT_LOCKS,
+);
+
+/// Collect raw token balance data from loaded accounts.
+/// This is used during transaction execution to capture pre-execution token balances.
+/// Unlike collectTokenBalances, this doesn't require mint decimals lookup.
+///
+/// Arguments:
+/// - accounts: Slice of loaded accounts to scan for token accounts
+///
+/// Returns a bounded array of RawTokenBalance entries.
+pub fn collectRawTokenBalances(
+    accounts: []const sig.runtime.account_loader.LoadedAccount,
+) RawTokenBalances {
+    var result = RawTokenBalances{};
+
+    for (accounts, 0..) |account, idx| {
+        // Skip non-token accounts
+        if (!isTokenProgram(account.account.owner)) continue;
+
+        // Skip if data is too short for a token account
+        if (account.account.data.len < TOKEN_ACCOUNT_SIZE) continue;
+
+        // Try to parse as token account
+        const parsed = ParsedTokenAccount.parse(
+            account.account.data[0..TOKEN_ACCOUNT_SIZE],
+        ) orelse continue;
+
+        // Add to result (won't fail since we can't have more token accounts than total accounts)
+        result.append(.{
+            .account_index = @intCast(idx),
+            .mint = parsed.mint,
+            .owner = parsed.owner,
+            .amount = parsed.amount,
+            .program_id = account.account.owner,
+        }) catch unreachable;
+    }
+
+    return result;
+}
+
+/// Convert RawTokenBalances to TransactionTokenBalance slice for RPC responses.
+/// This resolves mint decimals using the provided account reader.
+///
+/// Arguments:
+/// - allocator: Used for allocating the result
+/// - raw_balances: Raw token balances captured during execution
+/// - mint_decimals_cache: Cache for mint decimals
+/// - account_reader: Reader to fetch mint accounts for decimals lookup
+///
+/// Returns a slice of TransactionTokenBalance that must be freed by the caller.
+/// Returns null when there are no raw balances; entries whose mint lookup fails are skipped (graceful degradation).
+pub fn resolveTokenBalances( + allocator: Allocator, + raw_balances: RawTokenBalances, + mint_decimals_cache: *MintDecimalsCache, + comptime AccountReaderType: type, + account_reader: AccountReaderType, +) error{OutOfMemory}!?[]TransactionTokenBalance { + if (raw_balances.len == 0) return null; + + var result = try std.ArrayList(TransactionTokenBalance).initCapacity( + allocator, + raw_balances.len, + ); + errdefer { + for (result.items) |item| item.deinit(allocator); + result.deinit(allocator); + } + + for (raw_balances.constSlice()) |raw| { + // Get decimals for this mint (skip if not found) + const decimals = getMintDecimals( + allocator, + mint_decimals_cache, + AccountReaderType, + account_reader, + raw.mint, + ) catch continue; // Skip tokens with missing mints + + // Format the token amount + const ui_token_amount = try formatTokenAmount( + allocator, + raw.amount, + decimals, + ); + errdefer ui_token_amount.deinit(allocator); + + result.appendAssumeCapacity(.{ + .account_index = raw.account_index, + .mint = raw.mint, + .owner = raw.owner, + .program_id = raw.program_id, + .ui_token_amount = ui_token_amount, + }); + } + + return try result.toOwnedSlice(allocator); +} + +/// Cache for mint decimals to avoid repeated lookups +pub const MintDecimalsCache = struct { + map: std.AutoHashMap(Pubkey, u8), + allocator: Allocator, + + pub fn init(allocator: Allocator) MintDecimalsCache { + return .{ + .map = std.AutoHashMap(Pubkey, u8).init(allocator), + .allocator = allocator, + }; + } + + pub fn deinit(self: *MintDecimalsCache) void { + self.map.deinit(); + } + + pub fn get(self: *MintDecimalsCache, mint: Pubkey) ?u8 { + return self.map.get(mint); + } + + pub fn put(self: *MintDecimalsCache, mint: Pubkey, decimals: u8) !void { + try self.map.put(mint, decimals); + } +}; + +/// Format a token amount as UiTokenAmount for RPC responses. 
+pub fn formatTokenAmount( + allocator: Allocator, + amount: u64, + decimals: u8, +) error{OutOfMemory}!UiTokenAmount { + // Convert amount to string + const amount_str = try std.fmt.allocPrint(allocator, "{d}", .{amount}); + errdefer allocator.free(amount_str); + + // Calculate UI amount + const divisor = std.math.pow(f64, 10.0, @floatFromInt(decimals)); + const ui_amount: f64 = @as(f64, @floatFromInt(amount)) / divisor; + + // Format UI amount string with proper decimal places (using integer math for full precision) + const ui_amount_string = try realNumberStringTrimmed(allocator, amount, decimals); + errdefer allocator.free(ui_amount_string); + + return .{ + .ui_amount = ui_amount, + .decimals = decimals, + .amount = amount_str, + .ui_amount_string = ui_amount_string, + }; +} + +/// Format an integer token amount as a decimal string with full precision. +/// Matches Agave's `real_number_string` from account-decoder-client-types/src/token.rs. +/// +/// Examples (amount, decimals) -> result: +/// (1_000_000_000, 9) -> "1.000000000" +/// (1_234_567_890, 3) -> "1234567.890" +/// (42, 0) -> "42" +fn realNumberString(allocator: Allocator, amount: u64, decimals: u8) error{OutOfMemory}![]const u8 { + if (decimals == 0) { + return try std.fmt.allocPrint(allocator, "{d}", .{amount}); + } + + // Format amount as string, left-padded with zeros to at least decimals+1 digits + const dec: usize = @intCast(decimals); + const raw = try std.fmt.allocPrint(allocator, "{d}", .{amount}); + defer allocator.free(raw); + + // Pad with leading zeros if needed so we have at least decimals+1 chars + const min_len = dec + 1; + const padded = if (raw.len < min_len) blk: { + const buf = try allocator.alloc(u8, min_len); + const pad_count = min_len - raw.len; + @memset(buf[0..pad_count], '0'); + @memcpy(buf[pad_count..], raw); + break :blk buf; + } else try allocator.dupe(u8, raw); + defer allocator.free(padded); + + // Insert decimal point at position len - decimals + const dot_pos = 
padded.len - dec; + const result = try allocator.alloc(u8, padded.len + 1); + @memcpy(result[0..dot_pos], padded[0..dot_pos]); + result[dot_pos] = '.'; + @memcpy(result[dot_pos + 1 ..], padded[dot_pos..]); + + return result; +} + +/// Format an integer token amount as a trimmed decimal string with full precision. +/// Matches Agave's `real_number_string_trimmed` from account-decoder-client-types/src/token.rs. +/// +/// Examples (amount, decimals) -> result: +/// (1_000_000_000, 9) -> "1" +/// (1_234_567_890, 3) -> "1234567.89" +/// (600010892365405206, 9) -> "600010892.365405206" +pub fn realNumberStringTrimmed( + allocator: Allocator, + amount: u64, + decimals: u8, +) error{OutOfMemory}![]const u8 { + const s = try realNumberString(allocator, amount, decimals); + + if (decimals == 0) return s; + + // Trim trailing zeros, then trailing dot + var end = s.len; + while (end > 0 and s[end - 1] == '0') { + end -= 1; + } + if (end > 0 and s[end - 1] == '.') { + end -= 1; + } + + if (end == s.len) return s; + + const trimmed = try allocator.dupe(u8, s[0..end]); + allocator.free(s); + return trimmed; +} + +/// Collect token balances from a list of loaded accounts. +/// +/// This function scans the accounts for SPL Token accounts, parses them, +/// and returns token balance information for RPC responses. +/// +/// Arguments: +/// - allocator: Used for allocating the result +/// - accounts: List of (pubkey, owner, data) tuples to scan +/// - account_reader: Reader to fetch mint accounts for decimals lookup +/// +/// Returns a slice of TransactionTokenBalance that must be freed by the caller. 
+pub fn collectTokenBalances( + allocator: Allocator, + account_pubkeys: []const Pubkey, + account_owners: []const Pubkey, + account_datas: []const []const u8, + mint_decimals_cache: *MintDecimalsCache, + comptime AccountReaderType: type, + account_reader: AccountReaderType, +) error{ OutOfMemory, MintNotFound }![]TransactionTokenBalance { + std.debug.assert(account_pubkeys.len == account_owners.len); + std.debug.assert(account_pubkeys.len == account_datas.len); + + var result = std.ArrayList(TransactionTokenBalance).init(allocator); + errdefer { + for (result.items) |item| item.deinit(allocator); + result.deinit(); + } + + for (account_pubkeys, account_owners, account_datas, 0..) |_, owner, data, idx| { + // Skip non-token accounts + if (!isTokenProgram(owner)) continue; + + // Try to parse as token account + const parsed = ParsedTokenAccount.parse(data) orelse continue; + + // Get decimals for this mint + const decimals = try getMintDecimals( + allocator, + mint_decimals_cache, + AccountReaderType, + account_reader, + parsed.mint, + ); + + // Format the token amount + const ui_token_amount = try formatTokenAmount(allocator, parsed.amount, decimals); + errdefer ui_token_amount.deinit(allocator); + + // Create the token balance entry + const mint_str = try allocator.dupe(u8, &parsed.mint.data); + errdefer allocator.free(mint_str); + + const owner_str = try allocator.dupe(u8, &parsed.owner.data); + errdefer allocator.free(owner_str); + + const program_id_str = try allocator.dupe(u8, &owner.data); + errdefer allocator.free(program_id_str); + + try result.append(.{ + .account_index = @intCast(idx), + .mint = mint_str, + .owner = owner_str, + .program_id = program_id_str, + .ui_token_amount = ui_token_amount, + }); + } + + return try result.toOwnedSlice(); +} + +/// Get decimals for a mint, using cache or fetching from account reader. 
+fn getMintDecimals( + allocator: Allocator, + cache: *MintDecimalsCache, + comptime AccountReaderType: type, + account_reader: AccountReaderType, + mint: Pubkey, +) error{ OutOfMemory, MintNotFound }!u8 { + // Check cache first + if (cache.get(mint)) |decimals| return decimals; + + // Fetch mint account + const mint_account = account_reader.get(allocator, mint) catch return error.MintNotFound; + defer if (mint_account) |acct| acct.deinit(allocator); + + if (mint_account) |acct| { + const data = acct.data.constSlice(); + const parsed_mint = ParsedMint.parse(data) orelse return error.MintNotFound; + + // Cache the result + try cache.put(mint, parsed_mint.decimals); + return parsed_mint.decimals; + } + + return error.MintNotFound; +} + +// Tests +test "runtime.spl_token.ParsedTokenAccount.parse" { + const testing = std.testing; + + // Create a valid token account data blob + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + + // Set mint (first 32 bytes) + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + @memcpy(data[MINT_OFFSET..][0..32], &mint.data); + + // Set owner (next 32 bytes) + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + @memcpy(data[OWNER_OFFSET..][0..32], &owner.data); + + // Set amount (8 bytes at offset 64) + std.mem.writeInt(u64, data[AMOUNT_OFFSET..][0..8], 1_000_000, .little); + + // Set state to initialized (byte at offset 108) + data[STATE_OFFSET] = 1; + + const parsed = ParsedTokenAccount.parse(&data); + try testing.expect(parsed != null); + try testing.expectEqual(mint, parsed.?.mint); + try testing.expectEqual(owner, parsed.?.owner); + try testing.expectEqual(@as(u64, 1_000_000), parsed.?.amount); + try testing.expectEqual(TokenAccountState.initialized, parsed.?.state); +} + +test "runtime.spl_token.ParsedTokenAccount.parse rejects uninitialized" { + const testing = std.testing; + + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + // State = 0 (uninitialized) + data[STATE_OFFSET] = 0; + + const parsed 
= ParsedTokenAccount.parse(&data); + try testing.expect(parsed == null); +} + +test "runtime.spl_token.ParsedTokenAccount.parse rejects short data" { + const testing = std.testing; + + // Test with data that's too short - parse should return null + var data: [100]u8 = undefined; // Too short (TOKEN_ACCOUNT_SIZE is 165) + @memset(&data, 0); + + const parsed = ParsedTokenAccount.parse(&data); + try testing.expect(parsed == null); +} + +test "runtime.spl_token.ParsedMint.parse" { + const testing = std.testing; + + var data: [MINT_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + + // Set decimals + data[MINT_DECIMALS_OFFSET] = 6; + // Set is_initialized + data[MINT_IS_INITIALIZED_OFFSET] = 1; + + const parsed = ParsedMint.parse(&data); + try testing.expect(parsed != null); + try testing.expectEqual(@as(u8, 6), parsed.?.decimals); + try testing.expectEqual(true, parsed.?.is_initialized); +} + +test "runtime.spl_token.formatTokenAmount" { + const testing = std.testing; + const allocator = testing.allocator; + + // Test with 6 decimals (like USDC) + { + const result = try formatTokenAmount(allocator, 1_000_000, 6); + defer result.deinit(allocator); + + try testing.expectEqualStrings("1000000", result.amount); + try testing.expectEqual(@as(u8, 6), result.decimals); + try testing.expectApproxEqRel(@as(f64, 1.0), result.ui_amount.?, 0.0001); + } + + // Test with 9 decimals (like SOL) + { + const result = try formatTokenAmount(allocator, 1_500_000_000, 9); + defer result.deinit(allocator); + + try testing.expectEqualStrings("1500000000", result.amount); + try testing.expectEqual(@as(u8, 9), result.decimals); + try testing.expectApproxEqRel(@as(f64, 1.5), result.ui_amount.?, 0.0001); + } + + // Test with 0 decimals + { + const result = try formatTokenAmount(allocator, 42, 0); + defer result.deinit(allocator); + + try testing.expectEqualStrings("42", result.amount); + try testing.expectEqual(@as(u8, 0), result.decimals); + try testing.expectApproxEqRel(@as(f64, 42.0), 
result.ui_amount.?, 0.0001); + } +} + +test "runtime.spl_token.isTokenProgram" { + const testing = std.testing; + + try testing.expect(isTokenProgram(ids.TOKEN_PROGRAM_ID)); + try testing.expect(isTokenProgram(ids.TOKEN_2022_PROGRAM_ID)); + try testing.expect(!isTokenProgram(Pubkey.ZEROES)); + try testing.expect(!isTokenProgram(sig.runtime.program.system.ID)); +} + +test "runtime.spl_token.realNumberString: zero decimals" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, 42, 0); + defer allocator.free(result); + try std.testing.expectEqualStrings("42", result); +} + +test "runtime.spl_token.realNumberString: 9 decimals with exact SOL" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, 1_000_000_000, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("1.000000000", result); +} + +test "runtime.spl_token.realNumberString: 3 decimals" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, 1_234_567_890, 3); + defer allocator.free(result); + try std.testing.expectEqualStrings("1234567.890", result); +} + +test "runtime.spl_token.realNumberString: amount smaller than decimals requires padding" { + const allocator = std.testing.allocator; + // amount=42, decimals=6 -> "0.000042" + const result = try realNumberString(allocator, 42, 6); + defer allocator.free(result); + try std.testing.expectEqualStrings("0.000042", result); +} + +test "runtime.spl_token.realNumberString: zero amount with decimals" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, 0, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("0.000000000", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: trims trailing zeros" { + const allocator = std.testing.allocator; + // 1 SOL = 1_000_000_000 with 9 decimals -> "1" (all trailing zeros trimmed including dot) + const result = try 
realNumberStringTrimmed(allocator, 1_000_000_000, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("1", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: partial trailing zeros" { + const allocator = std.testing.allocator; + // 1_234_567_890 with 3 decimals -> "1234567.89" (one trailing zero trimmed) + const result = try realNumberStringTrimmed(allocator, 1_234_567_890, 3); + defer allocator.free(result); + try std.testing.expectEqualStrings("1234567.89", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: no trailing zeros" { + const allocator = std.testing.allocator; + // Agave example: 600010892365405206, 9 -> "600010892.365405206" + const result = try realNumberStringTrimmed(allocator, 600010892365405206, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("600010892.365405206", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: zero decimals" { + const allocator = std.testing.allocator; + const result = try realNumberStringTrimmed(allocator, 42, 0); + defer allocator.free(result); + try std.testing.expectEqualStrings("42", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: zero amount" { + const allocator = std.testing.allocator; + const result = try realNumberStringTrimmed(allocator, 0, 6); + defer allocator.free(result); + try std.testing.expectEqualStrings("0", result); +} + +test "runtime.spl_token.formatTokenAmount: ui_amount_string uses trimmed format" { + const allocator = std.testing.allocator; + // 1.5 SOL -> ui_amount_string should be "1.5", not "1.500000000" + const result = try formatTokenAmount(allocator, 1_500_000_000, 9); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("1500000000", result.amount); + try std.testing.expectEqualStrings("1.5", result.ui_amount_string); + try std.testing.expectEqual(@as(u8, 9), result.decimals); +} + +test "runtime.spl_token.formatTokenAmount: small fractional amount" { + const allocator = 
std.testing.allocator; + // 1 lamport = 0.000000001 SOL -> trimmed to "0.000000001" + const result = try formatTokenAmount(allocator, 1, 9); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("1", result.amount); + try std.testing.expectEqualStrings("0.000000001", result.ui_amount_string); +} + +test "runtime.spl_token.ParsedMint.parse: uninitialized returns null" { + var data: [MINT_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = 6; + data[MINT_IS_INITIALIZED_OFFSET] = 0; // uninitialized + + try std.testing.expect(ParsedMint.parse(&data) == null); +} + +test "runtime.spl_token.ParsedMint.parse: short data returns null" { + var data: [50]u8 = undefined; + @memset(&data, 0); + try std.testing.expect(ParsedMint.parse(&data) == null); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: frozen state" { + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + @memcpy(data[MINT_OFFSET..][0..32], &mint.data); + const owner = Pubkey{ .data = [_]u8{2} ** 32 }; + @memcpy(data[OWNER_OFFSET..][0..32], &owner.data); + std.mem.writeInt(u64, data[AMOUNT_OFFSET..][0..8], 500, .little); + data[STATE_OFFSET] = 2; // frozen + + const parsed = ParsedTokenAccount.parse(&data); + try std.testing.expect(parsed != null); + try std.testing.expectEqual(TokenAccountState.frozen, parsed.?.state); + try std.testing.expectEqual(@as(u64, 500), parsed.?.amount); +} + +test "runtime.spl_token.MintDecimalsCache: basic usage" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + try std.testing.expectEqual(@as(?u8, null), cache.get(mint)); + + try cache.put(mint, 6); + try std.testing.expectEqual(@as(?u8, 6), cache.get(mint)); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: invalid state byte rejects" { + // State byte = 3 is not a valid 
TokenAccountState variant + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[STATE_OFFSET] = 3; + try std.testing.expect(ParsedTokenAccount.parse(&data) == null); + + // State byte = 255 is also invalid + data[STATE_OFFSET] = 255; + try std.testing.expect(ParsedTokenAccount.parse(&data) == null); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: max amount (u64 max)" { + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + + const mint = Pubkey{ .data = [_]u8{0xAA} ** 32 }; + @memcpy(data[MINT_OFFSET..][0..32], &mint.data); + const owner = Pubkey{ .data = [_]u8{0xBB} ** 32 }; + @memcpy(data[OWNER_OFFSET..][0..32], &owner.data); + std.mem.writeInt(u64, data[AMOUNT_OFFSET..][0..8], std.math.maxInt(u64), .little); + data[STATE_OFFSET] = 1; // initialized + + const parsed = ParsedTokenAccount.parse(&data).?; + try std.testing.expectEqual(std.math.maxInt(u64), parsed.amount); + try std.testing.expectEqual(mint, parsed.mint); + try std.testing.expectEqual(owner, parsed.owner); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: data exactly TOKEN_ACCOUNT_SIZE" { + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[STATE_OFFSET] = 1; + try std.testing.expect(ParsedTokenAccount.parse(&data) != null); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: data larger than Token-2022 with extensions" { + // Token-2022 accounts can be larger than 165 bytes with extensions + var data: [TOKEN_ACCOUNT_SIZE + 100]u8 = undefined; + @memset(&data, 0); + + const mint = Pubkey{ .data = [_]u8{0xCC} ** 32 }; + @memcpy(data[MINT_OFFSET..][0..32], &mint.data); + const owner = Pubkey{ .data = [_]u8{0xDD} ** 32 }; + @memcpy(data[OWNER_OFFSET..][0..32], &owner.data); + std.mem.writeInt(u64, data[AMOUNT_OFFSET..][0..8], 42, .little); + data[STATE_OFFSET] = 1; + + const parsed = ParsedTokenAccount.parse(&data).?; + try std.testing.expectEqual(@as(u64, 42), parsed.amount); + try std.testing.expectEqual(mint, 
parsed.mint); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: data one byte too short" { + var data: [TOKEN_ACCOUNT_SIZE - 1]u8 = undefined; + @memset(&data, 0); + data[STATE_OFFSET] = 1; + try std.testing.expect(ParsedTokenAccount.parse(&data) == null); +} + +test "runtime.spl_token.ParsedTokenAccount.parse: zero amount initialized" { + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[STATE_OFFSET] = 1; + // Amount is already 0 from @memset + + const parsed = ParsedTokenAccount.parse(&data).?; + try std.testing.expectEqual(@as(u64, 0), parsed.amount); + try std.testing.expectEqual(TokenAccountState.initialized, parsed.state); +} + +test "runtime.spl_token.ParsedMint.parse: various decimal values" { + const test_decimals = [_]u8{ 0, 1, 6, 9, 18, 255 }; + for (test_decimals) |dec| { + var data: [MINT_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = dec; + data[MINT_IS_INITIALIZED_OFFSET] = 1; + + const parsed = ParsedMint.parse(&data).?; + try std.testing.expectEqual(dec, parsed.decimals); + } +} + +test "runtime.spl_token.ParsedMint.parse: data exactly MINT_ACCOUNT_SIZE" { + var data: [MINT_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = 9; + data[MINT_IS_INITIALIZED_OFFSET] = 1; + try std.testing.expect(ParsedMint.parse(&data) != null); +} + +test "runtime.spl_token.ParsedMint.parse: data larger than Token-2022 mint with extensions" { + var data: [MINT_ACCOUNT_SIZE + 200]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = 18; + data[MINT_IS_INITIALIZED_OFFSET] = 1; + + const parsed = ParsedMint.parse(&data).?; + try std.testing.expectEqual(@as(u8, 18), parsed.decimals); +} + +test "runtime.spl_token.ParsedMint.parse: data one byte too short" { + var data: [MINT_ACCOUNT_SIZE - 1]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = 6; + data[MINT_IS_INITIALIZED_OFFSET] = 1; + try std.testing.expect(ParsedMint.parse(&data) == null); +} + 
+test "runtime.spl_token.ParsedMint.parse: non-zero is_initialized byte" { + // Any non-zero value should count as initialized (Agave uses bool) + var data: [MINT_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = 6; + data[MINT_IS_INITIALIZED_OFFSET] = 255; // any non-zero + + const parsed = ParsedMint.parse(&data); + try std.testing.expect(parsed != null); +} + +test "runtime.spl_token.realNumberString: single digit amount with many decimals" { + const allocator = std.testing.allocator; + // Agave test case: amount=1, decimals=9 -> "0.000000001" + const result = try realNumberString(allocator, 1, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("0.000000001", result); +} + +test "runtime.spl_token.realNumberString: large amount (u64 max)" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, std.math.maxInt(u64), 0); + defer allocator.free(result); + try std.testing.expectEqualStrings("18446744073709551615", result); +} + +test "runtime.spl_token.realNumberString: large amount with decimals" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, std.math.maxInt(u64), 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("18446744073.709551615", result); +} + +test "runtime.spl_token.realNumberString: 1 decimal" { + const allocator = std.testing.allocator; + const result = try realNumberString(allocator, 15, 1); + defer allocator.free(result); + try std.testing.expectEqualStrings("1.5", result); +} + +test "runtime.spl_token.realNumberString: amount exactly equals decimals digits" { + const allocator = std.testing.allocator; + // amount=123, decimals=3 -> "0.123" + const result = try realNumberString(allocator, 123, 3); + defer allocator.free(result); + try std.testing.expectEqualStrings("0.123", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: single lamport (Agave test)" { + const allocator = 
std.testing.allocator; + // Agave test: amount=1, decimals=9 -> "0.000000001" + const result = try realNumberStringTrimmed(allocator, 1, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("0.000000001", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: exact round number (Agave test)" { + const allocator = std.testing.allocator; + // Agave test: amount=1_000_000_000, decimals=9 -> "1" + const result = try realNumberStringTrimmed(allocator, 1_000_000_000, 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("1", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: large amount with high precision (Agave test)" { + const allocator = std.testing.allocator; + // Agave test: 1_234_567_890 with 3 decimals -> "1234567.89" + const result = try realNumberStringTrimmed(allocator, 1_234_567_890, 3); + defer allocator.free(result); + try std.testing.expectEqualStrings("1234567.89", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: u64 max with 9 decimals" { + const allocator = std.testing.allocator; + const result = try realNumberStringTrimmed(allocator, std.math.maxInt(u64), 9); + defer allocator.free(result); + try std.testing.expectEqualStrings("18446744073.709551615", result); +} + +test "runtime.spl_token.formatTokenAmount: zero amount zero decimals" { + const allocator = std.testing.allocator; + const result = try formatTokenAmount(allocator, 0, 0); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("0", result.amount); + try std.testing.expectEqualStrings("0", result.ui_amount_string); + try std.testing.expectEqual(@as(u8, 0), result.decimals); + try std.testing.expectApproxEqRel(@as(f64, 0.0), result.ui_amount.?, 0.0001); +} + +test "runtime.spl_token.formatTokenAmount: zero amount 9 decimals" { + const allocator = std.testing.allocator; + const result = try formatTokenAmount(allocator, 0, 9); + defer result.deinit(allocator); + + try 
std.testing.expectEqualStrings("0", result.amount); + try std.testing.expectEqualStrings("0", result.ui_amount_string); + try std.testing.expectEqual(@as(u8, 9), result.decimals); +} + +test "runtime.spl_token.formatTokenAmount: USDC style (6 decimals, 1 million)" { + const allocator = std.testing.allocator; + // 1 USDC = 1_000_000 with 6 decimals + const result = try formatTokenAmount(allocator, 1_000_000, 6); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("1000000", result.amount); + try std.testing.expectEqualStrings("1", result.ui_amount_string); + try std.testing.expectApproxEqRel(@as(f64, 1.0), result.ui_amount.?, 0.0001); +} + +test "runtime.spl_token.formatTokenAmount: max u64 amount" { + const allocator = std.testing.allocator; + const result = try formatTokenAmount(allocator, std.math.maxInt(u64), 0); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("18446744073709551615", result.amount); + try std.testing.expectEqualStrings("18446744073709551615", result.ui_amount_string); +} + +test "runtime.spl_token.formatTokenAmount: ui_amount precision (Agave pattern)" { + const allocator = std.testing.allocator; + // 1.234567890 SOL + const result = try formatTokenAmount(allocator, 1_234_567_890, 9); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("1234567890", result.amount); + try std.testing.expectApproxEqRel(@as(f64, 1.23456789), result.ui_amount.?, 0.0001); + // Trimmed string should not have trailing zero + try std.testing.expectEqualStrings("1.23456789", result.ui_amount_string); +} + +test "runtime.spl_token.MintDecimalsCache: multiple mints" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + + const mint1 = Pubkey{ .data = [_]u8{1} ** 32 }; + const mint2 = Pubkey{ .data = [_]u8{2} ** 32 }; + const mint3 = Pubkey{ .data = [_]u8{3} ** 32 }; + + try cache.put(mint1, 6); + try cache.put(mint2, 9); + try 
cache.put(mint3, 0); + + try std.testing.expectEqual(@as(?u8, 6), cache.get(mint1)); + try std.testing.expectEqual(@as(?u8, 9), cache.get(mint2)); + try std.testing.expectEqual(@as(?u8, 0), cache.get(mint3)); +} + +test "runtime.spl_token.MintDecimalsCache: overwrite existing entry" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + + const mint = Pubkey{ .data = [_]u8{1} ** 32 }; + try cache.put(mint, 6); + try std.testing.expectEqual(@as(?u8, 6), cache.get(mint)); + + // Overwrite with new value + try cache.put(mint, 9); + try std.testing.expectEqual(@as(?u8, 9), cache.get(mint)); +} + +test "runtime.spl_token.MintDecimalsCache: unknown mint returns null" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + + const unknown = Pubkey{ .data = [_]u8{0xFF} ** 32 }; + try std.testing.expectEqual(@as(?u8, null), cache.get(unknown)); +} + +test "runtime.spl_token.TokenAccountState: all enum values" { + try std.testing.expectEqual(@as(u8, 0), @intFromEnum(TokenAccountState.uninitialized)); + try std.testing.expectEqual(@as(u8, 1), @intFromEnum(TokenAccountState.initialized)); + try std.testing.expectEqual(@as(u8, 2), @intFromEnum(TokenAccountState.frozen)); +} + +test "runtime.spl_token.collectRawTokenBalances: empty accounts" { + const accounts: []const account_loader.LoadedAccount = &.{}; + const result = collectRawTokenBalances(accounts); + try std.testing.expectEqual(@as(usize, 0), result.len); +} + +test "runtime.spl_token.collectRawTokenBalances: non-token accounts skipped" { + // Create accounts owned by the system program (not a token program) + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[STATE_OFFSET] = 1; + + const accounts = [_]account_loader.LoadedAccount{.{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &data, + .owner = sig.runtime.program.system.ID, // not a 
token program + .executable = false, + .rent_epoch = 0, + }, + }}; + const result = collectRawTokenBalances(&accounts); + try std.testing.expectEqual(@as(usize, 0), result.len); +} + +test "runtime.spl_token.collectRawTokenBalances: token account collected" { + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + + const mint = Pubkey{ .data = [_]u8{0xAA} ** 32 }; + @memcpy(data[MINT_OFFSET..][0..32], &mint.data); + const owner = Pubkey{ .data = [_]u8{0xBB} ** 32 }; + @memcpy(data[OWNER_OFFSET..][0..32], &owner.data); + std.mem.writeInt(u64, data[AMOUNT_OFFSET..][0..8], 5_000_000, .little); + data[STATE_OFFSET] = 1; + + const accounts = [_]account_loader.LoadedAccount{.{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &data, + .owner = ids.TOKEN_PROGRAM_ID, + .executable = false, + .rent_epoch = 0, + }, + }}; + const result = collectRawTokenBalances(&accounts); + try std.testing.expectEqual(@as(usize, 1), result.len); + try std.testing.expectEqual(@as(u8, 0), result.constSlice()[0].account_index); + try std.testing.expectEqual(mint, result.constSlice()[0].mint); + try std.testing.expectEqual(owner, result.constSlice()[0].owner); + try std.testing.expectEqual(@as(u64, 5_000_000), result.constSlice()[0].amount); + try std.testing.expectEqual(ids.TOKEN_PROGRAM_ID, result.constSlice()[0].program_id); +} + +test "runtime.spl_token.collectRawTokenBalances: Token-2022 account collected" { + var data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + + const mint = Pubkey{ .data = [_]u8{0x11} ** 32 }; + @memcpy(data[MINT_OFFSET..][0..32], &mint.data); + const owner = Pubkey{ .data = [_]u8{0x22} ** 32 }; + @memcpy(data[OWNER_OFFSET..][0..32], &owner.data); + std.mem.writeInt(u64, data[AMOUNT_OFFSET..][0..8], 100, .little); + data[STATE_OFFSET] = 1; + + const accounts = [_]account_loader.LoadedAccount{.{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &data, + .owner = 
ids.TOKEN_2022_PROGRAM_ID, + .executable = false, + .rent_epoch = 0, + }, + }}; + const result = collectRawTokenBalances(&accounts); + try std.testing.expectEqual(@as(usize, 1), result.len); + try std.testing.expectEqual(ids.TOKEN_2022_PROGRAM_ID, result.constSlice()[0].program_id); +} + +test "runtime.spl_token.collectRawTokenBalances: mixed token and non-token accounts" { + // Account 0: system program (not token): should be skipped + var system_data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&system_data, 0); + system_data[STATE_OFFSET] = 1; + + // Account 1: SPL Token account: should be collected + var token_data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&token_data, 0); + const mint1 = Pubkey{ .data = [_]u8{0xAA} ** 32 }; + @memcpy(token_data[MINT_OFFSET..][0..32], &mint1.data); + const owner1 = Pubkey{ .data = [_]u8{0xBB} ** 32 }; + @memcpy(token_data[OWNER_OFFSET..][0..32], &owner1.data); + std.mem.writeInt(u64, token_data[AMOUNT_OFFSET..][0..8], 1000, .little); + token_data[STATE_OFFSET] = 1; + + // Account 2: Token-2022 account: should be collected + var token2022_data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&token2022_data, 0); + const mint2 = Pubkey{ .data = [_]u8{0xCC} ** 32 }; + @memcpy(token2022_data[MINT_OFFSET..][0..32], &mint2.data); + const owner2 = Pubkey{ .data = [_]u8{0xDD} ** 32 }; + @memcpy(token2022_data[OWNER_OFFSET..][0..32], &owner2.data); + std.mem.writeInt(u64, token2022_data[AMOUNT_OFFSET..][0..8], 2000, .little); + token2022_data[STATE_OFFSET] = 2; // frozen + + // Account 3: uninitialized token account: should be skipped + var uninit_data: [TOKEN_ACCOUNT_SIZE]u8 = undefined; + @memset(&uninit_data, 0); + uninit_data[STATE_OFFSET] = 0; // uninitialized + + const accounts = [_]account_loader.LoadedAccount{ + .{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &system_data, + .owner = sig.runtime.program.system.ID, + .executable = false, + .rent_epoch = 0, + }, + }, + .{ + .pubkey = 
Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &token_data, + .owner = ids.TOKEN_PROGRAM_ID, + .executable = false, + .rent_epoch = 0, + }, + }, + .{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &token2022_data, + .owner = ids.TOKEN_2022_PROGRAM_ID, + .executable = false, + .rent_epoch = 0, + }, + }, + .{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &uninit_data, + .owner = ids.TOKEN_PROGRAM_ID, + .executable = false, + .rent_epoch = 0, + }, + }, + }; + + const result = collectRawTokenBalances(&accounts); + // Only accounts 1 and 2 should be collected (system skipped, uninitialized skipped) + try std.testing.expectEqual(@as(usize, 2), result.len); + try std.testing.expectEqual(@as(u8, 1), result.constSlice()[0].account_index); + try std.testing.expectEqual(@as(u8, 2), result.constSlice()[1].account_index); + try std.testing.expectEqual(@as(u64, 1000), result.constSlice()[0].amount); + try std.testing.expectEqual(@as(u64, 2000), result.constSlice()[1].amount); + try std.testing.expectEqual(ids.TOKEN_PROGRAM_ID, result.constSlice()[0].program_id); + try std.testing.expectEqual(ids.TOKEN_2022_PROGRAM_ID, result.constSlice()[1].program_id); +} + +test "runtime.spl_token.collectRawTokenBalances: short data account skipped" { + // Token program owner but data too short + var short_data: [100]u8 = undefined; + @memset(&short_data, 0); + + const accounts = [_]account_loader.LoadedAccount{.{ + .pubkey = Pubkey.ZEROES, + .account = .{ + .lamports = 1_000_000, + .data = &short_data, + .owner = ids.TOKEN_PROGRAM_ID, + .executable = false, + .rent_epoch = 0, + }, + }}; + const result = collectRawTokenBalances(&accounts); + try std.testing.expectEqual(@as(usize, 0), result.len); +} + +test "runtime.spl_token.isTokenProgram: distinct pubkeys" { + // Verify TOKEN_PROGRAM_ID and TOKEN_2022_PROGRAM_ID are different + try std.testing.expect(!ids.TOKEN_PROGRAM_ID.equals(&ids.TOKEN_2022_PROGRAM_ID)); + 
+ // Random pubkeys should not be token programs + const random_key = Pubkey{ .data = [_]u8{0xDE} ** 32 }; + try std.testing.expect(!isTokenProgram(random_key)); +} + +test "runtime.spl_token.RawTokenBalance struct layout" { + // Verify RawTokenBalance fields are properly accessible + const balance = RawTokenBalance{ + .account_index = 5, + .mint = Pubkey{ .data = [_]u8{1} ** 32 }, + .owner = Pubkey{ .data = [_]u8{2} ** 32 }, + .amount = 999_999, + .program_id = ids.TOKEN_PROGRAM_ID, + }; + try std.testing.expectEqual(@as(u8, 5), balance.account_index); + try std.testing.expectEqual(@as(u64, 999_999), balance.amount); +} + +test "runtime.spl_token.realNumberString: 2 decimals (Agave USDC-like)" { + const allocator = std.testing.allocator; + // Agave tests token amounts with 2 decimals + const result = try realNumberString(allocator, 4200, 2); + defer allocator.free(result); + try std.testing.expectEqualStrings("42.00", result); +} + +test "runtime.spl_token.realNumberString: 18 decimals (high precision token)" { + const allocator = std.testing.allocator; + // Some tokens use 18 decimals (like ETH-bridged tokens) + const result = try realNumberString(allocator, 1_000_000_000_000_000_000, 18); + defer allocator.free(result); + try std.testing.expectEqualStrings("1.000000000000000000", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: 2 decimals trims" { + const allocator = std.testing.allocator; + const result = try realNumberStringTrimmed(allocator, 4200, 2); + defer allocator.free(result); + try std.testing.expectEqualStrings("42", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: 18 decimals large amount" { + const allocator = std.testing.allocator; + const result = try realNumberStringTrimmed(allocator, 1_000_000_000_000_000_000, 18); + defer allocator.free(result); + try std.testing.expectEqualStrings("1", result); +} + +test "runtime.spl_token.realNumberStringTrimmed: 18 decimals with fractional" { + const allocator = 
std.testing.allocator; + // 1.5 in 18 decimals + const result = try realNumberStringTrimmed(allocator, 1_500_000_000_000_000_000, 18); + defer allocator.free(result); + try std.testing.expectEqualStrings("1.5", result); +} + +test "runtime.spl_token.formatTokenAmount: all fields consistent" { + const allocator = std.testing.allocator; + // 42.5 USDC (6 decimals) + const result = try formatTokenAmount(allocator, 42_500_000, 6); + defer result.deinit(allocator); + + try std.testing.expectEqualStrings("42500000", result.amount); + try std.testing.expectEqual(@as(u8, 6), result.decimals); + try std.testing.expectApproxEqRel(@as(f64, 42.5), result.ui_amount.?, 0.0001); + try std.testing.expectEqualStrings("42.5", result.ui_amount_string); +} + +/// Mock account reader for testing getMintDecimals and resolveTokenBalances. +/// Mimics the interface of FallbackAccountReader used in production. +const MockAccountReader = struct { + mint_data: std.AutoHashMap(Pubkey, [MINT_ACCOUNT_SIZE]u8), + + const MockAccount = struct { + data: DataHandle, + + const DataHandle = struct { + slice: []const u8, + pub fn constSlice(self: DataHandle) []const u8 { + return self.slice; + } + }; + + pub fn deinit(self: MockAccount, allocator: Allocator) void { + allocator.free(self.data.slice); + } + }; + + fn init(allocator: Allocator) MockAccountReader { + return .{ .mint_data = std.AutoHashMap(Pubkey, [MINT_ACCOUNT_SIZE]u8).init(allocator) }; + } + + fn deinit(self: *MockAccountReader) void { + self.mint_data.deinit(); + } + + /// Register a mint with the given decimals. 
+ fn addMint(self: *MockAccountReader, mint: Pubkey, decimals: u8) !void { + var data: [MINT_ACCOUNT_SIZE]u8 = undefined; + @memset(&data, 0); + data[MINT_DECIMALS_OFFSET] = decimals; + data[MINT_IS_INITIALIZED_OFFSET] = 1; + try self.mint_data.put(mint, data); + } + + pub fn get(self: MockAccountReader, allocator: Allocator, pubkey: Pubkey) !?MockAccount { + const data = self.mint_data.get(pubkey) orelse return null; + return MockAccount{ + .data = .{ .slice = try allocator.dupe(u8, &data) }, + }; + } +}; + +test "runtime.spl_token.getMintDecimals: cache hit" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + var reader = MockAccountReader.init(allocator); + defer reader.deinit(); + + const mint = Pubkey{ .data = [_]u8{0x01} ** 32 }; + try cache.put(mint, 9); + + // Should return cached value without hitting the reader + const decimals = try getMintDecimals(allocator, &cache, MockAccountReader, reader, mint); + try std.testing.expectEqual(@as(u8, 9), decimals); +} + +test "runtime.spl_token.getMintDecimals: cache miss fetches from reader" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + var reader = MockAccountReader.init(allocator); + defer reader.deinit(); + + const mint = Pubkey{ .data = [_]u8{0x02} ** 32 }; + try reader.addMint(mint, 6); + + const decimals = try getMintDecimals(allocator, &cache, MockAccountReader, reader, mint); + try std.testing.expectEqual(@as(u8, 6), decimals); + + // Should now be cached + try std.testing.expectEqual(@as(?u8, 6), cache.get(mint)); +} + +test "runtime.spl_token.getMintDecimals: unknown mint returns MintNotFound" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + var reader = MockAccountReader.init(allocator); + defer reader.deinit(); + + const unknown_mint = Pubkey{ .data = [_]u8{0xFF} ** 32 }; + const result = 
getMintDecimals(allocator, &cache, MockAccountReader, reader, unknown_mint); + try std.testing.expectError(error.MintNotFound, result); +} + +test "runtime.spl_token.resolveTokenBalances: empty raw balances returns null" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + var reader = MockAccountReader.init(allocator); + defer reader.deinit(); + + const raw = RawTokenBalances{}; + const result = try resolveTokenBalances(allocator, raw, &cache, MockAccountReader, reader); + try std.testing.expectEqual(@as(?[]TransactionTokenBalance, null), result); +} + +test "runtime.spl_token.resolveTokenBalances: resolves token balances with mint lookup" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + var reader = MockAccountReader.init(allocator); + defer reader.deinit(); + + const mint1 = Pubkey{ .data = [_]u8{0xAA} ** 32 }; + const mint2 = Pubkey{ .data = [_]u8{0xBB} ** 32 }; + try reader.addMint(mint1, 6); + try reader.addMint(mint2, 9); + + var raw = RawTokenBalances{}; + raw.appendAssumeCapacity(.{ + .account_index = 1, + .mint = mint1, + .owner = Pubkey{ .data = [_]u8{0x11} ** 32 }, + .amount = 1_000_000, // 1.0 with 6 decimals + .program_id = ids.TOKEN_PROGRAM_ID, + }); + raw.appendAssumeCapacity(.{ + .account_index = 3, + .mint = mint2, + .owner = Pubkey{ .data = [_]u8{0x22} ** 32 }, + .amount = 1_500_000_000, // 1.5 with 9 decimals + .program_id = ids.TOKEN_2022_PROGRAM_ID, + }); + + const result = (try resolveTokenBalances(allocator, raw, &cache, MockAccountReader, reader)).?; + defer { + for (result) |item| item.deinit(allocator); + allocator.free(result); + } + + try std.testing.expectEqual(@as(usize, 2), result.len); + + // First token balance + try std.testing.expectEqual(@as(u8, 1), result[0].account_index); + try std.testing.expectEqual(mint1, result[0].mint); + try std.testing.expectEqual(@as(u8, 6), 
result[0].ui_token_amount.decimals); + try std.testing.expectEqualStrings("1000000", result[0].ui_token_amount.amount); + try std.testing.expectEqualStrings("1", result[0].ui_token_amount.ui_amount_string); + + // Second token balance + try std.testing.expectEqual(@as(u8, 3), result[1].account_index); + try std.testing.expectEqual(mint2, result[1].mint); + try std.testing.expectEqual(@as(u8, 9), result[1].ui_token_amount.decimals); + try std.testing.expectEqualStrings("1500000000", result[1].ui_token_amount.amount); + try std.testing.expectEqualStrings("1.5", result[1].ui_token_amount.ui_amount_string); +} + +test "runtime.spl_token.resolveTokenBalances: skips tokens with missing mints" { + const allocator = std.testing.allocator; + var cache = MintDecimalsCache.init(allocator); + defer cache.deinit(); + var reader = MockAccountReader.init(allocator); + defer reader.deinit(); + + const known_mint = Pubkey{ .data = [_]u8{0xAA} ** 32 }; + const unknown_mint = Pubkey{ .data = [_]u8{0xFF} ** 32 }; + try reader.addMint(known_mint, 6); + // unknown_mint is NOT added to reader + + var raw = RawTokenBalances{}; + raw.appendAssumeCapacity(.{ + .account_index = 0, + .mint = unknown_mint, // This one will be skipped + .owner = Pubkey{ .data = [_]u8{0x11} ** 32 }, + .amount = 100, + .program_id = ids.TOKEN_PROGRAM_ID, + }); + raw.appendAssumeCapacity(.{ + .account_index = 2, + .mint = known_mint, // This one will succeed + .owner = Pubkey{ .data = [_]u8{0x22} ** 32 }, + .amount = 500_000, + .program_id = ids.TOKEN_PROGRAM_ID, + }); + + const result = (try resolveTokenBalances(allocator, raw, &cache, MockAccountReader, reader)).?; + defer { + for (result) |item| item.deinit(allocator); + allocator.free(result); + } + + // Only the known mint should be in the result (unknown is skipped via catch continue) + try std.testing.expectEqual(@as(usize, 1), result.len); + try std.testing.expectEqual(@as(u8, 2), result[0].account_index); + try std.testing.expectEqual(known_mint, 
result[0].mint); +} diff --git a/src/runtime/transaction_context.zig b/src/runtime/transaction_context.zig index e563c21aed..a05d5f3956 100644 --- a/src/runtime/transaction_context.zig +++ b/src/runtime/transaction_context.zig @@ -116,6 +116,15 @@ pub const TransactionContext = struct { self.allocator.free(self.accounts); if (self.log_collector) |*lc| lc.deinit(self.allocator); + + // Clean up CPI instruction infos stored in the trace. + // Top-level instructions (depth == 1) are owned by ResolvedTransaction and cleaned up there. + // CPI instructions (depth > 1) are created during execution and owned by this trace. + for (self.instruction_trace.slice()) |entry| { + if (entry.depth > 1) { + entry.ixn_info.deinit(self.allocator); + } + } } /// [agave] https://github.com/anza-xyz/agave/blob/134be7c14066ea00c9791187d6bbc4795dd92f0e/sdk/src/transaction_context.rs#L233 diff --git a/src/runtime/transaction_execution.zig b/src/runtime/transaction_execution.zig index 1893439eff..938f3f2e50 100644 --- a/src/runtime/transaction_execution.zig +++ b/src/runtime/transaction_execution.zig @@ -1,4 +1,5 @@ const std = @import("std"); +const std14 = @import("std14"); const sig = @import("../sig.zig"); const tracy = @import("tracy"); @@ -6,6 +7,7 @@ const account_loader = sig.runtime.account_loader; const program_loader = sig.runtime.program_loader; const executor = sig.runtime.executor; const compute_budget_program = sig.runtime.program.compute_budget; +const cost_model = sig.runtime.cost_model; const vm = sig.vm; const Ancestors = sig.core.Ancestors; @@ -68,6 +70,27 @@ pub const RuntimeTransaction = struct { accounts: std.MultiArrayList(AccountMeta) = .{}, compute_budget_instruction_details: ComputeBudgetInstructionDetails = .{}, num_lookup_tables: u64, + + /// Check if this transaction is a simple vote transaction. 
+ /// A simple vote transaction has: + /// - Exactly 1 instruction + /// - That instruction is a Vote program instruction + /// - 1 or 2 signatures + /// - No address lookup tables (legacy message) + pub fn isSimpleVoteTransaction(self: *const RuntimeTransaction) bool { + // Must have exactly 1 instruction + if (self.instructions.len != 1) return false; + + // Must have 1 or 2 signatures + if (self.signature_count == 0 or self.signature_count > 2) return false; + + // Must be a legacy message (no lookup tables) + if (self.num_lookup_tables > 0) return false; + + // First instruction must be vote program + const instr = self.instructions[0]; + return instr.program_meta.pubkey.equals(&sig.runtime.program.vote.ID); + } }; pub const TransactionExecutionEnvironment = struct { @@ -118,6 +141,10 @@ pub const ExecutedTransaction = struct { pub fn deinit(self: *ExecutedTransaction, allocator: std.mem.Allocator) void { if (self.log_collector) |*lc| lc.deinit(allocator); } + + pub fn totalCost(self: *const ExecutedTransaction) u64 { + return self.compute_limit - self.compute_meter; + } }; pub const ProcessedTransaction = struct { @@ -130,8 +157,20 @@ /// If null, the transaction did not execute, due to a failure before /// execution could begin. outputs: ?ExecutedTransaction, + /// Pre-execution lamport balances for all accounts in the transaction. + /// Order matches the transaction's account keys. + pre_balances: PreBalances, + /// Pre-execution token balances for SPL Token accounts in the transaction. + /// Used for RPC transaction status metadata. + pre_token_balances: PreTokenBalances, + /// Total cost units for this transaction, used for block scheduling/packing. + /// This is the sum of signature_cost + write_lock_cost + data_bytes_cost +
+ /// programs_execution_cost + loaded_accounts_data_size_cost. + cost_units: u64, pub const Writes = LoadedTransactionAccounts.Accounts; + pub const PreBalances = std14.BoundedArray(u64, account_loader.MAX_TX_ACCOUNT_LOCKS); + pub const PreTokenBalances = sig.runtime.spl_token.RawTokenBalances; pub fn deinit(self: ProcessedTransaction, allocator: std.mem.Allocator) void { for (self.writes.slice()) |account| account.deinit(allocator); @@ -241,18 +280,48 @@ pub fn loadAndExecuteTransaction( try wrapDB(account_store.put(item.pubkey, item.account)); loaded_accounts_data_size += @intCast(rollback.account.data.len); } - return .{ .ok = .{ - .fees = fees, - .rent = 0, - .writes = writes, - .err = err, - .loaded_accounts_data_size = loaded_accounts_data_size, - .outputs = null, - } }; + // Calculate cost units even for failed transactions + const tx_cost = cost_model.calculateTransactionCost( + transaction, + &compute_budget_limits, + loaded_accounts_data_size, + env.feature_set, + env.slot, + ); + return .{ + .ok = .{ + .fees = fees, + .rent = 0, + .writes = writes, + .err = err, + .loaded_accounts_data_size = loaded_accounts_data_size, + .outputs = null, + .pre_balances = .{}, // Empty - accounts failed to load + .pre_token_balances = .{}, // Empty - accounts failed to load + .cost_units = tx_cost.total(), + }, + }; }, }; errdefer for (loaded_accounts.accounts.slice()) |acct| acct.deinit(tmp_allocator); + // Capture pre-execution balances for all accounts (for RPC transaction status) + // Note: The fee payer (index 0) has already had the fee deducted by checkFeePayer, + // so we add it back to get the true pre-execution balance. + var pre_balances = ProcessedTransaction.PreBalances{}; + for (loaded_accounts.accounts.slice(), 0..) 
|account, idx| { + const balance = if (idx == 0) + account.account.lamports + fees.total() + else + account.account.lamports; + pre_balances.append(balance) catch unreachable; + } + + // Capture pre-execution token balances for SPL Token accounts + const pre_token_balances = sig.runtime.spl_token.collectRawTokenBalances( + loaded_accounts.accounts.slice(), + ); + for (loaded_accounts.accounts.slice()) |account| try program_loader.loadIfProgram( programs_allocator, program_map, @@ -295,6 +364,19 @@ for (writes.slice()) |*acct| try wrapDB(account_store.put(acct.pubkey, acct.account)); + // Calculate cost units for executed transaction using actual consumed CUs. + // Pass only the raw executed compute units (compute_limit - compute_meter remaining). + // Signature costs (transaction + precompile) are computed inside the cost model, + // matching Agave's architecture. + // [agave] https://github.com/anza-xyz/agave/blob/2717084afeeb7baad4342468c27f528ef617a3cf/cost-model/src/cost_model.rs#L61 + const tx_cost = cost_model.calculateCostForExecutedTransaction( + transaction, + executed_transaction.totalCost(), + loaded_accounts.loaded_accounts_data_size, + env.feature_set, + env.slot, + ); + return .{ .ok = .{ .fees = fees, @@ -303,6 +385,9 @@ .err = executed_transaction.err, .loaded_accounts_data_size = loaded_accounts.loaded_accounts_data_size, .outputs = executed_transaction, + .pre_balances = pre_balances, + .pre_token_balances = pre_token_balances, + .cost_units = tx_cost.total(), }, }; } @@ -359,7 +444,10 @@ pub fn executeTransaction( var zone = tracy.Zone.init(@src(), .{ .name = "executeTransaction" }); defer zone.deinit(); - const compute_budget = compute_budget_limits.intoComputeBudget(); + const compute_budget = compute_budget_limits.intoComputeBudget( + environment.feature_set, + environment.slot, + ); const log_collector = if (config.log) try LogCollector.init(allocator, 
config.log_messages_byte_limit) diff --git a/src/vm/syscalls/cpi.zig b/src/vm/syscalls/cpi.zig index bba1390010..dac47b831e 100644 --- a/src/vm/syscalls/cpi.zig +++ b/src/vm/syscalls/cpi.zig @@ -1087,7 +1087,10 @@ pub fn invokeSigned(AccountInfo: type) sig.vm.SyscallFn { instruction, signers.slice(), ); - defer info.deinit(ic.tc.allocator); + // NOTE: We don't call info.deinit() here because the InstructionInfo is stored + // in the instruction_trace (by value copy). The trace needs the account_metas + // memory to remain valid until the transaction completes. Cleanup happens in + // TransactionContext.deinit() which iterates over the trace and deinits each entry. var accounts = try translateAccounts( AccountInfo, diff --git a/src/vm/syscalls/lib.zig b/src/vm/syscalls/lib.zig index 5804fff312..7631bc3b21 100644 --- a/src/vm/syscalls/lib.zig +++ b/src/vm/syscalls/lib.zig @@ -1180,11 +1180,10 @@ test getProcessedSiblingInstruction { cache.deinit(allocator); } - var allocated_account_metas: std.ArrayListUnmanaged(InstructionInfo.AccountMetas) = .empty; - defer { - for (allocated_account_metas.items) |*account_metas| account_metas.deinit(allocator); - allocated_account_metas.deinit(allocator); - } + // Track the first (depth==1) instruction's account_metas for manual cleanup. + // tc.deinit() handles depth > 1 entries automatically. + var first_account_metas: ?InstructionInfo.AccountMetas = null; + defer if (first_account_metas) |*am| am.deinit(allocator); const trace_indexes: [8]u8 = std.simd.iota(u8, 8); for ([_]u8{ 1, 2, 3, 2, 2, 3, 4, 3 }, 0..) 
|stack_height, index_in_trace| { @@ -1212,17 +1211,23 @@ test getProcessedSiblingInstruction { .is_writable = false, }); - try allocated_account_metas.append(allocator, info.account_metas); - tc.instruction_stack.appendAssumeCapacity(.{ .tc = &tc, .ixn_info = info, .depth = @intCast(tc.instruction_stack.len), }); + + const depth: u8 = @intCast(tc.instruction_stack.len); tc.instruction_trace.appendAssumeCapacity(.{ .ixn_info = info, - .depth = @intCast(tc.instruction_stack.len), + .depth = depth, }); + + // Only the depth==1 entry is skipped by tc.deinit()'s trace cleanup, so remember + // its account_metas here; the defer above releases them when the test ends. + if (depth == 1) { + first_account_metas = info.account_metas; + } } }